Aleksey Kiselev - 2 months ago
Java Question

Spark and Cassandra Java application: Exception in thread "main" java.lang.NoClassDefFoundError: org/apache/spark/sql/Dataset

I have an amazingly simple Java application, which I almost copied from this example: http://markmail.org/download.xqy?id=zua6upabiylzeetp&number=2

All I want to do is read the table data and display it in the Eclipse console.

My pom.xml:

<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <groupId>chat_connaction_test</groupId>
    <artifactId>ChatSparkConnectionTest</artifactId>
    <version>0.0.1-SNAPSHOT</version>
    <dependencies>
        <dependency>
            <groupId>com.datastax.cassandra</groupId>
            <artifactId>cassandra-driver-core</artifactId>
            <version>3.1.0</version>
        </dependency>

        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-core_2.10</artifactId>
            <version>2.0.0</version>
        </dependency>

        <!-- https://mvnrepository.com/artifact/com.datastax.spark/spark-cassandra-connector_2.10 -->
        <dependency>
            <groupId>com.datastax.spark</groupId>
            <artifactId>spark-cassandra-connector_2.10</artifactId>
            <version>2.0.0-M3</version>
        </dependency>

        <!-- https://mvnrepository.com/artifact/org.apache.spark/spark-streaming_2.10 -->
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-streaming_2.10</artifactId>
            <version>2.0.0</version>
        </dependency>
        <!--
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-hive_2.10</artifactId>
            <version>1.5.2</version>
        </dependency>
        -->
    </dependencies>
</project>


And my java code:

package com.chatSparkConnactionTest;

import static com.datastax.spark.connector.japi.CassandraJavaUtil.javaFunctions;

import java.io.Serializable;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;

import com.datastax.spark.connector.japi.CassandraRow;

public class JavaDemo implements Serializable {
    private static final long serialVersionUID = 1L;

    public static void main(String[] args) {
        SparkConf conf = new SparkConf()
            .setAppName("chat")
            .setMaster("local")
            .set("spark.executor.memory", "1g")
            .set("spark.cassandra.connection.host", "127.0.0.1");
        JavaSparkContext sc = new JavaSparkContext(conf);

        JavaRDD<String> cassandraRowsRDD = javaFunctions(sc)
            .cassandraTable("chat", "dictionary")
            .map(new Function<CassandraRow, String>() {
                @Override
                public String call(CassandraRow cassandraRow) throws Exception {
                    String tempResult = cassandraRow.toString();
                    System.out.println(tempResult);
                    return tempResult;
                }
            });

        System.out.println("Data as CassandraRows: \n" +
            cassandraRowsRDD.collect().size()); // THIS IS THE LINE WITH THE ERROR
    }
}


And here is my error:


16/10/05 20:49:18 INFO CassandraConnector: Connected to Cassandra cluster: Test Cluster
Exception in thread "main" java.lang.NoClassDefFoundError: org/apache/spark/sql/Dataset
    at java.lang.Class.getDeclaredMethods0(Native Method)
    at java.lang.Class.privateGetDeclaredMethods(Unknown Source)
    at java.lang.Class.getDeclaredMethod(Unknown Source)
    at java.io.ObjectStreamClass.getPrivateMethod(Unknown Source)
    at java.io.ObjectStreamClass.access$1700(Unknown Source)
    at java.io.ObjectStreamClass$2.run(Unknown Source)
    at java.io.ObjectStreamClass$2.run(Unknown Source)
    at java.security.AccessController.doPrivileged(Native Method)
    at java.io.ObjectStreamClass.<init>(Unknown Source)
    at java.io.ObjectStreamClass.lookup(Unknown Source)
    at java.io.ObjectOutputStream.writeObject0(Unknown Source)
    at java.io.ObjectOutputStream.defaultWriteFields(Unknown Source)
    at java.io.ObjectOutputStream.writeSerialData(Unknown Source)
    at java.io.ObjectOutputStream.writeOrdinaryObject(Unknown Source)
    at java.io.ObjectOutputStream.writeObject0(Unknown Source)
    at java.io.ObjectOutputStream.defaultWriteFields(Unknown Source)
    at java.io.ObjectOutputStream.writeSerialData(Unknown Source)
    at java.io.ObjectOutputStream.writeOrdinaryObject(Unknown Source)
    at java.io.ObjectOutputStream.writeObject0(Unknown Source)
    at java.io.ObjectOutputStream.writeObject(Unknown Source)
    at scala.collection.immutable.$colon$colon.writeObject(List.scala:379)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
    at java.lang.reflect.Method.invoke(Unknown Source)
    at java.io.ObjectStreamClass.invokeWriteObject(Unknown Source)
    at java.io.ObjectOutputStream.writeSerialData(Unknown Source)
    at java.io.ObjectOutputStream.writeOrdinaryObject(Unknown Source)
    at java.io.ObjectOutputStream.writeObject0(Unknown Source)
    at java.io.ObjectOutputStream.defaultWriteFields(Unknown Source)
    at java.io.ObjectOutputStream.writeSerialData(Unknown Source)
    at java.io.ObjectOutputStream.writeOrdinaryObject(Unknown Source)
    at java.io.ObjectOutputStream.writeObject0(Unknown Source)
    at java.io.ObjectOutputStream.defaultWriteFields(Unknown Source)
    at java.io.ObjectOutputStream.writeSerialData(Unknown Source)
    at java.io.ObjectOutputStream.writeOrdinaryObject(Unknown Source)
    at java.io.ObjectOutputStream.writeObject0(Unknown Source)
    at java.io.ObjectOutputStream.defaultWriteFields(Unknown Source)
    at java.io.ObjectOutputStream.writeSerialData(Unknown Source)
    at java.io.ObjectOutputStream.writeOrdinaryObject(Unknown Source)
    at java.io.ObjectOutputStream.writeObject0(Unknown Source)
    at java.io.ObjectOutputStream.writeObject(Unknown Source)
    at org.apache.spark.serializer.JavaSerializationStream.writeObject(JavaSerializer.scala:43)
    at org.apache.spark.serializer.JavaSerializerInstance.serialize(JavaSerializer.scala:100)
    at org.apache.spark.util.ClosureCleaner$.ensureSerializable(ClosureCleaner.scala:295)
    at org.apache.spark.util.ClosureCleaner$.org$apache$spark$util$ClosureCleaner$$clean(ClosureCleaner.scala:288)
    at org.apache.spark.util.ClosureCleaner$.clean(ClosureCleaner.scala:108)
    at org.apache.spark.SparkContext.clean(SparkContext.scala:2037)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:1896)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:1911)
    at org.apache.spark.rdd.RDD$$anonfun$collect$1.apply(RDD.scala:893)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
    at org.apache.spark.rdd.RDD.withScope(RDD.scala:358)
    at org.apache.spark.rdd.RDD.collect(RDD.scala:892)
    at org.apache.spark.api.java.JavaRDDLike$class.collect(JavaRDDLike.scala:360)
    at org.apache.spark.api.java.AbstractJavaRDDLike.collect(JavaRDDLike.scala:45)
    at com.chatSparkConnactionTest.JavaDemo.main(JavaDemo.java:37)
Caused by: java.lang.ClassNotFoundException: org.apache.spark.sql.Dataset
    at java.net.URLClassLoader.findClass(Unknown Source)
    at java.lang.ClassLoader.loadClass(Unknown Source)
    at sun.misc.Launcher$AppClassLoader.loadClass(Unknown Source)
    at java.lang.ClassLoader.loadClass(Unknown Source)
    ... 58 more


I updated my pom.xml, but that didn't resolve the error. Could anybody help me resolve this problem?

Thank you!

Update 1:
Here is my build path screenshot:
Link to my screenshot

Answer

You are getting the "java.lang.NoClassDefFoundError: org/apache/spark/sql/Dataset" error because the spark-sql dependency is missing from your pom.xml file.

If you want to read a Cassandra table with Spark 2.0.0, you need at least the following dependencies. Note that all the artifacts must be built against the same Scala version (here 2.11); your pom currently mixes 2.10 artifacts.

<dependency>
    <groupId>org.apache.spark</groupId>
    <artifactId>spark-core_2.11</artifactId>
    <version>2.0.0</version>
</dependency>
<dependency>
    <groupId>org.apache.spark</groupId>
    <artifactId>spark-sql_2.11</artifactId>
    <version>2.0.0</version>
</dependency>
<dependency>
    <groupId>com.datastax.spark</groupId>
    <artifactId>spark-cassandra-connector_2.11</artifactId>
    <version>2.0.0-M3</version>
</dependency>
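
After updating the pom.xml, one quick way to confirm that spark-sql actually made it onto the build path is to resolve the missing class by hand. This is just an illustrative throwaway helper, not part of the fix itself:

public class ClasspathCheck {
    public static void main(String[] args) throws ClassNotFoundException {
        // Throws the same ClassNotFoundException seen in the stack trace
        // if the spark-sql jar is still missing from the build path.
        Class.forName("org.apache.spark.sql.Dataset");
        System.out.println("spark-sql is on the classpath");
    }
}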

Spark 2.0.0 provides the SparkSession and Dataset APIs. Below is a sample program that reads a Cassandra table and prints its records.

import java.util.HashMap;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

public class SparkCassandraDatasetApplication {
    public static void main(String[] args) {
        SparkSession spark = SparkSession
                .builder()
                .appName("SparkCassandraDatasetApplication")
                .config("spark.sql.warehouse.dir", "file:///C:/temp")
                .config("spark.cassandra.connection.host", "127.0.0.1")
                .config("spark.cassandra.connection.port", "9042")
                .master("local[2]")
                .getOrCreate();

        // Read the Cassandra table through the connector's DataSource
        Dataset<Row> dataset = spark.read().format("org.apache.spark.sql.cassandra")
                .options(new HashMap<String, String>() {
                    {
                        put("keyspace", "mykeyspace");
                        put("table", "mytable");
                    }
                }).load();

        // Print the data
        dataset.show();
        spark.stop();
    }
}
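
Once the Dataset is loaded, the rest of the Dataset API is available on it. As a minimal sketch that could go in the same main method before spark.stop() (the "username" column and the value 'alice' are assumptions about your table schema):

// Sketch only: assumes the table has a "username" column.
Dataset<Row> filtered = dataset.filter(dataset.col("username").equalTo("alice"));
filtered.show();

// The same data can also be queried with SQL through a temporary view.
dataset.createOrReplaceTempView("users");
spark.sql("SELECT id, username FROM users WHERE username = 'alice'").show();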

If you would rather use the RDD API, use the sample program below.

import static com.datastax.spark.connector.japi.CassandraJavaUtil.javaFunctions;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;

import com.datastax.spark.connector.japi.CassandraJavaUtil;

public class SparkCassandraRDDApplication {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf()
                .setAppName("SparkCassandraRDDApplication")
                .setMaster("local[2]")
                .set("spark.cassandra.connection.host", "127.0.0.1")
                .set("spark.cassandra.connection.port", "9042");

        JavaSparkContext sc = new JavaSparkContext(conf);

        // Read: map each Cassandra row to a UserData bean
        JavaRDD<UserData> resultsRDD = javaFunctions(sc).cassandraTable(
                "mykeyspace", "mytable", CassandraJavaUtil.mapRowTo(UserData.class));

        // Print
        resultsRDD.foreach(data -> {
            System.out.println(data.id);
            System.out.println(data.username);
        });

        sc.stop();
    }
}
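
The connector's Java API can also write an RDD of beans back to Cassandra via its writerBuilder. A minimal sketch, continuing from the program above (sc is the JavaSparkContext, and the keyspace/table names are the same assumptions):

// Sketch: save one UserData bean into mykeyspace.mytable.
UserData user = new UserData();
user.setId("1");
user.setUsername("alice");

JavaRDD<UserData> toSave = sc.parallelize(java.util.Arrays.asList(user));
javaFunctions(toSave)
        .writerBuilder("mykeyspace", "mytable", CassandraJavaUtil.mapToRow(UserData.class))
        .saveToCassandra();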

The JavaBean (UserData) used in the above program is shown below.

import java.io.Serializable;

public class UserData implements Serializable {
    String id;
    String username;

    public String getId() {
        return id;
    }
    public void setId(String id) {
        this.id = id;
    }
    public String getUsername() {
        return username;
    }
    public void setUsername(String username) {
        this.username = username;
    }
}
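
For completeness, the examples assume a table shaped like the bean; the connector maps Cassandra columns to bean properties by name. A hypothetical one-off schema setup using the cassandra-driver-core dependency already in your pom (keyspace and table names are assumptions):

import com.datastax.driver.core.Cluster;
import com.datastax.driver.core.Session;

public class SchemaSetup {
    public static void main(String[] args) {
        // Cluster and Session are AutoCloseable in driver 3.x
        try (Cluster cluster = Cluster.builder().addContactPoint("127.0.0.1").build();
             Session session = cluster.connect()) {
            session.execute("CREATE KEYSPACE IF NOT EXISTS mykeyspace "
                    + "WITH replication = {'class': 'SimpleStrategy', 'replication_factor': 1}");
            session.execute("CREATE TABLE IF NOT EXISTS mykeyspace.mytable "
                    + "(id text PRIMARY KEY, username text)");
        }
    }
}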