
I want to read a record from a Cassandra database using Spark in Java. This is my code:

import com.datastax.spark.connector.japi.CassandraRow;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;

import static com.datastax.spark.connector.japi.CassandraJavaUtil.javaFunctions;

public class ProvaCass {
    public static void main(String[] args) {

        // Connection settings for the local Cassandra node
        SparkConf conf = new SparkConf(true)
                .set("spark.cassandra.connection.host", "127.0.0.1")
                .set("spark.cassandra.auth.username", "cassandra")
                .set("spark.cassandra.auth.password", "cassandra");
        conf.setAppName("Java API demo");
        conf.setMaster("local[4]");
        JavaSparkContext sc = new JavaSparkContext(conf);

        System.out.println("PRIMA");
        // Full scan of table prova.emp, rendering each row as a string
        JavaRDD<String> cassandraRowsRDD = javaFunctions(sc).cassandraTable("prova", "emp")
                .map(new Function<CassandraRow, String>() {
                    @Override
                    public String call(CassandraRow cassandraRow) throws Exception {
                        return cassandraRow.toString();
                    }
                });
        // first() triggers the actual job; this is where Spark serializes the closure
        String l = cassandraRowsRDD.first();
        System.out.println("Data as CassandraRows: \n" + l);
        sc.stop();
    }
}

I added the spark-cassandra-connector jar to the project (correctly, I think) and then wrote the code above, copying it from this site to test my setup. I inserted a record into a table of my local Cassandra database and verified that the record is present. When I run the program it fails with the exception below. Could you tell me step by step how to solve this? I don't know what to think, thanks....

Exception in thread "main" java.lang.NoClassDefFoundError: org/joda/time/DateTime
    at java.lang.Class.getDeclaredMethods0(Native Method)
    at java.lang.Class.privateGetDeclaredMethods(Class.java:2701)
    at java.lang.Class.getDeclaredMethod(Class.java:2128)
    at java.io.ObjectStreamClass.getPrivateMethod(ObjectStreamClass.java:1575)
    at java.io.ObjectStreamClass.access$1700(ObjectStreamClass.java:79)
    at java.io.ObjectStreamClass$2.run(ObjectStreamClass.java:508)
    at java.io.ObjectStreamClass$2.run(ObjectStreamClass.java:482)
    at java.security.AccessController.doPrivileged(Native Method)
    at java.io.ObjectStreamClass.<init>(ObjectStreamClass.java:482)
    at java.io.ObjectStreamClass.lookup(ObjectStreamClass.java:379)
    at java.io.ObjectOutputStream.writeClass(ObjectOutputStream.java:1213)
    at java.io.ObjectOutputStream.writeObject0(ObjectOutputStream.java:1120)
    at java.io.ObjectOutputStream.defaultWriteFields(ObjectOutputStream.java:1548)
    at java.io.ObjectOutputStream.writeSerialData(ObjectOutputStream.java:1509)
    at java.io.ObjectOutputStream.writeOrdinaryObject(ObjectOutputStream.java:1432)
    at java.io.ObjectOutputStream.writeObject0(ObjectOutputStream.java:1178)
    at java.io.ObjectOutputStream.defaultWriteFields(ObjectOutputStream.java:1548)
    at java.io.ObjectOutputStream.writeSerialData(ObjectOutputStream.java:1509)
    at java.io.ObjectOutputStream.writeOrdinaryObject(ObjectOutputStream.java:1432)
    at java.io.ObjectOutputStream.writeObject0(ObjectOutputStream.java:1178)
    at java.io.ObjectOutputStream.defaultWriteFields(ObjectOutputStream.java:1548)
    at java.io.ObjectOutputStream.writeSerialData(ObjectOutputStream.java:1509)
    at java.io.ObjectOutputStream.writeOrdinaryObject(ObjectOutputStream.java:1432)
    at java.io.ObjectOutputStream.writeObject0(ObjectOutputStream.java:1178)
    at java.io.ObjectOutputStream.writeObject(ObjectOutputStream.java:348)
    at scala.collection.immutable.List$SerializationProxy.writeObject(List.scala:468)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at java.io.ObjectStreamClass.invokeWriteObject(ObjectStreamClass.java:1128)
    at java.io.ObjectOutputStream.writeSerialData(ObjectOutputStream.java:1496)
    at java.io.ObjectOutputStream.writeOrdinaryObject(ObjectOutputStream.java:1432)
    at java.io.ObjectOutputStream.writeObject0(ObjectOutputStream.java:1178)
    at java.io.ObjectOutputStream.defaultWriteFields(ObjectOutputStream.java:1548)
    at java.io.ObjectOutputStream.writeSerialData(ObjectOutputStream.java:1509)
    at java.io.ObjectOutputStream.writeOrdinaryObject(ObjectOutputStream.java:1432)
    at java.io.ObjectOutputStream.writeObject0(ObjectOutputStream.java:1178)
    at java.io.ObjectOutputStream.defaultWriteFields(ObjectOutputStream.java:1548)
    at java.io.ObjectOutputStream.writeSerialData(ObjectOutputStream.java:1509)
    at java.io.ObjectOutputStream.writeOrdinaryObject(ObjectOutputStream.java:1432)
    at java.io.ObjectOutputStream.writeObject0(ObjectOutputStream.java:1178)
    at java.io.ObjectOutputStream.defaultWriteFields(ObjectOutputStream.java:1548)
    at java.io.ObjectOutputStream.writeSerialData(ObjectOutputStream.java:1509)
    at java.io.ObjectOutputStream.writeOrdinaryObject(ObjectOutputStream.java:1432)
    at java.io.ObjectOutputStream.writeObject0(ObjectOutputStream.java:1178)
    at java.io.ObjectOutputStream.writeObject(ObjectOutputStream.java:348)
    at org.apache.spark.serializer.JavaSerializationStream.writeObject(JavaSerializer.scala:43)
    at org.apache.spark.serializer.JavaSerializerInstance.serialize(JavaSerializer.scala:100)
    at org.apache.spark.util.ClosureCleaner$.ensureSerializable(ClosureCleaner.scala:342)
    at org.apache.spark.util.ClosureCleaner$.org$apache$spark$util$ClosureCleaner$$clean(ClosureCleaner.scala:335)
    at org.apache.spark.util.ClosureCleaner$.clean(ClosureCleaner.scala:159)
    at org.apache.spark.SparkContext.clean(SparkContext.scala:2299)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2073)
    at org.apache.spark.rdd.RDD$$anonfun$take$1.apply(RDD.scala:1358)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
    at org.apache.spark.rdd.RDD.withScope(RDD.scala:363)
    at org.apache.spark.rdd.RDD.take(RDD.scala:1331)
    at org.apache.spark.rdd.RDD$$anonfun$first$1.apply(RDD.scala:1372)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
    at org.apache.spark.rdd.RDD.withScope(RDD.scala:363)
    at org.apache.spark.rdd.RDD.first(RDD.scala:1371)
    at org.apache.spark.api.java.JavaRDDLike$class.first(JavaRDDLike.scala:538)
    at org.apache.spark.api.java.AbstractJavaRDDLike.first(JavaRDDLike.scala:45)
    at ProvaCass.main(ProvaCass.java:30)
Caused by: java.lang.ClassNotFoundException: org.joda.time.DateTime
    at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
    at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:349)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
    ... 67 more
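The first line of the trace says the JVM cannot load org.joda.time.DateTime while Spark serializes the map closure, i.e. Joda-Time is missing from the runtime classpath. A quick, Spark-independent way to confirm that diagnosis is a one-class probe run with the same classpath as the program (the class name ClasspathCheck is hypothetical, just for illustration):

// Hypothetical helper, not part of the project above: checks whether
// Joda-Time is visible on the runtime classpath.
public class ClasspathCheck {
    public static void main(String[] args) {
        try {
            // Succeeds only if a jar containing this class is on the classpath
            Class.forName("org.joda.time.DateTime");
            System.out.println("joda-time is on the classpath");
        } catch (ClassNotFoundException e) {
            System.out.println("joda-time is MISSING from the runtime classpath");
        }
    }
}

If it prints MISSING, the connector jar was added by hand without its transitive dependencies, which is exactly the situation described in the comments below.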

Francesco
  • You are missing a dependency at runtime. That is a super basic thing. You should spend some serious time learning such basics before going for advanced topics like Spark and Cassandra. – GhostCat Aug 16 '18 at 19:09
  • The point is: your dependencies have dependencies themselves, and typically, those are **not** included in the "core" JAR of your dependency. You have to understand the dependencies, and make sure they can be resolved! – GhostCat Aug 16 '18 at 19:10
  • Well, so I have to check which jar files spark-cassandra-connector uses and add those jars as dependencies of my project. Is that correct? Thanks – Francesco Aug 16 '18 at 19:21
  • Basically you should learn how a build system like Maven or Gradle works. Maven knows how to fetch the dependencies of a component, and it can make sure you get everything you need (a dependency sketch follows this thread). – GhostCat Aug 16 '18 at 19:25
  • I know, but when I insert the Maven dependency in the pom file, Maven doesn't find it (it marks the version in red). I saw on the internet that DataStax's compatibility table on git is wrong. For this reason I added the connector's jar file by hand. Do you know how I can resolve the error on the version tag of the Maven dependency? – Francesco Aug 16 '18 at 19:43
  • Then you were asking the wrong question. This question reads like "I wrote this code and now that exception happens, what does it mean?" ... If you have a Maven issue, you should write a question along the lines of "here is my Maven config, my setup looks like this and that... but at runtime the joda-time dependency isn't resolved." Something like that. So, my recommendation: write up a new question, after you have made sure the problem isn't already resolved in existing questions. – GhostCat Aug 17 '18 at 07:20
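Picking up the build-tool advice from the thread above: a minimal sketch of the Maven dependency block that would let Maven resolve the connector together with its transitive dependencies (which should include Joda-Time). The versions below are assumptions for a Spark 2.3.x / Scala 2.11 setup, guessed from the line numbers in the trace; they must be matched to the actual environment:

<!-- Hypothetical pom.xml fragment; versions are guesses, adjust to your Spark/Scala versions -->
<dependencies>
    <dependency>
        <groupId>org.apache.spark</groupId>
        <artifactId>spark-core_2.11</artifactId>
        <version>2.3.2</version>
    </dependency>
    <dependency>
        <!-- Resolved via Maven, this should also pull in joda-time transitively -->
        <groupId>com.datastax.spark</groupId>
        <artifactId>spark-cassandra-connector_2.11</artifactId>
        <version>2.3.2</version>
    </dependency>
</dependencies>

If the IDE marks the version in red, that usually means the exact artifact/version pair is not available in the configured repositories; searching the coordinates on Maven Central shows which versions actually exist.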

0 Answers