2

Even after adding the dependencies at runtime, I'm getting the following error:

Exception in thread "main" java.lang.NoClassDefFoundError: com/cloudera/spark/hbase/HBaseContext
    at Spark_HBase.SparkHBaseExample$.main(SparkHBaseExample.scala:36)
    at Spark_HBase.SparkHBaseExample.main(SparkHBaseExample.scala)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:606)
    at org.apache.spark.deploy.SparkSubmit$.launch(SparkSubmit.scala:358)
    at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:75)
    at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
Caused by: java.lang.ClassNotFoundException: com.cloudera.spark.hbase.HBaseContext
    at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
    at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
    at java.security.AccessController.doPrivileged(Native Method)
    at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:358)

Code:

import com.cloudera.spark.hbase.HBaseContext
    import org.apache.hadoop.hbase.HBaseConfiguration
    import org.apache.hadoop.hbase.client.{Scan, HTable, Get}
    import org.apache.hadoop.hbase.mapreduce.TableInputFormat
    import org.apache.hadoop.hbase.util.Bytes
    import org.apache.spark.{SparkContext, SparkConf}


    object SparkHBaseExample {

      /** Minimal Spark-on-HBase example. It does two independent reads of the
       *  HBase table "tmp":
       *    1. a direct (non-Spark) Get of row "abc", column cf:val, via the
       *       plain HBase client, and
       *    2. a distributed scan of the whole table through Cloudera's
       *       HBaseContext, printed element by element.
       *
       *  NOTE(review): HBaseContext lives in the Cloudera spark-hbase jar; it
       *  must be on BOTH the driver and executor classpaths (e.g. via
       *  spark-submit --jars or an uber-jar), otherwise the run fails with
       *  NoClassDefFoundError: com/cloudera/spark/hbase/HBaseContext.
       */
      def main(args: Array[String]): Unit = {

        val sparkConf = new SparkConf().setAppName("HBaseSpark").setMaster("local[2]")
        val sc = new SparkContext(sparkConf)

        val conf = HBaseConfiguration.create()
        conf.set(TableInputFormat.INPUT_TABLE, "tmp")

        // --- 1. Plain HBase client read of a single cell ---------------------
        // Fix: the original leaked the HTable; close it in a finally block.
        val table = new HTable(conf, "tmp")
        try {
          val get = new Get(Bytes.toBytes("abc"))
          val result = table.get(get)
          // getValue returns null when the cell is absent; Bytes.toString(null)
          // would then NPE — presumably the row is expected to exist. TODO confirm.
          val value = result.getValue(Bytes.toBytes("cf"), Bytes.toBytes("val"))
          val name = Bytes.toString(value)
          println("############################################################" + name)
        } finally {
          table.close()
        }

        // --- 2. Distributed scan via Cloudera's HBaseContext -----------------
        // (The original also built an unused newAPIHadoopRDD here; removed, as
        // RDD construction is lazy and the value was never consumed.)
        val hbaseContext = new HBaseContext(sc, conf)
        val scan = new Scan()
        scan.setCaching(100) // fetch 100 rows per RPC to cut round-trips

        val getRdd = hbaseContext.hbaseRDD("tmp", scan)
        println("############################GETRDD################################")
        getRdd.foreach(println(_))

        sc.stop() // release Spark resources before the JVM exits
      }
    }
pedrorijo91
  • 7,635
  • 9
  • 44
  • 82
Gunjan
  • 19
  • 6
  • How did you add the dependencies at runtime ? Can you share the submit command ? – eliasah Apr 05 '16 at 12:32
  • I added all the hbase related jars in spark-default.conf file ./spark-submit --master local[2] --class Spark_HBase.SparkHBaseExample /home/gunjan.dhawas/SparkHBase.jar – Gunjan Apr 05 '16 at 12:36
  • I'm able to connect to hbase... I added the following line in spark-default.conf: spark.executor.extraJavaOptions -XX:+PrintGCDetails -Dkey=value -Dnumbers="one two three" spark.executor.extraClassPath /usr/hdp/2.3.4.0-3485/hive/lib/hive-hbase-handler.jar:/usr/hdp/2.3.4.0-3485/hbase/lib/hbase-server.jar:/usr/hdp/2.3.4.0-3485/hbase/lib – Gunjan Apr 05 '16 at 12:38
  • To be honest, I never used the spark-default.conf. For me, it is default for a reason, when you can to override options, you can do it explicitly with --jars per example when you submit or an alternative is using assembly plugins with your favorite build manager to build an uber-jar which will contains dependencies. – eliasah Apr 05 '16 at 12:41
  • yes I tried with --jars but it wasn't working... Without HBaseContext its working fine, even with this code I'm getting partial output . thank you. – Gunjan Apr 05 '16 at 12:42
  • can you show us the sbt dependencies? and how did you add the runtime dependencies? there may be a conflict – pedrorijo91 Apr 05 '16 at 15:45

0 Answers