
I am stuck with the below error while running a Spark job:

Exception in thread "main" java.lang.NoSuchMethodError: scala.Predef$.$scope()Lscala/xml/TopScope$;
    at org.apache.spark.ui.jobs.AllJobsPage.<init>(AllJobsPage.scala:39)
    at org.apache.spark.ui.jobs.JobsTab.<init>(JobsTab.scala:38)
    at org.apache.spark.ui.SparkUI.initialize(SparkUI.scala:65)
    at org.apache.spark.ui.SparkUI.<init>(SparkUI.scala:82)
    at org.apache.spark.ui.SparkUI$.create(SparkUI.scala:220)
    at org.apache.spark.ui.SparkUI$.createLiveUI(SparkUI.scala:162)
    at org.apache.spark.SparkContext.<init>(SparkContext.scala:452)

To resolve this (from what I understand, the error means the Scala version on the classpath does not match the one my Spark artifacts were built against), I downgraded the Scala version to 2.10, as suggested in another post for the same issue.

But when I downgrade Scala to 2.10, I run into another error, this time related to Guava:

Exception in thread "main" java.lang.NoClassDefFoundError: org/spark_project/guava/cache/CacheLoader
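
From what I have read, both errors usually indicate that mixed Scala binary versions end up on the classpath. As an untested sketch on my part, I assume the Scala library could at least be forced to a single version (2.11.0 here, matching the _2.11 Spark artifacts) through dependencyManagement:

<dependencyManagement>
    <dependencies>
        <!-- forces every module, including transitive dependencies,
             to resolve scala-library 2.11.0 -->
        <dependency>
            <groupId>org.scala-lang</groupId>
            <artifactId>scala-library</artifactId>
            <version>2.11.0</version>
        </dependency>
    </dependencies>
</dependencyManagement>

I have not verified that this fixes anything; it only pins what Maven resolves.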

Here is my pom file:

<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <groupId>com.iot.app.kafka</groupId>
    <artifactId>iot-kafka-producer</artifactId>
    <packaging>jar</packaging>
    <version>1.0.0</version>
    <name>IoT Kafka Producer</name>

    <dependencies>
        <dependency>
            <groupId>org.apache.kafka</groupId>
            <artifactId>kafka-clients</artifactId>
            <version>0.9.0.0</version>
        </dependency>

        <!-- https://mvnrepository.com/artifact/org.apache.spark/spark-core_2.11 -->
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-core_2.11</artifactId>
            <version>1.5.2</version>
        </dependency>

        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-network-common_2.11</artifactId>
            <version>2.1.0</version>
        </dependency>

        <!-- https://mvnrepository.com/artifact/org.apache.spark/spark-streaming-kafka_2.11 -->
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-streaming-kafka_2.11</artifactId>
            <version>1.6.3</version>
        </dependency>

        <!-- https://mvnrepository.com/artifact/org.apache.spark/spark-streaming_2.11 -->
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-streaming_2.11</artifactId>
            <version>1.6.3</version>
        </dependency>

        <!-- https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-client -->
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-client</artifactId>
            <version>2.2.0</version>
        </dependency>

        <!-- https://mvnrepository.com/artifact/org.apache.spark/spark-core_2.11 -->
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-core_2.11</artifactId>
            <version>1.6.3</version>
        </dependency>

        <dependency>
            <groupId>org.spark-project.spark</groupId>
            <artifactId>unused</artifactId>
            <version>1.0.0</version>
            <scope>provided</scope>
        </dependency>

        <dependency>
            <groupId>com.fasterxml.jackson.core</groupId>
            <artifactId>jackson-core</artifactId>
            <version>2.6.6</version>
        </dependency>
        <dependency>
            <groupId>com.fasterxml.jackson.core</groupId>
            <artifactId>jackson-databind</artifactId>
            <version>2.6.6</version>
        </dependency>
        <dependency>
            <groupId>com.fasterxml.jackson.core</groupId>
            <artifactId>jackson-annotations</artifactId>
            <version>2.6.6</version>
        </dependency>
        <dependency>
            <groupId>log4j</groupId>
            <artifactId>log4j</artifactId>
            <version>1.2.17</version>
        </dependency>
        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <version>4.12</version>
        </dependency>

        <!-- https://mvnrepository.com/artifact/com.google.guava/guava -->
        <dependency>
            <groupId>com.google.guava</groupId>
            <artifactId>guava</artifactId>
            <version>14.0.1</version>
        </dependency>

        <!-- https://mvnrepository.com/artifact/org.onosproject/onos-api -->
        <dependency>
            <groupId>org.onosproject</groupId>
            <artifactId>onos-api</artifactId>
            <version>1.6.0</version>
        </dependency>

        <dependency>
            <groupId>org.scala-lang</groupId>
            <artifactId>scala-library</artifactId>
            <version>2.11.0</version>
        </dependency>

        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-streaming-kafka-0-8-assembly_2.11</artifactId>
            <version>2.0.0</version>
        </dependency>

        <!-- https://mvnrepository.com/artifact/org.apache.kafka/kafka_2.10 -->
        <dependency>
            <groupId>org.apache.kafka</groupId>
            <artifactId>kafka_2.10</artifactId>
            <version>0.8.0</version>
        </dependency>
    </dependencies>

    <properties>
        <maven.compiler.source>1.8</maven.compiler.source>
        <maven.compiler.target>1.8</maven.compiler.target>
    </properties>
</project>
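
For reference, my current understanding (which the comment below also seems to suggest) is that all the _2.xx artifacts plus scala-library should share one Scala binary version, and all the Spark modules one Spark version. Here is a sketch of what I assume that would look like; Spark 1.6.3 on Scala 2.11 is an unverified guess on my part, and I assume the separately declared spark-network-common 2.1.0 would then be unnecessary, since spark-core should pull in a matching version transitively:

<properties>
    <scala.binary.version>2.11</scala.binary.version>
    <spark.version>1.6.3</spark.version>
</properties>

<dependencies>
    <!-- every Spark module pinned to the same Spark version
         and the same Scala binary version -->
    <dependency>
        <groupId>org.apache.spark</groupId>
        <artifactId>spark-core_${scala.binary.version}</artifactId>
        <version>${spark.version}</version>
    </dependency>
    <dependency>
        <groupId>org.apache.spark</groupId>
        <artifactId>spark-streaming_${scala.binary.version}</artifactId>
        <version>${spark.version}</version>
    </dependency>
    <dependency>
        <groupId>org.apache.spark</groupId>
        <artifactId>spark-streaming-kafka_${scala.binary.version}</artifactId>
        <version>${spark.version}</version>
    </dependency>
    <!-- the Scala runtime itself, matching the binary version above;
         the 2.11.8 patch level is a guess -->
    <dependency>
        <groupId>org.scala-lang</groupId>
        <artifactId>scala-library</artifactId>
        <version>2.11.8</version>
    </dependency>
</dependencies>

I have not tested this; it is just how I read the advice about keeping versions consistent.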

  • Can you use the **same Spark and Scala version** for all dependencies? – mrsrinivas Mar 01 '17 at 07:14
  • @mrsrinivas: Thanks for the comment, but I have not understood it (Spark 2.0 and above works well with Scala 2.11). I am completely new to Spark. – user3837415 Mar 01 '17 at 07:31
