I am trying to rewrite this code:
import org.apache.spark.sql.SparkSession

object SimpleApp {
  def main(args: Array[String]): Unit = {
    val logFile = "file:///root/spark/README.md"
    val spark = SparkSession.builder.appName("Simple Application").getOrCreate()
    val logData = spark.read.textFile(logFile).cache()
    val numAs = logData.filter(line => line.contains("a")).count()
    val numBs = logData.filter(line => line.contains("b")).count()
    println(s"Lines with a: $numAs, Lines with b: $numBs")
    spark.stop()
  }
}
to this:
import org.apache.livy._
import org.apache.spark.sql.SparkSession

class Test extends Job[Int] {
  override def call(jc: JobContext): Int = {
    val spark = jc.sparkSession()
    val logFile = "file:///root/spark/README.md"
    val logData = spark.read.textFile(logFile).cache()
    val numAs = logData.filter(line => line.contains("a")).count()
    val numBs = logData.filter(line => line.contains("b")).count()
    println(s"Lines with a: $numAs, Lines with b: $numBs")
    1 // Return value
  }
}
But when I compile it with sbt, val spark is not typed correctly and I get the error "value read is not a member of Nothing".
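
From reading the Livy API, I suspect the problem is that JobContext.sparkSession is generic in its return type (declared as <E> E sparkSession()), so the Scala compiler infers Nothing when no type parameter is given. A minimal sketch of the fix I am considering, with the type parameter spelled out explicitly:

import org.apache.livy.{Job, JobContext}
import org.apache.spark.sql.SparkSession

class Test extends Job[Int] {
  override def call(jc: JobContext): Int = {
    // Explicit type parameter: without [SparkSession] the compiler infers Nothing
    val spark = jc.sparkSession[SparkSession]()
    val logData = spark.read.textFile("file:///root/spark/README.md").cache()
    val numAs = logData.filter(line => line.contains("a")).count()
    val numBs = logData.filter(line => line.contains("b")).count()
    println(s"Lines with a: $numAs, Lines with b: $numBs")
    1 // Return value
  }
}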
Also, after commenting out the Spark-related code, when I try to run the resulting jar file via /batches I get the error "java.lang.NoSuchMethodException: Test.main([Ljava.lang.String;)".
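
If I understand correctly, /batches runs the jar through spark-submit, which looks for a main(Array[String]) entry point that a Job implementation does not have; the Job interface seems to be meant for Livy's programmatic API instead. A rough sketch of submitting the job through LivyClient (the server URL and jar path below are placeholders):

import java.io.File
import java.net.URI
import org.apache.livy.LivyClientBuilder

object SubmitTest {
  def main(args: Array[String]): Unit = {
    // Connect to the Livy server (URL is a placeholder)
    val client = new LivyClientBuilder()
      .setURI(new URI("http://localhost:8998"))
      .build()
    try {
      // Upload the jar that contains the Test class
      client.uploadJar(new File("/path/to/test.jar")).get()
      // Submit the Job and block until the Int result comes back
      val result = client.submit(new Test()).get()
      println(s"Job returned: $result")
    } finally {
      client.stop(true)
    }
  }
}

But I am not sure this is the intended approach.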
Can anybody show me the correct way to rewrite this Spark Scala code?