
Exception in thread "main" java.lang.NoSuchMethodError: com.fasterxml.jackson.module.scala.experimental.ScalaObjectMapper.$init$(Lcom/fasterxml/jackson/module/scala/experimental/ScalaObjectMapper;)V while trying to write a DataFrame as a Delta table in Spark.

Spark 2.4.4, Scala 2.12


spark-shell --packages io.delta:delta-core_2.12:0.5.0 

val dF = spark.read.load("path")
dF.toDF.write.format("delta").mode("overwrite").partitionBy("date", "topic", "partition", "key").save("path")

Error:


java.lang.NoSuchMethodError: com.fasterxml.jackson.module.scala.experimental.ScalaObjectMapper.$init$(Lcom/fasterxml/jackson/module/scala/experimental/ScalaObjectMapper;)V
  at org.apache.spark.sql.delta.util.JsonUtils$$anon$1.<init>(JsonUtils.scala:27)
  at org.apache.spark.sql.delta.util.JsonUtils$.<init>(JsonUtils.scala:27)
  at org.apache.spark.sql.delta.util.JsonUtils$.<clinit>(JsonUtils.scala)
  at org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaEvent(DeltaLogging.scala:62)
  at org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaEvent$(DeltaLogging.scala:56)
  at org.apache.spark.sql.delta.DeltaOptions$.recordDeltaEvent(DeltaOptions.scala:133)
  at org.apache.spark.sql.delta.DeltaOptions$.verifyOptions(DeltaOptions.scala:176)
  at org.apache.spark.sql.delta.DeltaOptions.<init>(DeltaOptions.scala:128)
  at org.apache.spark.sql.delta.DeltaOptions.<init>(DeltaOptions.scala:130)
  at org.apache.spark.sql.delta.sources.DeltaDataSource.createRelation(DeltaDataSource.scala:130)
  at org.apache.spark.sql.execution.datasources.SaveIntoDataSourceCommand.run(SaveIntoDataSourceCommand.scala:46)
  at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:70)
  at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:68)
  at org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:86)
  at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:131)
  at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:155)
  at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
  at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152)
  at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127)
  at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:80)
  at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:80)
  at org.apache.spark.sql.DataFrameWriter.$anonfun$runCommand$1(DataFrameWriter.scala:676)
  at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:78)
  at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:125)
  at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:73)
  at org.apache.spark.sql.DataFrameWriter.runCommand(DataFrameWriter.scala:676)
  at org.apache.spark.sql.DataFrameWriter.saveToV1Source(DataFrameWriter.scala:290)
  at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:271)
  at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:229)
  ... 51 elided

The classpath includes the following JARs:

com.fasterxml.jackson.core:jackson-databind:2.10.1
com.fasterxml.jackson.core:jackson-core:2.10.1
org.codehaus.jackson:jackson-core-asl:1.9.13
org.codehaus.jackson:jackson-mapper-asl:1.9.13
com.fasterxml.jackson.core:jackson-annotations:2.10.1
com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:2.10.1
com.fasterxml.jackson.module:jackson-module-scala_2.12:2.10.1
com.fasterxml.jackson.module:jackson-module-jaxb-annotations:2.10.1
org.json4s:json4s-jackson_2.12:3.5.3
com.twitter:parquet-jackson:1.6.0
org.codehaus.jackson:jackson-jaxrs:1.9.13
org.codehaus.jackson:jackson-xc:1.9.13
com.fasterxml.jackson.module:jackson-module-paranamer:2.10.1
com.google.protobuf:protobuf-java:3.11.1
org.apache.htrace:htrace-core:3.1.0-incubating
commons-cli:commons-cli:1.4
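
As a quick sanity check (a diagnostic sketch of my own, not part of the failing job), the running shell can print its Scala version and the JAR that actually provides the failing trait, using plain JVM/Scala calls:

// Print the Scala version the shell is built against
println(scala.util.Properties.versionString)
// Print the location (JAR) of the class named in the NoSuchMethodError
println(
  Class.forName("com.fasterxml.jackson.module.scala.experimental.ScalaObjectMapper")
    .getProtectionDomain.getCodeSource.getLocation
)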

Raptor0009
  • Hi @Raptor0009, is ``jackson-module-scala_2.12`` added to your classpath when running spark-shell? If not, try adding it as an additional package (https://mvnrepository.com/artifact/com.fasterxml.jackson.module/jackson-module-scala) – baitmbarek Dec 27 '19 at 08:41
  • Hi, I added com.fasterxml.jackson.module:jackson-module-scala_2.12:2.10.1 to the spark-shell packages. Same error. – Raptor0009 Dec 28 '19 at 04:25
  • Hi @Raptor0009, silly question: are you sure your spark-shell is running Scala 2.12? The Scala version is printed on spark-shell startup. – baitmbarek Dec 28 '19 at 09:43
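
For reference, a spark-shell invocation with the module added explicitly, as suggested in the comments above, would look roughly like this (the exact --packages form is an assumption; the versions are the ones already listed):

spark-shell --packages io.delta:delta-core_2.12:0.5.0,com.fasterxml.jackson.module:jackson-module-scala_2.12:2.10.1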
