0

hive-shims-scheduler-2.3.9.jar:/app/platform/spark-3.2.1-bin-hadoop3.2/jars/hive-shims-scheduler-2.3.9.jar:/app/platform/hadoop-3.3.2/etc/hadoop] Exception in thread "main" scala.reflect.internal.FatalError: Error accessing /app/platform/spark-3.2.1-bin-hadoop3.2/jars/._dropwizard-metrics-hadoop-metrics2-reporter-0.1.2.jar at scala.tools.nsc.classpath.AggregateClassPath.$anonfun$list$3(AggregateClassPath.scala:113) at scala.collection.Iterator.foreach(Iterator.scala:943) at scala.collection.Iterator.foreach$(Iterator.scala:943) at scala.collection.AbstractIterator.foreach(Iterator.scala:1431) at scala.collection.IterableLike.foreach(IterableLike.scala:74) at scala.collection.IterableLike.foreach$(IterableLike.scala:73) at scala.collection.AbstractIterable.foreach(Iterable.scala:56) at scala.tools.nsc.classpath.AggregateClassPath.list(AggregateClassPath.scala:101) at scala.tools.nsc.util.ClassPath.list(ClassPath.scala:36) at scala.tools.nsc.util.ClassPath.list$(ClassPath.scala:36) at scala.tools.nsc.classpath.AggregateClassPath.list(AggregateClassPath.scala:30) at scala.tools.nsc.symtab.SymbolLoaders$PackageLoader.doComplete(SymbolLoaders.scala:298) at scala.tools.nsc.symtab.SymbolLoaders$SymbolLoader.complete(SymbolLoaders.scala:250) at scala.reflect.internal.Symbols$Symbol.completeInfo(Symbols.scala:1542) at scala.reflect.internal.Symbols$Symbol.info(Symbols.scala:1514) at scala.reflect.internal.Mirrors$RootsBase.init(Mirrors.scala:258) at scala.tools.nsc.Global.rootMirror$lzycompute(Global.scala:74) at scala.tools.nsc.Global.rootMirror(Global.scala:72) at scala.tools.nsc.Global.rootMirror(Global.scala:44) at scala.reflect.internal.Definitions$DefinitionsClass.ObjectClass$lzycompute(Definitions.scala:294) at scala.reflect.internal.Definitions$DefinitionsClass.ObjectClass(Definitions.scala:294) at scala.reflect.internal.Definitions$DefinitionsClass.init(Definitions.scala:1504) at scala.tools.nsc.Global$Run.(Global.scala:1213) at 
scala.tools.nsc.interpreter.IMain._initialize(IMain.scala:124) at scala.tools.nsc.interpreter.IMain.initializeSynchronous(IMain.scala:146) at org.apache.spark.repl.SparkILoop.$anonfun$process$10(SparkILoop.scala:211) at org.apache.spark.repl.SparkILoop.withSuppressedSettings$1(SparkILoop.scala:189) at org.apache.spark.repl.SparkILoop.startup$1(SparkILoop.scala:201) at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:236) at org.apache.spark.repl.Main$.doMain(Main.scala:78) at org.apache.spark.repl.Main$.main(Main.scala:58) at org.apache.spark.repl.Main.main(Main.scala) at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52) at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:955) at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:180) at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:203) at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:90) at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:1043) at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:1052) at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) Caused by: java.io.IOException: Error accessing /app/platform/spark-3.2.1-bin-hadoop3.2/jars/._dropwizard-metrics-hadoop-metrics2-reporter-0.1.2.jar at scala.reflect.io.FileZipArchive.scala$reflect$io$FileZipArchive$$openZipFile(ZipArchive.scala:182) at scala.reflect.io.FileZipArchive.root$lzycompute(ZipArchive.scala:230) at scala.reflect.io.FileZipArchive.root(ZipArchive.scala:227) at scala.reflect.io.FileZipArchive.allDirs$lzycompute(ZipArchive.scala:264) at 
scala.reflect.io.FileZipArchive.allDirs(ZipArchive.scala:264) at scala.tools.nsc.classpath.ZipArchiveFileLookup.findDirEntry(ZipArchiveFileLookup.scala:76) at scala.tools.nsc.classpath.ZipArchiveFileLookup.list(ZipArchiveFileLookup.scala:63) at scala.tools.nsc.classpath.ZipArchiveFileLookup.list$(ZipArchiveFileLookup.scala:62) at scala.tools.nsc.classpath.ZipAndJarClassPathFactory$ZipArchiveClassPath.list(ZipAndJarFileLookupFactory.scala:58) at scala.tools.nsc.classpath.AggregateClassPath.$anonfun$list$3(AggregateClassPath.scala:105) ... 43 more Caused by: java.util.zip.ZipException: error in opening zip file at java.util.zip.ZipFile.open(Native Method) at java.util.zip.ZipFile.(ZipFile.java:228) at java.util.zip.ZipFile.(ZipFile.java:157) at java.util.zip.ZipFile.(ZipFile.java:171) at scala.reflect.io.FileZipArchive.scala$reflect$io$FileZipArchive$$openZipFile(ZipArchive.scala:179) ... 52 more

  • Please clarify your specific problem or provide additional details to highlight exactly what you need. As it's currently written, it's hard to tell exactly what you're asking. – Community Jun 30 '22 at 14:25
  • When I run the spark-shell on a worker node I get the above exception. It works fine on the master node, but I see this exception only on the worker nodes. – Vijay Venugopal Jul 05 '22 at 06:19

0 Answers