2

I'm launching a Spark app on YARN using spark-submit. It is failing with a LeaseExpiredException (stack trace below) for the keytab used to launch the Spark app. The cluster is Kerberos- and WANdisco-enabled. Any ideas on what could have caused this? Replication for all hidden folders is disabled in WANdisco, so /user/ck/.sparkStaging has no replication enabled in WANdisco.


Exception in thread "main" 19/02/27 11:20:32 ERROR hdfs.DFSClient: Failed to close inode 1499015670 org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.hdfs.server.namenode.LeaseExpiredException): No lease on /user/ck/.sparkStaging/application_1551278166756_0297/abc.keytab (inode 1499015670): File does not exist. Holder DFSClient_NONMAPREDUCE_-1650244877_1 does not have any open files. at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:3521) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.completeFileInternal(FSNamesystem.java:3611) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.completeFile(FSNamesystem.java:3578) at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.complete(NameNodeRpcServer.java:905) at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.complete(ClientNamenodeProtocolServerSideTranslatorPB.java:544) at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java) at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:640) at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:982) at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2313) at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2309) at java.security.AccessController.doPrivileged(Native Method) at javax.security.auth.Subject.doAs(Subject.java:422) at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1740) at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2307)

at org.apache.hadoop.ipc.Client.call(Client.java:1475)
at org.apache.hadoop.ipc.Client.call(Client.java:1412)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:229)
at com.sun.proxy.$Proxy13.complete(Unknown Source)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.complete(ClientNamenodeProtocolTranslatorPB.java:462)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:191)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102)
at com.sun.proxy.$Proxy14.complete(Unknown Source)
at org.apache.hadoop.hdfs.DFSOutputStream.completeFile(DFSOutputStream.java:2291)
at org.apache.hadoop.hdfs.DFSOutputStream.closeImpl(DFSOutputStream.java:2267)
at org.apache.hadoop.hdfs.DFSOutputStream.close(DFSOutputStream.java:2232)
at org.apache.hadoop.hdfs.DFSClient.closeAllFilesBeingWritten(DFSClient.java:937)
at org.apache.hadoop.hdfs.DFSClient.closeOutputStreams(DFSClient.java:969)
at org.apache.hadoop.hdfs.DistributedFileSystem.close(DistributedFileSystem.java:1076)
at com.wandisco.fs.client.FusionCommon.close(FusionCommon.java:277)
at com.wandisco.fs.client.ReplicatedFC.destroy(ReplicatedFC.java:1036)
at com.wandisco.fs.client.ReplicatedFC.removeReference(ReplicatedFC.java:1005)
at com.wandisco.fs.client.ReplicatedFC.close(ReplicatedFC.java:489)
at com.wandisco.fs.client.FusionHdfs.closeFc(FusionHdfs.java:114)
at com.wandisco.fs.client.FusionHdfs.close(FusionHdfs.java:108)
at org.apache.hadoop.fs.FileSystem$Cache.closeAll(FileSystem.java:2760)
at org.apache.hadoop.fs.FileSystem$Cache$ClientFinalizer.run(FileSystem.java:2777)
at org.apache.hadoop.util.ShutdownHookManager$1.run(ShutdownHookManager.java:54)
Naresh
  • 16,698
  • 6
  • 112
  • 113
chanakya
  • 21
  • 3

0 Answers