We are trying to connect to HDFS using Kerberos from an OSGi bundle running in a Karaf container. We have already installed the Hadoop client in Karaf using the Apache ServiceMix bundles:
<groupId>org.apache.servicemix.bundles</groupId>
<artifactId>org.apache.servicemix.bundles.hadoop-client</artifactId>
<version>2.4.1_1</version>
The relevant part of the POM file is shown below:
<build>
  <plugins>
    <plugin>
      <groupId>org.apache.felix</groupId>
      <artifactId>maven-bundle-plugin</artifactId>
      <version>2.3.7</version>
      <extensions>true</extensions>
      <configuration>
        <instructions>
          <Bundle-Activator>com.bdbizviz.hadoop.activator.PaHdfsActivator</Bundle-Activator>
          <Bundle-SymbolicName>${project.artifactId}</Bundle-SymbolicName>
          <Bundle-Version>${project.version}</Bundle-Version>
          <Export-Package>
            <!-- com.google.*, !org.apache.camel.model.dataformat, !org.apache.poi.ddf,
                 !org.apache.xmlbeans, org.apache.commons.collections.*, org.apache.commons.configuration.*,
                 org.apache.hadoop.hdfs*, org.apache.hadoop.hdfs.client*, org.apache.hadoop.hdfs.net*,
                 org.apache.hadoop.hdfs.protocol.datatransfer*, org.apache.hadoop.hdfs.protocol.proto*,
                 org.apache.hadoop.hdfs.protocolPB*, org.apache.hadoop.conf.*, org.apache.hadoop.io.*,
                 org.apache.hadoop.fs.*, org.apache.hadoop.security.*, org.apache.hadoop.metrics2.*,
                 org.apache.hadoop.util.*, org.apache.hadoop*; -->
            <!-- org.apache.*; -->
          </Export-Package>
          <Import-Package>
            org.apache.hadoop*,org.osgi.framework,*;resolution:=optional
          </Import-Package>
          <Include-Resource>
            {maven-resources},
            @org.apache.servicemix.bundles.hadoop-client-2.4.1_1.jar!/core-default.xml,
            @org.apache.servicemix.bundles.hadoop-client-2.4.1_1.jar!/hdfs-default.xml,
            @org.apache.servicemix.bundles.hadoop-client-2.4.1_1.jar!/mapred-default.xml,
            @org.apache.servicemix.bundles.hadoop-client-2.4.1_1.jar!/hadoop-metrics.properties
          </Include-Resource>
          <DynamicImport-Package>*</DynamicImport-Package>
        </instructions>
      </configuration>
    </plugin>
  </plugins>
</build>
<dependencies>
  <dependency>
    <groupId>org.apache.servicemix.bundles</groupId>
    <artifactId>org.apache.servicemix.bundles.hadoop-client</artifactId>
    <version>2.4.1_1</version>
    <exclusions>
      <exclusion>
        <groupId>jdk.tools</groupId>
        <artifactId>jdk.tools</artifactId>
        <!-- <version>1.7</version> -->
      </exclusion>
    </exclusions>
  </dependency>
</dependencies>
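The *-default.xml files are embedded via Include-Resource above because, inside OSGi, Hadoop's Configuration often cannot find them on the thread context classloader. A minimal sketch of what we understand this enables (the HdfsConfigFactory class name is only for illustration, it is not part of our code):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.hdfs.DistributedFileSystem;

// Illustrative helper only (not part of the bundle): point Hadoop's Configuration at the
// bundle classloader so the embedded core-default.xml / hdfs-default.xml can be resolved.
public class HdfsConfigFactory {
    public static Configuration create() {
        Configuration config = new Configuration();
        config.setClassLoader(HdfsConfigFactory.class.getClassLoader()); // bundle classloader
        config.set("fs.hdfs.impl", DistributedFileSystem.class.getName());
        config.set("fs.file.impl", LocalFileSystem.class.getName());
        return config;
    }
}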
Code Snippet:
import java.io.IOException;
import java.security.PrivilegedExceptionAction;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.security.UserGroupInformation;

public class TestHdfs implements ITestHdfs {

    public void printName() throws IOException {
        /*
        Configuration config = new Configuration();
        config.set("fs.default.name", "hdfs://192.168.1.17:8020");
        config.set("fs.hdfs.impl", org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());
        config.set("fs.file.impl", org.apache.hadoop.fs.LocalFileSystem.class.getName());
        try {
            fs = FileSystem.get(config);
            getHostnames(fs);
        } catch (IOException e) {
            e.printStackTrace();
        }
        */

        // Use the bundle's classloader as TCCL so Hadoop can load its classes inside OSGi.
        Thread.currentThread().setContextClassLoader(getClass().getClassLoader());

        final Configuration config = new Configuration();
        config.set("fs.default.name", "hdfs://192.168.1.124:8020");
        config.set("fs.file.impl", LocalFileSystem.class.getName());
        config.set("fs.hdfs.impl", DistributedFileSystem.class.getName());
        config.set("hadoop.security.authentication", "KERBEROS");
        config.set("dfs.namenode.kerberos.principal.pattern", "hdfs/*@********.COM");

        System.setProperty("HADOOP_JAAS_DEBUG", "true");
        System.setProperty("sun.security.krb5.debug", "true");
        System.setProperty("java.net.preferIPv4Stack", "true");

        System.out.println("--------------status---:" + UserGroupInformation.isSecurityEnabled());
        UserGroupInformation.setConfiguration(config);

        // UserGroupInformation.loginUserFromKeytab(
        //         "hdfs/hadoop1.********.com@********.COM",
        //         "file:/home/kaushal/hdfs-hadoop1.keytab");

        // Log in from the keytab and create a proxy user that performs the HDFS call.
        UserGroupInformation app_ugi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(
                "hdfs/hadoop1.********.com@********.COM",
                "C:\\Users\\desanth.pv\\Desktop\\hdfs-hadoop1.keytab");
        UserGroupInformation proxy_ugi = UserGroupInformation.createProxyUser("ssdfsdfsdfsdfag", app_ugi);
        System.out.println("--------------status---:" + UserGroupInformation.isSecurityEnabled());

        /* ClassLoader tccl = Thread.currentThread().getContextClassLoader(); */
        try {
            /* Thread.currentThread().setContextClassLoader(getClass().getClassLoader()); */
            proxy_ugi.doAs(new PrivilegedExceptionAction<Object>() {
                @Override
                public Object run() throws Exception {
                    /* ClassLoader tccl = Thread.currentThread().getContextClassLoader(); */
                    try {
                        /* Thread.currentThread().setContextClassLoader(getClass().getClassLoader()); */
                        System.out.println("desanth");
                        FileSystem fs = FileSystem.get(config);
                        DistributedFileSystem hdfs = (DistributedFileSystem) fs;
                        DatanodeInfo[] dataNodeStats = hdfs.getDataNodeStats();
                        String[] names = new String[dataNodeStats.length];
                        for (int i = 0; i < dataNodeStats.length; i++) {
                            names[i] = dataNodeStats[i].getHostName();
                            System.out.println(dataNodeStats[i].getHostName());
                        }
                    } catch (IOException e) {
                        e.printStackTrace();
                    } finally {
                        // Thread.currentThread().setContextClassLoader(tccl);
                    }
                    return null;
                }
            });
        } catch (InterruptedException e) {
            e.printStackTrace();
        } finally {
            /* Thread.currentThread().setContextClassLoader(tccl); */
        }
    }

    public void getHostnames(FileSystem fs) throws IOException {
        DistributedFileSystem hdfs = (DistributedFileSystem) fs;
        DatanodeInfo[] dataNodeStats = hdfs.getDataNodeStats();
        String[] names = new String[dataNodeStats.length];
        for (int i = 0; i < dataNodeStats.length; i++) {
            names[i] = dataNodeStats[i].getHostName();
            System.out.println(dataNodeStats[i].getHostName());
        }
    }
}
Error:
Caused by: org.apache.hadoop.security.AccessControlException: Client cannot authenticate via:[TOKEN, KERBEROS]
java.io.IOException: Failed on local exception: java.io.IOException: org.apache.hadoop.security.AccessControlException: Client cannot authenticate via:[TOKEN, KERBEROS]; Host Details : local host is: "jayendra-dynabook-T451-34EW/127.0.1.1"; destination host is: "hadoop2.********.com":8020;
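For debugging, a small check like the following could be added right after the keytab login to confirm that Kerberos credentials were actually obtained before the HDFS call is made (the LoginCheck class is only an illustration, not part of the bundle):

import java.io.IOException;
import org.apache.hadoop.security.UserGroupInformation;

// Illustration only: dump the login state of a UGI returned by loginUserFromKeytabAndReturnUGI.
public class LoginCheck {
    public static void dump(UserGroupInformation ugi) throws IOException {
        System.out.println("auth method       : " + ugi.getAuthenticationMethod());
        System.out.println("has Kerberos creds: " + ugi.hasKerberosCredentials());
        System.out.println("login user        : " + UserGroupInformation.getLoginUser());
    }
}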