
I am trying to open a JDBC connection to Hive from a Java application running on a Windows PC.

Hive is installed on an Oracle Linux server and secured with Kerberos authentication. When I obtain a ticket from the keytab directly on the Linux server, it works perfectly. I copied the keytab file to the Windows PC so I can obtain a ticket there before creating the connection.

My Java code is:

import java.io.IOException;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import org.apache.hadoop.security.UserGroupInformation;

public class Ana {
    private static String driverName = "org.apache.hive.jdbc.HiveDriver";
    public static void main(String[] args) throws SQLException, IOException {
        try {
            // Configure Hadoop security so UserGroupInformation uses Kerberos
            org.apache.hadoop.conf.Configuration conf = new org.apache.hadoop.conf.Configuration();
            conf.set("hadoop.security.authentication", "Kerberos");
            conf.set("hive.server2.authentication", "Kerberos");
            UserGroupInformation.setConfiguration(conf);
            // Log in from the keytab that was copied to the Windows PC
            UserGroupInformation.loginUserFromKeytab(
                    "neverwinter@FW.XXX.COM.TR", "C:\\Users\\neverwinter\\Documents\\calisan\\yeni2\\neverwinter.keytab");
            Class.forName(driverName);
        } catch ( ClassNotFoundException e ) {
            e.printStackTrace();
            System.exit(1);
        }
        // The "principal" parameter is HiveServer2's service principal (SPN)
        Connection con = DriverManager
                .getConnection("jdbc:hive2://myHost:10000/TGAR;principal=hive/neverwinter@FW.XXX.COM.TR");
        Statement stmt = con.createStatement();
        String tableName = "newTable";
        stmt.execute("drop table if exists " + tableName);
        stmt.execute("create table " + tableName + " (key int, value string)");
    }
}
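For what it's worth, here is a minimal sketch of a variant I use for debugging: it checks that the keytab login actually produced Kerberos credentials and opens the JDBC connection inside a doAs block so the SASL/GSSAPI layer uses that login subject. The principal, keytab path and URL are the same as above; I have not confirmed that this changes the outcome.

import java.security.PrivilegedExceptionAction;
import java.sql.Connection;
import java.sql.DriverManager;
import org.apache.hadoop.security.UserGroupInformation;

public class AnaDoAs {
    public static void main(String[] args) throws Exception {
        org.apache.hadoop.conf.Configuration conf = new org.apache.hadoop.conf.Configuration();
        conf.set("hadoop.security.authentication", "Kerberos");
        UserGroupInformation.setConfiguration(conf);

        // Same principal and keytab as in the original code
        UserGroupInformation.loginUserFromKeytab(
                "neverwinter@FW.XXX.COM.TR",
                "C:\\Users\\neverwinter\\Documents\\calisan\\yeni2\\neverwinter.keytab");

        // Sanity checks: security must be enabled and the login user
        // must actually carry Kerberos credentials from the keytab.
        UserGroupInformation ugi = UserGroupInformation.getLoginUser();
        System.out.println("security enabled: " + UserGroupInformation.isSecurityEnabled());
        System.out.println("login user      : " + ugi.getUserName());
        System.out.println("has kerberos    : " + ugi.hasKerberosCredentials());

        Class.forName("org.apache.hive.jdbc.HiveDriver");

        // Open the connection as the logged-in Kerberos subject
        Connection con = ugi.doAs((PrivilegedExceptionAction<Connection>) () ->
                DriverManager.getConnection(
                        "jdbc:hive2://myHost:10000/TGAR;principal=hive/neverwinter@FW.XXX.COM.TR"));
        System.out.println("connected: " + !con.isClosed());
        con.close();
    }
}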

My pom.xml is:

<dependency>
    <groupId>org.apache.hive</groupId>
    <artifactId>hive-jdbc</artifactId>
    <version>1.2.1</version>
    <exclusions>
        <exclusion>
            <groupId>jdk.tools</groupId>
            <artifactId>jdk.tools</artifactId>
        </exclusion>
    </exclusions>
</dependency>
<dependency>
      <groupId>org.apache.hive</groupId>
      <artifactId>hive-exec</artifactId>
      <version>0.13.0</version>
</dependency>
<dependency>
      <groupId>org.apache.hive</groupId>
      <artifactId>hive-jdbc</artifactId>
      <version>0.13.0</version>
</dependency>
<dependency>
      <groupId>org.apache.thrift</groupId>
      <artifactId>libthrift</artifactId>
      <version>0.9.0</version>
</dependency>
<dependency>
      <groupId>org.apache.thrift</groupId>
      <artifactId>libfb303</artifactId>
      <version>0.9.0</version>
</dependency>
<dependency>
      <groupId>commons-logging</groupId>
      <artifactId>commons-logging</artifactId>
      <version>1.1.3</version>
</dependency>
<dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-client</artifactId>
      <version>2.6.0</version>
</dependency>

When execution reaches the getConnection line, it throws the error below:

Caused by: org.apache.thrift.transport.TTransportException: GSS initiate failed
    at org.apache.thrift.transport.TSaslTransport.sendAndThrowMessage(TSaslTransport.java:221)
    at org.apache.thrift.transport.TSaslTransport.open(TSaslTransport.java:297)
    at org.apache.thrift.transport.TSaslClientTransport.open(TSaslClientTransport.java:37)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:52)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:49)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Unknown Source)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1628)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport.open(TUGIAssumingTransport.java:49)
    at org.apache.hive.jdbc.HiveConnection.openTransport(HiveConnection.java:203)
    ... 5 more


My JVM parameters:

-Djava.security.krb5.debug=true
-Djava.security.krb5.conf="C:\Users\neverwinter\Desktop\krb5.conf"
-Djava.security.auth.login.config="C:\Users\neverwinter\Desktop\gss-jaas.conf"
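To rule out IDE quoting or path problems with these flags, a small sketch that simply echoes what the JVM actually sees; the class name is illustrative only. (The low-level Kerberos traces are controlled by sun.security.krb5.debug and the GSS-API traces by sun.security.jgss.debug, which can also be set programmatically before any Kerberos code runs.)

public class KrbDebugCheck {
    public static void main(String[] args) {
        // Setting the debug properties in code is an alternative to -D flags,
        // as long as it happens before any Kerberos/GSS classes are touched.
        System.setProperty("sun.security.krb5.debug", "true");  // Kerberos protocol traces
        System.setProperty("sun.security.jgss.debug", "true");  // GSS-API layer traces

        // Echo the properties the JVM actually picked up from the command line.
        System.out.println("krb5.conf    = " + System.getProperty("java.security.krb5.conf"));
        System.out.println("jaas config  = " + System.getProperty("java.security.auth.login.config"));
        System.out.println("krb5 debug   = " + System.getProperty("sun.security.krb5.debug"));
    }
}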

I have searched for this GSS failure but nothing has helped in this case. What am I missing? Any idea would be great.

  • Service principal `hive/myHost@neverwinter` does not make sense since your client principal is `neverwinter@FW.XXX.COM.TR` ; what about `hive/myHost@FW.XXX.COM.TR` ? – Samson Scharfrichter Jun 21 '18 at 16:48
  • Check my comments on https://stackoverflow.com/questions/50951656/cannot-connect-locally-to-hdfs-kerberized-cluster-using-intellij about how to enable the Kerberos debug traces -- I'm tired of repeating the same stuff again and again. – Samson Scharfrichter Jun 21 '18 at 16:56
  • keep calm bro. hive/myHost@FW.XXX.COM.TR does not work either; I tried all combinations :D By the way, I have already set -Dsun.security.krb5.debug=true, -Djava.security.krb5.conf="C:\Users\neverwinter\Desktop\krb5.conf" and -Djava.security.auth.login.config="C:\Users\neverwinter\Desktop\gss-jaas.conf" in the IDE's JVM parameters, but it doesn't print detailed info. @SamsonScharfrichter – neverwinter Jun 22 '18 at 04:28
  • **1.** SPN syntax is `svc_type/canonical.dns.name@REALM`, full stop, but beware of case and of DNS inconsistencies **2.** without specific trace flags you will see nothing, except maybe **3.** the GSS `minor code`, cf. https://steveloughran.gitbooks.io/kerberos_and_hadoop/content/sections/errors.html _(read the whole GitBook to understand the sticky mess you're getting into)_ – Samson Scharfrichter Jun 22 '18 at 06:45

0 Answers