Original post: https://dongkelun.com/2021/05/10/javaSparkThriftServerWithKerberos/
Preface
This post summarizes how to connect from Java to a Spark Thrift Server / Hive Server secured with Kerberos authentication.
Starting the Servers
For how to start the Spark Thrift Server and Hive Server, see https://dongkelun.com/2021/02/19/javaSparkThriftServer/
Java Code
pom Dependencies

<dependency>
    <groupId>org.apache.hive</groupId>
    <artifactId>hive-jdbc</artifactId>
    <version>1.2.1</version>
</dependency>
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-common</artifactId>
    <version>2.6.5</version>
</dependency>
For the matching dependency versions, likewise see https://dongkelun.com/2021/02/19/javaSparkThriftServer/
Configuration Files
Both hive.service.keytab and krb5.conf are Kerberos-related files; download them from the server. krb5.conf is located under /etc, but check your own server's configuration for the exact path.
krb5.conf
[libdefaults]
  renew_lifetime = 7d
  forwardable = true
  default_realm = INDATA.COM
  ticket_lifetime = 24h
  dns_lookup_realm = false
  dns_lookup_kdc = false
  default_ccache_name = /tmp/krb5cc_%{uid}
  #default_tgs_enctypes = aes des3-cbc-sha1 rc4 des-cbc-md5
  #default_tkt_enctypes = aes des3-cbc-sha1 rc4 des-cbc-md5

[domain_realm]
  indata.com = INDATA.COM

[logging]
  default = FILE:/var/log/krb5kdc.log
  admin_server = FILE:/var/log/kadmind.log
  kdc = FILE:/var/log/krb5kdc.log

[realms]
  INDATA.COM = {
    admin_server = indata-192.168.44.128.indata.com:17490
    kdc = indata-192.168.44.128.indata.com
    kdc = indata-192.168.44.129.indata.com
  }
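
Before wiring up JDBC, it can help to verify the keytab and krb5.conf on their own. Below is a minimal sketch; the class name is made up for illustration, and the paths and principal are the same ones used in the demo code that follows.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;

public class KerberosLoginCheck {
    public static void main(String[] args) throws Exception {
        // Point the JVM at the krb5.conf downloaded from the server
        System.setProperty("java.security.krb5.conf", "D:\\conf\\inspur\\krb5.conf");
        Configuration conf = new Configuration();
        conf.set("hadoop.security.authentication", "kerberos");
        UserGroupInformation.setConfiguration(conf);
        // Throws an IOException if the keytab or principal is wrong
        UserGroupInformation.loginUserFromKeytab(
                "hive/indata-192-168-44-128.indata.com@INDATA.COM",
                "D:\\conf\\inspur\\hive.service.keytab");
        // Prints the authenticated principal on success
        System.out.println("Logged in as: " + UserGroupInformation.getLoginUser());
    }
}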
Code
package com.dkl.blog;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
import java.io.IOException;
import java.sql.*;
public class SparkThriftServerDemoWithKerberos {
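    // HiveServer2 URL: default binary Thrift transport on port 10000, hive/ service principal
    // Spark Thrift Server URL: HTTP transport mode (port 20003 here), authenticated with the HTTP/ service principal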
    private static final String HIVE_JDBC_URL = "jdbc:hive2://192.168.44.128:10000/sjtt;principal=hive/indata-192-168-44-128.indata.com@INDATA.COM";
    private static final String SPARK_JDBC_URL = "jdbc:hive2://192.168.44.128:20003/sjtt;" +
            "principal=HTTP/indata-192-168-44-128.indata.com@INDATA.COM?" +
            "hive.server2.transport.mode=http;hive.server2.thrift.http.path=cliservice;";
    private static final String PRINCIPAL = "hive/indata-192-168-44-128.indata.com@INDATA.COM";
    // Backslashes in Windows paths must be escaped in Java string literals
    private static final String KEYTAB = "D:\\conf\\inspur\\hive.service.keytab";
    private static final String KRB5 = "D:\\conf\\inspur\\krb5.conf";
    private static Configuration conf = null;

    static {
        conf = new Configuration();
    }
    public static void main(String[] args) throws SQLException {
        loadConfiguration();
        //----------------------------------connect hive----------------------------------//
        System.out.println("select from hive");
        jdbcDemo(HIVE_JDBC_URL);
        //------------------------------connect spark thrift server-----------------------//
        System.out.println("select from spark thrift server");
        jdbcDemo(SPARK_JDBC_URL);
    }
    public static void jdbcDemo(String jdbc_url) throws SQLException {
        Connection connection = null;
        try {
            connection = DriverManager.getConnection(jdbc_url);
            selectTable(connection);
        } catch (SQLException e) {
            e.printStackTrace();
        } finally {
            // Guard against an NPE when getConnection() itself failed
            if (connection != null) {
                connection.close();
            }
        }
    }
    public static void selectTable(Connection connection) {
        String sql = "select * from trafficbase_cljbxx limit 10";
        Statement stmt = null;
        ResultSet rs = null;
        try {
            stmt = connection.createStatement();
            rs = stmt.executeQuery(sql);
            System.out.println("=====================================");
            while (rs.next()) {
                System.out.println(rs.getString(1) + "," + rs.getString(2));
            }
            System.out.println("=====================================");
        } catch (SQLException e) {
            e.printStackTrace();
        } finally {
            // Close the ResultSet before its parent Statement
            close(rs);
            close(stmt);
        }
    }
    private static void loadConfiguration() {
        // Load the Kerberos configuration and log in from the keytab
        try {
            conf.set("hadoop.security.authentication", "kerberos");
            System.setProperty("java.security.krb5.conf", KRB5); // path to the krb5.conf file
            UserGroupInformation.setConfiguration(conf);
            UserGroupInformation.loginUserFromKeytab(PRINCIPAL, KEYTAB); // arguments: principal and keytab file path
        } catch (IOException ioE) {
            System.err.println("Keytab login failed");
            ioE.printStackTrace();
        }
    }
    private static void close(Statement stmt) {
        if (stmt != null) {
            try {
                stmt.close();
            } catch (SQLException e) {
                e.printStackTrace();
            }
        }
    }

    private static void close(ResultSet rs) {
        if (rs != null) {
            try {
                rs.close();
            } catch (SQLException e) {
                e.printStackTrace();
            }
        }
    }
}
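
One design note: UserGroupInformation.loginUserFromKeytab stores the login as process-wide static state in hadoop-common, so the single call in loadConfiguration() authenticates every JDBC connection the JVM subsequently opens, both to Hive and to the Spark Thrift Server.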
The code has been uploaded to GitHub.



