Today, while connecting to Hive via JDBC on the China Mobile cloud platform, I found that the platform uses Kerberos authentication, unlike the Ningbo test environment.
I found an article that solved the problem and am reposting it below.
Original source: http://blog.csdn.net/zengmingen/article/details/78605086
------------------------------
Because the HDP cluster built with Ambari has Kerberos enabled, Hive data is exposed to third parties uniformly through JDBC, so I wrote the simple example below as a reference for third-party data integration. The code is as follows:
package com.bmsoft.hive.impl;

import org.apache.hadoop.security.UserGroupInformation;

import java.io.IOException;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

/**
 * A simple example of connecting to Hive via JDBC (Kerberos enabled).
 */
public class HiveSimple2 {
    /**
     * Parameters needed to connect to Hive. driverName: the JDBC driver class for Hive.
     * When connecting to HiveServer2 with Kerberos authentication, the URL format is:
     * jdbc:hive2://<host>:<port>/<db>;principal=<Server_Principal_of_HiveServer2>
     */
    private static String driverName = "org.apache.hive.jdbc.HiveDriver";
    // Note: the principal here is fixed; it is the principal of the Hive service,
    // not the principal of the connecting user.
    private static String url = "jdbc:hive2://bigdata40:10000/admin;principal=hive/bigdata40@BIGDATA.COM";
    private static String sql = "";
    private static ResultSet res;

    public static Connection get_conn() throws SQLException, ClassNotFoundException {
        /* Log in via Hadoop security */
        org.apache.hadoop.conf.Configuration conf = new org.apache.hadoop.conf.Configuration();
        conf.set("hadoop.security.authentication", "Kerberos");
        if (System.getProperty("os.name").toLowerCase().startsWith("win")) {
            // If not set, Windows looks for krb5.ini on the C: drive by default.
            System.setProperty("java.security.krb5.conf", "C:/Windows/krbconf/bms/krb5.ini");
        } // On Linux, krb5.conf is read from /etc/krb5.conf by default; I have already
          // placed the file under /etc/, so no extra setting is needed there.
        try {
            UserGroupInformation.setConfiguration(conf);
            UserGroupInformation.loginUserFromKeytab("test2/hdp39@BMSOFT.COM", "./conf/test2.keytab");
        } catch (IOException e1) {
            e1.printStackTrace();
        }
        Class.forName(driverName);
        Connection conn = DriverManager.getConnection(url);
        return conn;
    }

    /**
     * List all tables in the database.
     *
     * @param statement
     * @return
     */
    public static boolean show_tables(Statement statement) {
        sql = "SHOW TABLES";
        System.out.println("Running: " + sql);
        try {
            ResultSet res = statement.executeQuery(sql);
            System.out.println("Result of \"" + sql + "\":");
            while (res.next()) {
                System.out.println(res.getString(1));
            }
            return true;
        } catch (SQLException e) {
            e.printStackTrace();
        }
        return false;
    }

    /**
     * Get the description of a table.
     *
     * @param statement
     * @param tableName
     * @return
     */
    public static boolean describ_table(Statement statement, String tableName) {
        sql = "DESCRIBE " + tableName;
        try {
            res = statement.executeQuery(sql);
            System.out.print(tableName + " description:");
            while (res.next()) {
                System.out.println(res.getString(1) + "\t" + res.getString(2));
            }
            return true;
        } catch (SQLException e) {
            e.printStackTrace();
        }
        return false;
    }

    /**
     * Drop a table.
     *
     * @param statement
     * @param tableName
     * @return
     */
    public static boolean drop_table(Statement statement, String tableName) {
        sql = "DROP TABLE IF EXISTS " + tableName;
        System.out.println("Running: " + sql);
        try {
            statement.execute(sql);
            System.out.println(tableName + " dropped successfully");
            return true;
        } catch (SQLException e) {
            System.out.println(tableName + " drop failed");
            e.printStackTrace();
        }
        return false;
    }

    /**
     * Query table data.
     *
     * @param statement
     * @return
     */
    public static boolean queryData(Statement statement, String tableName) {
        sql = "SELECT * FROM " + tableName + " LIMIT 20";
        System.out.println("Running: " + sql);
        try {
            res = statement.executeQuery(sql);
            System.out.println("Result of \"" + sql + "\":");
            while (res.next()) {
                System.out.println(res.getString(1) + "," + res.getString(2) + "," + res.getString(3));
            }
            return true;
        } catch (SQLException e) {
            e.printStackTrace();
        }
        return false;
    }

    /**
     * Create a table.
     *
     * @param statement
     * @return
     */
    public static boolean createTable(Statement statement, String tableName) {
        // For convenience, create the table directly by copying data from another table.
        sql = "CREATE TABLE test_1m_test2 AS SELECT * FROM test_1m_test";
        System.out.println("Running: " + sql);
        try {
            boolean execute = statement.execute(sql);
            System.out.println("Result: " + execute);
            return true;
        } catch (SQLException e) {
            e.printStackTrace();
        }
        return false;
    }

    public static void main(String[] args) {
        try {
            Connection conn = get_conn();
            Statement stmt = conn.createStatement();
            // Name of the table to create
            String tableName = "test_100m";
            show_tables(stmt);
            // describ_table(stmt, tableName);
            /* Drop the table */
            // drop_table(stmt, tableName);
            // show_tables(stmt);
            // queryData(stmt, tableName);
            createTable(stmt, tableName);
            conn.close();
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            System.out.println("!!!!!!END!!!!!!!!");
        }
    }
}
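As a quick sanity check, not part of the original example, a minimal sketch like the following can confirm that the keytab login performed in get_conn() actually took effect. The class name LoginCheck is made up for illustration; it relies only on the UserGroupInformation API already used above and assumes the same principal and keytab path:

import java.io.IOException;
import org.apache.hadoop.security.UserGroupInformation;

public class LoginCheck {
    public static void main(String[] args) throws IOException {
        // Assumes loginUserFromKeytab(...) has already been called, e.g. via HiveSimple2.get_conn().
        UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
        System.out.println("Logged in as: " + ugi.getUserName());  // expected: test2/hdp39@BMSOFT.COM
        System.out.println("From keytab: " + ugi.isFromKeytab());  // expected: true after a keytab login
    }
}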
The pom.xml file is as follows:
<dependencies>
    <!-- https://mvnrepository.com/artifact/org.apache.hive/hive-jdbc -->
    <dependency>
        <groupId>org.apache.hive</groupId>
        <artifactId>hive-jdbc</artifactId>
        <version>1.2.1</version>
    </dependency>
    <!-- https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-common -->
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-common</artifactId>
        <version>2.7.1</version>
    </dependency>
    <!-- https://mvnrepository.com/artifact/org.apache.hive/hive-exec -->
    <dependency>
        <groupId>org.apache.hive</groupId>
        <artifactId>hive-exec</artifactId>
        <version>1.2.1</version>
    </dependency>
    <!-- https://mvnrepository.com/artifact/org.apache.hive/hive-metastore -->
    <dependency>
        <groupId>org.apache.hive</groupId>
        <artifactId>hive-metastore</artifactId>
        <version>1.2.1</version>
    </dependency>
    <!-- https://mvnrepository.com/artifact/org.apache.hive/hive-common -->
    <dependency>
        <groupId>org.apache.hive</groupId>
        <artifactId>hive-common</artifactId>
        <version>1.2.1</version>
    </dependency>
    <!-- https://mvnrepository.com/artifact/org.apache.hive/hive-service -->
    <dependency>
        <groupId>org.apache.hive</groupId>
        <artifactId>hive-service</artifactId>
        <version>1.2.1</version>
    </dependency>
    <dependency>
        <groupId>log4j</groupId>
        <artifactId>log4j</artifactId>
        <version>1.2.17</version>
        <type>jar</type>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-client</artifactId>
        <version>2.7.3</version>
    </dependency>
</dependencies>

Reference: https://cwiki.apache.org/confluence/display/Hive/HiveServer2Clients. One point in that document is particularly worth noting:
JDBC Client Setup for a Secure Cluster
When connecting to HiveServer2 with Kerberos authentication, the URL format is:
jdbc:hive2://<host>:<port>/<db>;principal=<Server_Principal_of_HiveServer2>
The principal here is fixed: it refers to the principal of the Hive service, not the principal of the connecting user. The <db> in the URL may even be a database that does not exist; but if you do that, queries must name the table's database explicitly, e.g. db.tablename, otherwise the query defaults to the table under the <db> given in the URL, as sketched below.
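To illustrate that last point, here is a minimal sketch that reuses get_conn() from the example above; the names real_db and test_table are made up for illustration and are not from the original article:

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;

public class QualifiedQuery {
    public static void main(String[] args) throws Exception {
        // Reuses the Kerberos login and JDBC URL from HiveSimple2 above.
        Connection conn = HiveSimple2.get_conn();
        Statement stmt = conn.createStatement();
        // The <db> in the URL ("admin") need not exist, so qualify the table with its database.
        ResultSet res = stmt.executeQuery("SELECT * FROM real_db.test_table LIMIT 5");
        while (res.next()) {
            System.out.println(res.getString(1));
        }
        conn.close();
    }
}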