Krb5HiveConnectionServiceImpl.java

package com.primeton.damp.bigdata;

import lombok.extern.slf4j.Slf4j;
import org.apache.hadoop.conf.Configuration;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.Properties;
import java.util.Set;

/**
 * Obtains Hive connections via Kerberos authentication.
 *
 * <pre>
 * Created by zhaopx.
 * User: zhaopx
 * Date: 2020/4/22
 * Time: 18:02
 * </pre>
 *
 * @author zhaopx
 */
@Slf4j
public class Krb5HiveConnectionServiceImpl implements HiveConnectionService {

    /**
     * Hive data source parameters (expects the keys authBasePath, hiveDbUser and hiveUrl).
     */
    final Properties params;

    /**
     * Base directory containing the authentication files (keytab, krb5 conf, *-site.xml).
     */
    final String authBasePath;

    /** JDBC URL, assembled in {@link #doAuth()}. */
    String hiveUrl;

    public Krb5HiveConnectionServiceImpl(Properties params) {
        this.params = params;
        this.authBasePath = params.getProperty("authBasePath");
    }

    @Override
    public boolean doAuth() {
        // e.g. krbUser = "hadoop/cdh-node1@HADOOP.COM"
        log.info("Starting Hive Kerberos authentication.");
        AuthPrincipalCreator authPrincipalCreator = AuthPrincipalCreator.useExtractorConf(authBasePath);
        Set<String> principals = authPrincipalCreator.listPrincipals();
        log.info("found existing principals: {}", principals);
        AuthPrincipal kerberosPrincipal = authPrincipalCreator.getKerberosPrincipal(params.getProperty("hiveDbUser"));
        String userKeytab = kerberosPrincipal.getUserKeytabFile().getAbsolutePath();
        String krb5File = kerberosPrincipal.getKrb5File().getAbsolutePath();
        String krbUser = kerberosPrincipal.getPrincipal();
        // Append the server principal to the JDBC URL unless it is already present.
        StringBuilder buffer = new StringBuilder(params.getProperty("hiveUrl"));
        if (!buffer.toString().contains(";principal=")) {
            buffer.append(";principal=").append(krbUser);
        }
        hiveUrl = buffer.toString();
        log.info("HIVE_URL: {}", hiveUrl);
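        // Illustrative result (hypothetical host and realm, not taken from this project):
        //   jdbc:hive2://cdh-node1:10000/default;principal=hive/cdh-node1@HADOOP.COM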
        // Load the core-site, hdfs-site and hive-site files, if present.
        Configuration conf = new Configuration();
        try {
            if (kerberosPrincipal.getCoreSite() != null) {
                conf.addResource(kerberosPrincipal.getCoreSite().toURI().toURL());
                log.info("add config: {}", kerberosPrincipal.getCoreSite().getAbsolutePath());
            }
            if (kerberosPrincipal.getHdfsSite() != null) {
                conf.addResource(kerberosPrincipal.getHdfsSite().toURI().toURL());
                log.info("add config: {}", kerberosPrincipal.getHdfsSite().getAbsolutePath());
            }
            if (kerberosPrincipal.getHiveSite() != null) {
                conf.addResource(kerberosPrincipal.getHiveSite().toURI().toURL());
                log.info("add config: {}", kerberosPrincipal.getHiveSite().getAbsolutePath());
            }
        } catch (Exception e) {
            throw new IllegalStateException(e);
        }
        // Kerberos login
        KerberosUtil.loginKerberos(conf, krbUser, userKeytab, krb5File);
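        // KerberosUtil is a project-local helper not shown in this file. A typical
        // implementation (an assumption, not confirmed by this source) delegates to
        // Hadoop's UserGroupInformation:
        //
        //   System.setProperty("java.security.krb5.conf", krb5File);
        //   conf.set("hadoop.security.authentication", "kerberos");
        //   UserGroupInformation.setConfiguration(conf);
        //   UserGroupInformation.loginUserFromKeytab(krbUser, userKeytab);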
        log.info("Hive Kerberos authentication succeeded.");
        return true;
    }

    @Override
    public Connection getConnection() throws SQLException {
        try {
            Class.forName("org.apache.hive.jdbc.HiveDriver");
        } catch (ClassNotFoundException e) {
            throw new SQLException("Hive driver not found: org.apache.hive.jdbc.HiveDriver", e);
        }
        // User and password are left empty: the identity comes from the Kerberos
        // ticket obtained in doAuth().
        return DriverManager.getConnection(hiveUrl, "", "");
    }
}
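
// Usage sketch (hypothetical caller; the property keys mirror the ones read above,
// the values are placeholders):
//
//   Properties props = new Properties();
//   props.setProperty("authBasePath", "/opt/auth");          // keytab/krb5 base dir
//   props.setProperty("hiveDbUser", "hive");                 // principal lookup key
//   props.setProperty("hiveUrl", "jdbc:hive2://cdh-node1:10000/default");
//
//   HiveConnectionService service = new Krb5HiveConnectionServiceImpl(props);
//   if (service.doAuth()) {
//       try (Connection conn = service.getConnection()) {
//           // run queries ...
//       }
//   }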