// Krb5HiveConnectionServiceImpl.java
package com.primeton.dsp.datarelease.data.bdata;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.Set;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;

import com.primeton.dsp.datarelease.server.model.DspHiveResource;

import lombok.extern.slf4j.Slf4j;
/**
 * Obtains Hive JDBC connections authenticated via Kerberos (krb5).
 *
 * <pre>
 * Created by zhaopx.
 * User: zhaopx
 * Date: 2020/4/22
 * Time: 18:02
 * </pre>
 *
 * @author zhaopx
 */
  26. @Slf4j
  27. public class Krb5HiveConnectionServiceImpl implements HiveConnectionService {
  28. /**
  29. * Hive 数据源
  30. */
  31. final DspHiveResource hiveResource;
  32. String hiveUrl;
  33. public Krb5HiveConnectionServiceImpl(DspHiveResource hiveResource) {
  34. this.hiveResource = hiveResource;
  35. }
  36. @Override
  37. public boolean doAuth() {
  38. //KrbUser = "hadoop/cdh-node1@HADOOP.COM";
  39. // 认证传过来
  40. AuthPrincipalCreator authPrincipalCreator = AuthPrincipalCreator.useDataReleaseConf(hiveResource.getAuthBasePath());
  41. Set<String> principals = authPrincipalCreator.listPrincipals();
  42. log.info("find existed principals: {}", principals);
  43. AuthPrincipal kerberosPrincipal = authPrincipalCreator.getKerberosPrincipal(hiveResource.getHiveDbUser());
  44. String userKeytab = kerberosPrincipal.getUserKeytabFile().getAbsolutePath();
  45. String krb5File = kerberosPrincipal.getKrb5File().getAbsolutePath();
  46. String krbUser = kerberosPrincipal.getPrincipal();
  47. StringBuffer buffer = new StringBuffer(hiveResource.getHiveUrl());
  48. buffer.append(";principal=").append(krbUser);
  49. hiveUrl = buffer.toString();
  50. log.info("HIVE_URL : " + hiveUrl);
  51. // 分别加载 core、hdfs、hive site 文件
  52. Configuration conf = new Configuration();
  53. try {
  54. if (kerberosPrincipal.getCoreSite() != null) {
  55. conf.addResource(kerberosPrincipal.getCoreSite().toURL());
  56. log.info("add config: {}", kerberosPrincipal.getCoreSite().getAbsolutePath());
  57. }
  58. if (kerberosPrincipal.getHdfsSite() != null) {
  59. conf.addResource(kerberosPrincipal.getHdfsSite().toURL());
  60. log.info("add config: {}", kerberosPrincipal.getHdfsSite().getAbsolutePath());
  61. }
  62. if (kerberosPrincipal.getHiveSite() != null) {
  63. conf.addResource(kerberosPrincipal.getHiveSite().toURL());
  64. log.info("add config: {}", kerberosPrincipal.getHiveSite().getAbsolutePath());
  65. }
  66. } catch (Exception e) {
  67. throw new IllegalStateException(e);
  68. }
  69. // Kerberos 认证
  70. KerberosUtil.loginKerberos(conf, krbUser, userKeytab, krb5File);
  71. log.info("hive kerberos 认证通过。");
  72. return true;
  73. }
  74. @Override
  75. public Connection getConnection() throws SQLException {
  76. try {
  77. Class.forName("org.apache.hive.jdbc.HiveDriver");
  78. } catch (ClassNotFoundException e) {
  79. throw new SQLException("找不到Hive驱动:org.apache.hive.jdbc.HiveDriver.", e);
  80. }
  81. return DriverManager.getConnection(hiveUrl, "", "");
  82. }
  83. }