FIHiveConnectionServiceImpl.java 3.1 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102
  1. package com.primeton.dsp.datarelease.data.bdata;
import com.primeton.dsp.datarelease.server.model.DspHiveResource;

import lombok.extern.slf4j.Slf4j;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;

import java.io.File;
import java.net.MalformedURLException;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Set;
  9. /**
  10. *
  11. * Hive Kerberos 认证方式获得连接
  12. *
  13. *
  14. * <pre>
  15. *
  16. * Created by zhaopx.
  17. * User: zhaopx
  18. * Date: 2020/4/22
  19. * Time: 18:02
  20. *
  21. * </pre>
  22. *
  23. * @author zhaopx
  24. */
  25. @Slf4j
  26. public class FIHiveConnectionServiceImpl implements HiveConnectionService {
  27. /**
  28. * Hive 数据源
  29. */
  30. final DspHiveResource hiveResource;
  31. private HiveHelper hiveHelper;
  32. public FIHiveConnectionServiceImpl(DspHiveResource hiveResource) {
  33. this.hiveResource = hiveResource;
  34. }
  35. @Override
  36. public boolean doAuth() {
  37. // 认证传过来
  38. AuthPrincipalCreator authPrincipalCreator = AuthPrincipalCreator.useDataReleaseConf(hiveResource.getAuthBasePath());
  39. Set<String> principals = authPrincipalCreator.listPrincipals();
  40. log.info("find existed principals: {}", principals);
  41. AuthPrincipal kerberosPrincipal = authPrincipalCreator.getKerberosPrincipal(hiveResource.getHiveDbUser());
  42. String userKeytabFile = kerberosPrincipal.getUserKeytabFile().getAbsolutePath();
  43. String krb5File = kerberosPrincipal.getKrb5File().getAbsolutePath();
  44. String krbUser = kerberosPrincipal.getPrincipal();
  45. String hiveclientPropFile = kerberosPrincipal.getHiveClientFile().getAbsolutePath();
  46. // 分别加载 core、hdfs、hive site 文件
  47. Configuration conf = new Configuration();
  48. try {
  49. if (kerberosPrincipal.getCoreSite() != null) {
  50. conf.addResource(kerberosPrincipal.getCoreSite().toURL());
  51. log.info("add config: {}", kerberosPrincipal.getCoreSite().getAbsolutePath());
  52. }
  53. if (kerberosPrincipal.getHdfsSite() != null) {
  54. conf.addResource(kerberosPrincipal.getHdfsSite().toURL());
  55. log.info("add config: {}", kerberosPrincipal.getHdfsSite().getAbsolutePath());
  56. }
  57. if (kerberosPrincipal.getHiveSite() != null) {
  58. conf.addResource(kerberosPrincipal.getHiveSite().toURL());
  59. log.info("add config: {}", kerberosPrincipal.getHiveSite().getAbsolutePath());
  60. }
  61. } catch (Exception e) {
  62. throw new IllegalStateException(e);
  63. }
  64. try {
  65. this.hiveHelper = new HiveHelper(conf, hiveclientPropFile, krbUser, userKeytabFile, krb5File);
  66. log.info("hive fusioninsight 认证通过。");
  67. return true;
  68. } catch (Exception e) {
  69. throw new SecurityException("FI 认证失败。", e);
  70. }
  71. }
  72. @Override
  73. public Connection getConnection() throws SQLException {
  74. try {
  75. Class.forName("org.apache.hive.jdbc.HiveDriver");
  76. } catch (ClassNotFoundException e) {
  77. throw new SQLException("找不到Hive驱动:org.apache.hive.jdbc.HiveDriver.", e);
  78. }
  79. return hiveHelper.getPoolConnection();
  80. }
  81. }