zhzhenqin 5 gadi atpakaļ
revīzija
65ad258d98
44 mainītis faili ar 5421 papildinājumiem un 0 dzēšanām
  1. 18 0
      .gitignore
  2. 6 0
      README.md
  3. 0 0
      entity-to-sql/README.md
  4. 57 0
      entity-to-sql/model/Field.java
  5. 133 0
      entity-to-sql/model/JoinPair.java
  6. 76 0
      entity-to-sql/model/SelectField.java
  7. 124 0
      entity-to-sql/model/Table.java
  8. 162 0
      entity-to-sql/model/WhereCause.java
  9. 172 0
      entity-to-sql/sql/SQLCreatorFactory.java
  10. 590 0
      entity-to-sql/sql/SQLJoin.java
  11. 353 0
      entity-to-sql/sql/SQLQuery.java
  12. 66 0
      hadoop-auth/AuthPrincipal.java
  13. 212 0
      hadoop-auth/AuthPrincipalCreator.java
  14. 102 0
      hadoop-auth/FIHiveConnectionServiceImpl.java
  15. 141 0
      hadoop-auth/FiHBaseConnectionServiceImpl.java
  16. 43 0
      hadoop-auth/HBaseConnectionFactory.java
  17. 45 0
      hadoop-auth/HBaseConnectionService.java
  18. 47 0
      hadoop-auth/HiveConnectionFactory.java
  19. 35 0
      hadoop-auth/HiveConnectionService.java
  20. 178 0
      hadoop-auth/HiveHelper.java
  21. 38 0
      hadoop-auth/KerberosUtil.java
  22. 122 0
      hadoop-auth/Krb5HBaseConnectionServiceImpl.java
  23. 100 0
      hadoop-auth/Krb5HiveConnectionServiceImpl.java
  24. 554 0
      hadoop-auth/LoginUtil.java
  25. 33 0
      hadoop-auth/PooledDataSource.java
  26. 210 0
      hadoop-auth/SimpleDataSource.java
  27. 140 0
      hadoop-auth/SimpleHBaseConnectionServiceImpl.java
  28. 61 0
      hadoop-auth/SimpleHiveConnectionServiceImpl.java
  29. 51 0
      java-commons-cache/CacheEvict.java
  30. 39 0
      java-commons-cache/Cacheable.java
  31. 102 0
      java-commons-cache/ICache.java
  32. 26 0
      java-commons-cache/impl/AbstractCacheService.java
  33. 149 0
      java-commons-cache/impl/CacheFactory.java
  34. 254 0
      java-commons-cache/impl/CacheInterceptorImpl.java
  35. 159 0
      java-commons-cache/impl/Ehcache.java
  36. 144 0
      java-commons-cache/impl/Memcached.java
  37. 158 0
      java-commons-cache/impl/Redised.java
  38. 63 0
      java-commons-cache/serde/JdkSerializer.java
  39. 45 0
      java-commons-cache/serde/Serializer.java
  40. 46 0
      java-commons-cache/serde/StringSerializer.java
  41. 61 0
      java-message-mq/Event.java
  42. 27 0
      java-message-mq/MessageConsumer.java
  43. 37 0
      java-message-mq/MessageProvider.java
  44. 242 0
      java-message-mq/MessageService.java

+ 18 - 0
.gitignore

@@ -0,0 +1,18 @@
+*.class
+*.jar
+*.war
+*.ear
+*.log
+*.svn
+*.iml
+.classpath
+.project
+
+# Package Files #
+.svn
+.idea
+.settings
+target
+db
+log
+logs

+ 6 - 0
README.md

@@ -0,0 +1,6 @@
+> 经典 Java 代码集锦
+
+1. 实体生成查询 SQL
+2. Hadoop 认证相关;
+3. Java 缓存实现,多种缓存引擎;
+4. Java 线程消息实现;

+ 0 - 0
entity-to-sql/README.md


+ 57 - 0
entity-to-sql/model/Field.java

@@ -0,0 +1,57 @@
+package com.primeton.dsp.datarelease.api.model;
+
/**
 * A column reference used when generating SQL: the physical column
 * name plus an optional alias exposed in the SELECT list.
 *
 * @author zhaopx
 */
public class Field {

    /** Physical column name. */
    String fieldName;

    /** Alias used in the generated SQL; defaults to the column name. */
    String alias;

    /** No-arg constructor so the class can be JSON-deserialized. */
    public Field() {
    }

    /**
     * Creates a field whose alias equals its name.
     *
     * @param fieldName the column name
     */
    public Field(String fieldName) {
        this(fieldName, fieldName);
    }

    /**
     * Creates a field with an explicit alias.
     *
     * @param fieldName the column name
     * @param alias     the alias to expose in the generated SQL
     */
    public Field(String fieldName, String alias) {
        this.fieldName = fieldName;
        this.alias = alias;
    }

    public String getFieldName() {
        return fieldName;
    }

    public void setFieldName(String fieldName) {
        this.fieldName = fieldName;
    }

    public String getAlias() {
        return alias;
    }

    public void setAlias(String alias) {
        this.alias = alias;
    }
}

+ 133 - 0
entity-to-sql/model/JoinPair.java

@@ -0,0 +1,133 @@
+/**
+ * 
+ */
+package com.primeton.dsp.datarelease.api.model;
+
+import org.apache.commons.lang.builder.ToStringBuilder;
+
+/**
+ * @author zhaopx
+ *
+ */
+public class JoinPair {
+	
+	
+	/**
+	 * left table
+	 */
+	String left;
+
+
+	/**
+	 * 左边表关联的字段
+	 */
+	String leftField;
+	
+	/**
+	 * join type: inner_join,left_join,right_join, full_out_join;
+	 */
+	String joinType;
+
+
+	/**
+	 * 运算逻辑: =(等于),正常的JOIN都是等于。其他奇葩的 >(大于),<(小于),>=(大于或等于),<=(小于或等于),<> or !=(不等于)也理论支持。
+	 */
+	String opera = "=";
+	
+	/**
+	 * right table
+	 */
+	String right;
+
+
+	/**
+	 * 右边表关联的字段
+	 */
+	String rightField;
+	
+
+	/**
+	 * 
+	 */
+	public JoinPair() {
+		
+	}
+
+
+	public JoinPair(String left, String joinType, String right) {
+		super();
+		this.left = left;
+		this.joinType = joinType;
+		this.right = right;
+	}
+
+	public JoinPair(String left, String leftField, String joinType, String right, String rightField) {
+		this.left = left;
+		this.leftField = leftField;
+		this.joinType = joinType;
+		this.right = right;
+		this.rightField = rightField;
+	}
+
+	public String getLeft() {
+		return left;
+	}
+
+
+	public String getRight() {
+		return right;
+	}
+
+	
+	
+
+	public String getJoinType() {
+		return joinType;
+	}
+
+
+	public void setJoinType(String joinType) {
+		this.joinType = joinType;
+	}
+
+
+	public String getOpera() {
+		return opera;
+	}
+
+	public void setOpera(String opera) {
+		this.opera = opera;
+	}
+
+	public void setLeft(String left) {
+		this.left = left;
+	}
+
+
+	public void setRight(String right) {
+		this.right = right;
+	}
+
+
+	public String getLeftField() {
+		return leftField;
+	}
+
+	public void setLeftField(String leftField) {
+		this.leftField = leftField;
+	}
+
+	public String getRightField() {
+		return rightField;
+	}
+
+	public void setRightField(String rightField) {
+		this.rightField = rightField;
+	}
+
+	@Override
+	public String toString(){
+		return ToStringBuilder.reflectionToString(this);
+	}
+
+}

+ 76 - 0
entity-to-sql/model/SelectField.java

@@ -0,0 +1,76 @@
+package com.primeton.dsp.datarelease.api.model;
+
+import net.sf.jsqlparser.expression.Alias;
+import net.sf.jsqlparser.expression.Expression;
+import net.sf.jsqlparser.expression.ExpressionVisitor;
+import net.sf.jsqlparser.parser.ASTNodeAccessImpl;
+import net.sf.jsqlparser.schema.Column;
+
+/**
+ * <pre>
+ *
+ * Created by zhenqin.
+ * User: zhenqin
+ * Date: 2020/4/28
+ * Time: 17:48
+ * Vendor: yiidata.com
+ * To change this template use File | Settings | File Templates.
+ *
+ * </pre>
+ *
+ * @author zhenqin
+ */
+public class SelectField extends ASTNodeAccessImpl implements Expression {
+
+
+    /**
+     * 查询的字段,Column 是 final 无法继承造成的
+     */
+    private Column column;
+
+
+    /**
+     * 查询的字段别名
+     */
+    private Alias alias;
+
+
+    public SelectField(Expression expression) {
+        this((Column)expression);
+    }
+
+
+
+    public SelectField(Column expression) {
+        this.column = expression;
+    }
+
+
+    public SelectField(Column expression, Alias alias) {
+        this.column = expression;
+        this.alias = alias;
+    }
+
+    public Column getExpression() {
+        return column;
+    }
+
+
+    public Alias getAlias() {
+        return alias;
+    }
+
+    public void setAlias(Alias alias) {
+        this.alias = alias;
+    }
+
+    @Override
+    public void accept(ExpressionVisitor expressionVisitor) {
+        expressionVisitor.visit(column);
+    }
+
+    @Override
+    public String toString() {
+        return column + ((alias != null) ? alias.toString() : "");
+    }
+}

+ 124 - 0
entity-to-sql/model/Table.java

@@ -0,0 +1,124 @@
+/**
+ * 
+ */
+package com.primeton.dsp.datarelease.api.model;
+
+import com.google.common.collect.Lists;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+/**
+ *
+ * 服务发布生成预览SQL 的 table 类
+ *
+ * @author zhaopx
+ *
+ */
+public class Table {
+	
+	/**
+	 * 表名称
+	 */
+	String tableName;
+
+	/**
+	 * 关联时的别名
+	 */
+	String alias;
+
+	/**
+	 * 关联后 select 的表字段,当前表字段
+	 */
+	final List<Field> fields = new ArrayList<>();
+
+
+	/**
+	 * 默认构造方法,JSON 能反序列化
+	 */
+	public Table() {
+		this("TABLE", "A");
+	}
+
+	/**
+	 * 表名,别名为第一个字符
+	 * @param tableName
+	 */
+	public Table(String tableName) {
+		this(tableName, new String(new char[] {tableName.charAt(0)}));
+	}
+
+	/**
+	 * 构造方法
+	 * @param tableName 表名
+	 * @param alias 别名
+	 */
+	public Table(String tableName, String alias) {
+		this.tableName = tableName;
+		this.alias = alias;
+	}
+
+	public Table(String tableName, String alias, String field) {
+		this(tableName, alias, (List)Lists.newArrayList(field));
+	}
+
+	public Table(String tableName, String alias, List<Field> fields) {
+		this(tableName, alias);
+		this.setFields(fields);
+	}
+
+
+	public Table(String tableName, String alias, Field... fields) {
+		this(tableName, alias, Arrays.asList(fields));
+	}
+
+
+	public String getTableName() {
+		return tableName;
+	}
+
+	public void setTableName(String tableName) {
+		this.tableName = tableName;
+	}
+
+	public String getAlias() {
+		return alias;
+	}
+
+
+	public void setAlias(String alias) {
+		this.alias = alias;
+	}
+
+	public List<Field> getFields() {
+		return fields;
+	}
+
+	public void setFields(List<Field> fields) {
+		this.fields.clear();
+		this.fields.addAll(fields);
+	}
+
+	public void addField(String field) {
+		this.fields.add(new Field(field));
+	}
+
+
+	public void addField(Field field) {
+		this.fields.add(field);
+	}
+
+	public void addFields(Field... fields) {
+		this.fields.addAll(Arrays.asList(fields));
+	}
+
+
+	@Override
+	public String toString() {
+		return "Table{" +
+				"tableName='" + tableName + '\'' +
+				", alias='" + alias + '\'' +
+				", fields=" + fields +
+				'}';
+	}
+}

+ 162 - 0
entity-to-sql/model/WhereCause.java

@@ -0,0 +1,162 @@
+/**
+ * 
+ */
+package com.primeton.dsp.datarelease.api.model;
+
+import org.apache.commons.lang.builder.ToStringBuilder;
+
+/**
+ * 
+ * SQL 的  where 条件
+ * 
+ * @author zhaopx
+ *
+ */
+public class WhereCause {
+
+	/**
+	 * 表,where 条件的表
+	 */
+	String tableName;
+
+	/**
+	 * 该字段名称
+	 */
+	String fieldName;
+
+
+	/**
+	 * 左表某字段大于右表某字段的条件
+	 */
+	String toTableName;
+
+
+	/**
+	 * 左表某字段大于右表某字段的条件
+	 */
+	String toFieldName;
+
+	/**
+	 * 数据类型:string, numeric, datetime 三种类型
+	 */
+	String type = "string";
+
+	/**
+	 * 运算逻辑: =(等于),>(大于),<(小于),>=(大于或等于),<=(小于或等于),<> or !=(不等于)
+	 */
+	String opera = "=";
+
+	/**
+	 * 取值
+	 */
+	String value;
+
+    /**
+     * 与上一个条件的连接条件,与或。默认为 and。 and/or
+     */
+	String cond = "and";
+
+	/**
+	 * 构造方法
+	 */
+	public WhereCause() {
+		
+	}
+
+    public WhereCause(String tableName, String fieldName, String value) {
+        this.tableName = tableName;
+        this.fieldName = fieldName;
+        this.value = value;
+    }
+
+    public WhereCause(String tableName, String fieldName, String opera, String value) {
+        this(tableName, fieldName, value);
+        this.opera = opera;
+    }
+
+    public WhereCause(String tableName, String fieldName, String type, String opera, String value) {
+        this(tableName, fieldName, value);
+        this.type = type;
+        this.opera = opera;
+    }
+
+
+    public WhereCause(String tableName, String fieldName, String type, String opera, String value, String cond) {
+        this(tableName, fieldName, value);
+        this.type = type;
+        this.opera = opera;
+        this.cond = cond;
+    }
+
+    
+    
+    public String getTableName() {
+		return tableName;
+	}
+
+	public String getFieldName() {
+		return fieldName;
+	}
+
+	public String getToTableName() {
+		return toTableName;
+	}
+
+	public String getToFieldName() {
+		return toFieldName;
+	}
+
+	public String getType() {
+		return type;
+	}
+
+	public String getOpera() {
+		return opera;
+	}
+
+	public String getValue() {
+		return value;
+	}
+
+	public String getCond() {
+		return cond;
+	}
+
+
+	public void setTableName(String tableName) {
+		this.tableName = tableName;
+	}
+
+	public void setFieldName(String fieldName) {
+		this.fieldName = fieldName;
+	}
+
+	public void setToTableName(String toTableName) {
+		this.toTableName = toTableName;
+	}
+
+	public void setToFieldName(String toFieldName) {
+		this.toFieldName = toFieldName;
+	}
+
+	public void setType(String type) {
+		this.type = type;
+	}
+
+	public void setOpera(String opera) {
+		this.opera = opera;
+	}
+
+	public void setValue(String value) {
+		this.value = value;
+	}
+
+	public void setCond(String cond) {
+		this.cond = cond;
+	}
+
+	@Override
+	public String toString() {
+		return ToStringBuilder.reflectionToString(this);
+	}
+}

+ 172 - 0
entity-to-sql/sql/SQLCreatorFactory.java

@@ -0,0 +1,172 @@
+package com.primeton.dsp.datarelease.api.sql;
+
+import com.google.common.collect.Sets;
+import com.primeton.dsp.datarelease.api.model.JoinPair;
+import com.primeton.dsp.datarelease.api.model.Table;
+import com.primeton.dsp.datarelease.api.model.WhereCause;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.commons.lang.StringUtils;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
/**
 * Generates the preview SQL shown when publishing a data service,
 * for a single table or for multiple joined tables.
 *
 * <pre>
 *
 * Created by zhaopx.
 * User: zhaopx
 * Date: 2020/3/27
 * Time: 17:15
 *
 * </pre>
 *
 * @author zhaopx
 */

@Slf4j
public abstract class SQLCreatorFactory {


    /**
     * Builds preview SQL for a single table.
     *
     * @param table the table (name, alias and selected columns)
     * @param conds WHERE conditions to apply
     * @return the generated SQL string
     */
    public abstract String singleTable(Table table, WhereCause[] conds);


    /**
     * Builds preview SQL for several joined tables.
     * ("mutil" appears to be a historical typo for "multi"; the name is
     * kept as-is for compatibility with existing callers.)
     *
     * @param tables all participating tables
     * @param joins pairwise join definitions between the tables
     * @param conds WHERE conditions to apply
     * @return the generated SQL string
     */
    public abstract String mutilTable(Table[] tables, JoinPair[] joins, WhereCause[] conds);


    /**
     * Creates a default implementation backed by SQLQuery (single table)
     * and SQLJoin (multi table).
     * @return a new factory instance
     */
    public static SQLCreatorFactory newInstance() {
        return new SQLCreatorFactory() {
            @Override
            public String singleTable(Table table, WhereCause[] conds) {
                // Single-table preview SQL.
                SQLQuery sqlQuery = new SQLQuery.Builder(table)
                        .where(conds)
                        .build();

                // The SQL is shown as a preview, so pretty-print it here.
                String showSQL = sqlQuery.show(true);
                log.info("c sql {}", showSQL);
                return showSQL;
            }

            @Override
            public String mutilTable(Table[] tables, JoinPair[] joins, WhereCause[] conds) {
                Map<String, SQLJoin.JoinTable> tableMapping = new HashMap<>();
                for(Table table : tables) {
                    tableMapping.put(table.getTableName(), new SQLJoin.JoinTable(table));
                }

                // Join conditions; if the same two tables are joined more
                // than once, the extra joins are demoted to WHERE conditions.
                List<JoinPair> joinList = new ArrayList<>();
                // WHERE conditions, including demoted join conditions.
                // NOTE(review): WhereCause does not override equals/hashCode,
                // so this HashSet dedupes by identity only — confirm whether
                // value-based dedup was intended.
                Set<WhereCause> condSet = Sets.newHashSet(conds);

                // When the same pair of tables is joined multiple times, keep
                // the first pair as the JOIN and turn the rest into WHERE.
                Map<String, JoinPair> joinPairCache = new HashMap<>();
                for(JoinPair jp : joins) {
                    // Cache key: left table name + right table name.
                    String cacheKey = StringUtils.upperCase(jp.getLeft() + jp.getRight());
                    if(joinPairCache.get(cacheKey) == null) {
                        joinPairCache.put(cacheKey, jp);
                        joinList.add(jp);
                    } else {
                        // A join between these tables already exists: turn
                        // this one into a WHERE condition instead.
                        WhereCause cause = new WhereCause();
                        cause.setTableName(jp.getLeft());
                        cause.setFieldName(jp.getLeftField());
                        cause.setToTableName(jp.getRight());
                        cause.setToFieldName(jp.getRightField());
                        cause.setOpera("=");
                        condSet.add(cause);
                    }
                }

                // NOTE(review): assumes joins is non-empty — joinList.get(0)
                // below throws IndexOutOfBoundsException otherwise.
                SQLJoin.Builder builder = null;
                if(joinList.size() == 1) {
                    // Exactly two tables joined.
                    SQLJoin.JoinTable left = tableMapping.get(joinList.get(0).getLeft());
                    SQLJoin.JoinTable right = tableMapping.get(joinList.get(0).getRight());

                    builder = new SQLJoin.Builder(left, joinList.get(0).getLeftField(),
                            right, joinList.get(0).getRightField(),
                            SQLJoin.JoinType.joinType(joinList.get(0).getJoinType()));
                } else {
                    // Three or more tables joined.
                    SQLJoin.JoinTable left = tableMapping.get(joinList.get(0).getLeft());
                    SQLJoin.JoinTable right = tableMapping.get(joinList.get(0).getRight());

                    builder = new SQLJoin.Builder(left, joinList.get(0).getLeftField(),
                            right, joinList.get(0).getRightField(),
                            SQLJoin.JoinType.joinType(joinList.get(0).getJoinType()));

                    for(int i = 1; i < joinList.size(); i++) {
                        JoinPair joinPair = joinList.get(i);
                        SQLJoin.JoinTable leftTable = tableMapping.get(joinPair.getLeft());
                        SQLJoin.JoinTable rightTable = tableMapping.get(joinPair.getRight());

                        // NOTE(review): original comment suggested re-creating
                        // the JoinTable here so alias renaming does not affect
                        // the cached instance — that copy is not made; confirm.
                        if(builder.isCachedTable(leftTable.getTableName())) {
                            // A JOIN B JOIN C: the pair is already in the
                            // expected left-to-right order.
                            builder.join(leftTable,
                                    joinPair.getLeftField(),
                                    rightTable,
                                    joinPair.getRightField(),
                                    SQLJoin.JoinType.joinType(joinPair.getJoinType()));
                        } else {
                            // A JOIN C plus B JOIN C: the generated join must
                            // read A JOIN C JOIN B, so swap the sides and
                            // mirror the join direction accordingly.
                            SQLJoin.JoinType joinType = SQLJoin.JoinType.joinType(joinPair.getJoinType());
                            SQLJoin.JoinType realJoinType = joinType;
                            switch (joinType) {
                                case LEFT_JOIN:
                                    realJoinType = SQLJoin.JoinType.RIGHT_JOIN;
                                    break;
                                case RIGHT_JOIN:
                                    realJoinType = SQLJoin.JoinType.LEFT_JOIN;
                                    break;
                                default:
                            }

                            // Swap left and right.
                            builder.join(
                                    rightTable,
                                    joinPair.getRightField(),
                                    leftTable,
                                    joinPair.getLeftField(),
                                    realJoinType);
                        }
                    }
                }

                // Append the WHERE conditions.
                builder.where(condSet);
                SQLJoin sql = builder.build();
                // NOTE(review): the original comment said line breaks must NOT
                // be added (they break downstream execution), yet show(true)
                // is called — confirm whether show(true) inserts line breaks.
                String resultSql = sql.show(true);
                log.info(resultSql);
                return resultSql;
            }
        };
    }
}

+ 590 - 0
entity-to-sql/sql/SQLJoin.java

@@ -0,0 +1,590 @@
+package com.primeton.dsp.datarelease.api.sql;
+
+import com.alibaba.druid.sql.SQLUtils;
+import com.google.common.base.Preconditions;
+import com.primeton.dsp.datarelease.api.model.Field;
+import com.primeton.dsp.datarelease.api.model.SelectField;
+import com.primeton.dsp.datarelease.api.model.Table;
+import com.primeton.dsp.datarelease.api.model.WhereCause;
+import lombok.NonNull;
+import net.sf.jsqlparser.JSQLParserException;
+import net.sf.jsqlparser.expression.Alias;
+import net.sf.jsqlparser.expression.BinaryExpression;
+import net.sf.jsqlparser.expression.DateValue;
+import net.sf.jsqlparser.expression.DoubleValue;
+import net.sf.jsqlparser.expression.Expression;
+import net.sf.jsqlparser.expression.LongValue;
+import net.sf.jsqlparser.expression.StringValue;
+import net.sf.jsqlparser.expression.TimestampValue;
+import net.sf.jsqlparser.expression.operators.conditional.AndExpression;
+import net.sf.jsqlparser.expression.operators.conditional.OrExpression;
+import net.sf.jsqlparser.expression.operators.relational.*;
+import net.sf.jsqlparser.parser.CCJSqlParserUtil;
+import net.sf.jsqlparser.schema.Column;
+import net.sf.jsqlparser.statement.select.*;
+import net.sf.jsqlparser.util.SelectUtils;
+import org.apache.commons.lang.StringUtils;
+
+import java.util.*;
+import java.util.stream.Collectors;
+
+/**
+ * <pre>
+ * 
+ * Created by zhaopx.
+ * User: zhaopx
+ * Date: 2019/2/21
+ * Time: 10:29
+ * Vendor: primeton.com
+ *
+ * </pre>
+ * 
+ * @author zhaopx
+ */
+public abstract class SQLJoin {
+
	/**
	 * Renders the built statement as a SQL string.
	 *
	 * @param format whether to pretty-print the output
	 * @return the SQL string
	 */
	public abstract String show(boolean format);
+
+	public static class Builder {
+
+		/**
+		 * 核心 Select
+		 */
+		Select select;
+
+		/**
+		 * 合并后的 All Fields
+		 */
+		List<SelectField> fields;
+
+		/**
+		 * 内部对表的缓存
+		 */
+		final Map<String, JoinTable> CACHED_TABLE = new HashMap<>();
+
+		
+		/**
+		 * 别名和表的缓存
+		 */
+		final Map<String, JoinTable> ALIAS_NAME_CACHED_TABLE = new HashMap<>();
+		
+		
		/**
		 * Joins two tables, merging their SELECT columns and building
		 * the initial SELECT ... JOIN ... ON statement.
		 *
		 * @param left
		 *            left table
		 * @param leftField
		 *            join column of the left table
		 * @param right
		 *            right table
		 * @param rightField
		 *            join column of the right table
		 * @param type
		 *            join type
		 */
		public Builder(JoinTable left, String leftField, JoinTable right, String rightField, JoinType type) {
			// Merge the columns to select: all columns, no duplicates.
			List<SelectField> fields = new ArrayList<>();
			// Column IDs seen so far (upper-cased), for de-duplication.
			Set<String> distictFields = new HashSet<>();

			// Collect the left table's columns, skipping duplicates.
			for(SelectField col : left.getTableFields()) {
				String tmpField = StringUtils.upperCase(col.getExpression().getColumnName());
				if(!distictFields.contains(tmpField)) {
					distictFields.add(tmpField);
					fields.add(col);
				}

			}

			// Collect the right table's columns, skipping duplicates.
			for(SelectField col : right.getTableFields()) {
				String tmpField = StringUtils.upperCase(col.getExpression().getColumnName());
				if(!distictFields.contains(tmpField)) {
					distictFields.add(tmpField);
					fields.add(col);
				}
			}

			ALIAS_NAME_CACHED_TABLE.put(StringUtils.upperCase(left.getAlias()), left);
			if(ALIAS_NAME_CACHED_TABLE.get(StringUtils.upperCase(right.getAlias())) != null) {
				// The right table's alias collides with an existing one.
				right.setAlias(right.getAlias()+"1"); // first collision, so suffix "1" is unique here
			}
			// Re-put under the (possibly renamed) alias.
			ALIAS_NAME_CACHED_TABLE.put(StringUtils.upperCase(right.getAlias()), right);

			// Build the SELECT list from the merged columns.
			Expression[] columns = (Expression[]) fields.toArray(new SelectField[fields.size()]);
			Select select = SelectUtils.buildSelectFromTableAndExpressions(
					left.getTable(), columns);
			Join join = SelectUtils.addJoin(select, right.getTable(), null);

			EqualsTo on = new EqualsTo();
			on.setLeftExpression(left.getJoinColumn(leftField));
			on.setRightExpression(right.getJoinColumn(rightField));

			join.setOnExpression(on);

			switch (type) {
			case INNER_JOIN:
				join.setInner(true);
				break;
			case LEFT_JOIN:
				join.setLeft(true);
				break;
			case RIGHT_JOIN:
				join.setRight(true);
				break;
			default:
				// FULL_OUT_JOIN and any other value fall through to FULL.
				join.setFull(true);
			}

			CACHED_TABLE.put(StringUtils.upperCase(left.getTableName()), left);
			CACHED_TABLE.put(StringUtils.upperCase(right.getTableName()), right);

			this.select = select;
			this.fields = fields;
		}
+
		/**
		 * Joins another table onto the current join result.
		 *
		 * @param leftJoinTable left side of the new join (usually already part of the result)
		 * @param leftField join column of the left table
		 * @param rightJoinTable right side of the new join (the table being added)
		 * @param rightField join column of the right table
		 * @param type join type
		 * @return this builder
		 */
		public Builder join(JoinTable leftJoinTable, String leftField, JoinTable rightJoinTable, String rightField, JoinType type) {
			JoinTable table1 = CACHED_TABLE.get(StringUtils.upperCase(leftJoinTable.getTableName()));
			if (table1 == null) {
				// The left table has not been joined yet; register it.
				table1 = leftJoinTable;
				CACHED_TABLE.put(StringUtils.upperCase(leftJoinTable.getTableName()), leftJoinTable);
			}
			
			// Check whether the new table's alias collides with an existing one.
			if(ALIAS_NAME_CACHED_TABLE.get(StringUtils.upperCase(rightJoinTable.getAlias())) != null) {
				// Alias already taken; suffix the alias-cache size to disambiguate.
				rightJoinTable.setAlias(rightJoinTable.getAlias()+ALIAS_NAME_CACHED_TABLE.size());
			}
			
			// Re-put under the (possibly renamed) alias.
			ALIAS_NAME_CACHED_TABLE.put(StringUtils.upperCase(rightJoinTable.getAlias()), rightJoinTable);

			try {
				// Column IDs already selected (upper-cased), for de-duplication.
				Set<String> distictFields = new HashSet<>();

				for (SelectField col : this.fields) {
					String tmpField = StringUtils.upperCase(col.getExpression().getColumnName());
					distictFields.add(tmpField);
				}

				// All columns the new table wants selected; may contain duplicates.
				List<SelectField> addTableFields = rightJoinTable.getTableFields();
				List<Column> tableFields = new ArrayList<>(addTableFields.size());
				// Keep only columns not already in the SELECT list.
				for(SelectField col : addTableFields) {
					String tmpField = StringUtils.upperCase(col.getExpression().getColumnName());
					// Only add columns we have not seen before.
					if(!distictFields.contains(tmpField)) {
						distictFields.add(tmpField);
						fields.add(col);
						tableFields.add(col.getExpression());
					}
				}

				SelectItem[] addField = new SelectItem[tableFields.size()];
				for (int i = 0; i < tableFields.size(); i++) {
					addField[i] = new SelectExpressionItem(
							CCJSqlParserUtil.parseExpression(tableFields.get(i)
									.getName(true)));
				}

				SelectBody selectBody = select.getSelectBody();
				((PlainSelect) selectBody).addSelectItems(addField);
			} catch (JSQLParserException e) {
				throw new IllegalStateException(e);
			}

			Join join = SelectUtils.addJoin(select, rightJoinTable.getTable(), null);

			EqualsTo on2 = new EqualsTo();
			on2.setLeftExpression(table1.getJoinColumn(leftField));
			on2.setRightExpression(rightJoinTable.getJoinColumn(rightField));

			join.setOnExpression(on2);
			switch (type) {
			case INNER_JOIN:
				join.setInner(true);
				break;
			case LEFT_JOIN:
				join.setLeft(true);
				break;
			case RIGHT_JOIN:
				join.setRight(true);
				break;
			default:
				// FULL_OUT_JOIN and any other value fall through to FULL.
				join.setFull(true);
			}

			CACHED_TABLE.put(StringUtils.upperCase(rightJoinTable.getTableName()), rightJoinTable);
			return this;
		}
+
+
		/**
		 * Returns whether the given table has already been joined into
		 * the current result (lookup is case-insensitive).
		 *
		 * @param tableName table name, must not be null
		 * @return true if the table is part of the current join
		 */
		public boolean isCachedTable(@NonNull  String tableName) {
			return CACHED_TABLE.get(StringUtils.upperCase(tableName)) != null;
		}
+
+
		/**
		 * Appends WHERE conditions. Once where() has been called,
		 * join() should not be called again.
		 *
		 * @param wheres conditions to append; may be null or empty
		 * @return this builder
		 */
		public Builder where(WhereCause... wheres) {
		    if(wheres == null || wheres.length == 0) {
		        // Nothing to add.
		        return this;
            }
            PlainSelect ps = (PlainSelect)select.getSelectBody();
            Expression where = ps.getWhere();
            if(where == null && wheres.length == 1) {
		        // Single condition, no existing WHERE: use it directly.
                JoinTable table = CACHED_TABLE.get(StringUtils.upperCase(wheres[0].getTableName()));
                Expression expr = buildExpression(table, wheres[0]);
                ps.setWhere(expr);
            } else if(where == null){
                // No existing WHERE: the first condition stands alone and
                // the remaining ones are chained onto it with AND/OR.
                JoinTable table = CACHED_TABLE.get(StringUtils.upperCase(wheres[0].getTableName()));
                Expression firstExpr = buildExpression(table, wheres[0]);

                WhereCause[] whereCauses1toEnd = new WhereCause[wheres.length - 1];
                System.arraycopy(wheres, 1, whereCauses1toEnd, 0, whereCauses1toEnd.length);

                ps.setWhere(buildWhereCause(firstExpr, whereCauses1toEnd));

            } else {
                // A WHERE already exists: chain every condition onto it.
                ps.setWhere(buildWhereCause(where, wheres));
            }
			return this;
		}
+
+
        /**
         * Recursively chains WHERE conditions onto an existing expression,
         * using each condition's own AND/OR connector.
         *
         * @param last the expression built so far
         * @param wheres one or more remaining conditions
         * @return the combined expression
         */
		private Expression buildWhereCause(Expression last, WhereCause... wheres) {
		    if(wheres.length == 1) {
                // Base case: attach the final condition and return.
                JoinTable table = CACHED_TABLE.get(StringUtils.upperCase(wheres[0].getTableName()));
                Expression expression = buildExpression(table, wheres[0]);
                BinaryExpression expr = null;
                if("or".equalsIgnoreCase(wheres[0].getCond())){
                    expr = new OrExpression(last, expression);
                } else {
                    expr = new AndExpression(last, expression);
                }
                return expr;
            }

            JoinTable table = CACHED_TABLE.get(StringUtils.upperCase(wheres[0].getTableName()));
            Expression addExpr = buildExpression(table, wheres[0]);

            BinaryExpression expr = null;
            if("or".equalsIgnoreCase(wheres[0].getCond())){
                expr = new OrExpression(last, addExpr);
            } else {
                expr = new AndExpression(last, addExpr);
            }

            WhereCause[] whereCauses1toEnd = new WhereCause[wheres.length - 1];
            System.arraycopy(wheres, 1, whereCauses1toEnd, 0, whereCauses1toEnd.length);

            // Recurse over the remaining conditions.
            return buildWhereCause(expr, whereCauses1toEnd);
        }
+
+
        /**
         * Converts a UI-selected condition into a jsqlparser expression.
         *
         * <p>A comma-separated value is treated as multi-valued: the
         * operator is forced to "in" unless it is already in/notin. Both
         * branches then delegate to buildSingleValue, which handles
         * in/notin by splitting the value again — the branches differ
         * only in the operator mutation, not in the call.</p>
         *
         * @param table the table the condition belongs to
         * @param cause the condition
         * @return the built expression
         */
		private Expression buildExpression(JoinTable table, WhereCause cause) {
			String[] mutilValue = cause.getValue() != null ? cause.getValue().split(",") : new String[]{};
			if (mutilValue.length > 1) {
				// Multi-valued: select * from table where id in('a', 'b').
				// Must be "in" or "notin"; otherwise force the operator to "in".
				String opera = cause.getOpera();
				if(!"in".equalsIgnoreCase(opera) && !"notin".equalsIgnoreCase(opera)) {
					cause.setOpera("in");
				}
				// Multi-value case.
				return buildSingleValue(table, cause);
			}
			// Single-value case.
			return buildSingleValue(table, cause);
		}
+
+
+		/**
+		 * 编译单值的条件
+		 * @param table
+		 * @param cause
+		 * @return
+		 */
+		private Expression buildSingleValue(JoinTable table, WhereCause cause) {
+			Expression valueExpr = null;
+			if(StringUtils.isNotBlank(cause.getToTableName()) && StringUtils.isNotBlank(cause.getToFieldName())) {
+				// 第二个表名称和表字段名都不为 null,则表达式的值为第二个表中的字段
+				// a.AGE > b.AGE
+				JoinTable tmpTable = this.CACHED_TABLE.get(StringUtils.upperCase(cause.getToTableName()));
+				valueExpr = new Column(tmpTable.getTable(), cause.getToFieldName());
+			} else if("INT".equalsIgnoreCase(cause.getType())) {
+				// 表达式为常量,但是值为数值类型,SQL 中数值不加引号
+				valueExpr = new LongValue(cause.getValue());
+			} else if("DOUBLE".equalsIgnoreCase(cause.getType())) {
+				// 表达式为常量,但是值为浮点类型,SQL 中数值不加引号
+				valueExpr = new DoubleValue(cause.getValue());
+			} else if("DATE".equalsIgnoreCase(cause.getType())) {
+				// 表达式为常量,但是值为日期类型,SQL 中数值不加引号
+				// 日期类型为:yyyy-[M]M-[d]d
+				valueExpr = new DateValue(cause.getValue());
+			} else if("DATETIME".equalsIgnoreCase(cause.getType())) {
+				// 表达式为常量,但是值为日期类型,SQL 中数值不加引号
+				// 日期类型为:yyyy-[M]M-[d]d HH:mm:ss
+				valueExpr = new TimestampValue(cause.getValue());
+			} else {
+				// 表达式值为常量,字符串, NAME = 'X'
+				valueExpr = new StringValue(cause.getValue());
+			}
+
+		    if("=".equals(cause.getOpera())) {
+                EqualsTo equals = new EqualsTo();
+                equals.setLeftExpression(new Column(table.getTable(), cause.getFieldName()));
+                equals.setRightExpression(valueExpr);
+                return equals;
+            } else if(">".equals(cause.getOpera())) {
+                GreaterThan greaterThan = new GreaterThan();
+                greaterThan.setLeftExpression(new Column(table.getTable(), cause.getFieldName()));
+                greaterThan.setRightExpression(valueExpr);
+                return greaterThan;
+            } else if(">=".equals(cause.getOpera())) {
+                GreaterThanEquals greaterThanEquals = new GreaterThanEquals();
+                greaterThanEquals.setLeftExpression(new Column(table.getTable(), cause.getFieldName()));
+                greaterThanEquals.setRightExpression(valueExpr);
+                return greaterThanEquals;
+            } else if("<".equals(cause.getOpera())) {
+                MinorThan minorThan = new MinorThan();
+                minorThan.setLeftExpression(new Column(table.getTable(), cause.getFieldName()));
+                minorThan.setRightExpression(valueExpr);
+                return minorThan;
+            } else if("<=".equals(cause.getOpera())) {
+                MinorThanEquals minorThanEquals = new MinorThanEquals();
+                minorThanEquals.setLeftExpression(new Column(table.getTable(), cause.getFieldName()));
+                minorThanEquals.setRightExpression(valueExpr);
+                return minorThanEquals;
+            } else if("<>".equals(cause.getOpera()) || "!=".equals(cause.getOpera())) {
+                NotEqualsTo notEqualsTo = new NotEqualsTo();
+                notEqualsTo.setLeftExpression(new Column(table.getTable(), cause.getFieldName()));
+                notEqualsTo.setRightExpression(valueExpr);
+                return notEqualsTo;
+            } else if("in".equalsIgnoreCase(cause.getOpera())) {
+				String[] mutilValue = cause.getValue().split(",");
+				InExpression inExpression = new InExpression();
+				inExpression.setLeftExpression(new Column(table.getTable(), cause.getFieldName()));
+
+				MultiExpressionList list = new MultiExpressionList();
+				List<Expression> inVals = new ArrayList<>();
+				for (String val : mutilValue) {
+					inVals.add(new StringValue(val));
+				}
+				list.addExpressionList(new ExpressionList(inVals));
+				inExpression.setRightItemsList(list);
+				return inExpression;
+			} else if("notin".equalsIgnoreCase(cause.getOpera())) {
+				String[] mutilValue = cause.getValue().split(",");
+				InExpression inExpression = new InExpression();
+				inExpression.setLeftExpression(new Column(table.getTable(), cause.getFieldName()));
+
+				// not 否定条件
+				inExpression.setNot(true);
+
+				MultiExpressionList list = new MultiExpressionList();
+				List<Expression> inVals = new ArrayList<>();
+				for (String val : mutilValue) {
+					inVals.add(new StringValue(val));
+				}
+				list.addExpressionList(new ExpressionList(inVals));
+				inExpression.setRightItemsList(list);
+				return inExpression;
+			} else if("l".equalsIgnoreCase(cause.getOpera())) {
+				Preconditions.checkNotNull(cause.getValue(), " like value must not be blank");
+				// 如果自带 %,则说明需要匹配值的 %,用转义
+				String likeValue = cause.getValue().replaceAll("%", "\\%");
+				valueExpr = new StringValue("%"+likeValue+"%");
+				LikeExpression likeExpression = new LikeExpression();
+				likeExpression.setLeftExpression(new Column(table.getTable(), cause.getFieldName()));
+				likeExpression.setRightExpression(valueExpr);
+				return likeExpression;
+			}
+
+
+            EqualsTo equals = new EqualsTo();
+            equals.setLeftExpression(new Column(table.getTable(), cause.getFieldName()));
+            equals.setRightExpression(valueExpr);
+            return equals;
+        }
+
+
+        /**
+         * 设置 where 调用条件。调用 where 后就不应该再调用 join 了
+         * @return
+         */
+        public Builder where(Collection<WhereCause> wheres) {
+            where(wheres.toArray(new WhereCause[wheres.size()]));
+            return this;
+        }
+
+		/**
+		 * 创建 SQL
+		 * 
+		 * @return
+		 */
+		public SQLJoin build() {
+			return new SQLJoin() {
+				@Override
+				public String show(boolean format) {
+				    if(!format) {
+                        return select.toString();
+                    }
+                    String sql = select.toString();
+					sql = SQLUtils.formatMySql(sql);
+                    return sql;
+				}
+			};
+		}
+	}
+
+	
+	/**
+	 * 数据表 
+	 * @author zhaopx
+	 *
+	 */
+	public static class JoinTable extends Table {
+
+	
+		/**
+		 * 内部 sqlparser 的表结构
+		 */
+		final net.sf.jsqlparser.schema.Table table;
+
+		public JoinTable(Table table){
+			this(table.getTableName(), table.getAlias(), table.getFields());
+		}
+		
+		
+		/**
+		 * 构造器
+		 * 
+		 * @param tableName
+		 *            表名称
+		 * @param alias
+		 *            关联时的别名
+		 * @param fields
+		 *            关联后 select 的表字段,当前表字段
+		 */
+		public JoinTable(String tableName, String alias, List<Field> fields) {
+			super(tableName, alias, fields);
+			table = new net.sf.jsqlparser.schema.Table(tableName);
+			table.setAlias(new Alias(alias, false));
+		}
+
+
+		public net.sf.jsqlparser.schema.Table getTable() {
+			return table;
+		}
+
+		public List<SelectField> getTableFields() {
+			if (getFields() == null) {
+				return Collections.emptyList();
+			}
+			List<SelectField> cols = new ArrayList<>(getFields().size());
+			for (Field col : getFields()) {
+				Column column = new Column(table, col.getFieldName());
+				SelectField exprs = new SelectField(column);
+				exprs.setAlias(new Alias(col.getAlias(), true));
+				cols.add(exprs);
+			}
+			return cols;
+		}
+
+		public Column getJoinColumn(String joinField) {
+			return new Column(table, joinField);
+		}
+
+
+		@Override
+		public void setAlias(String alias) {
+			super.setAlias(alias);
+			table.setAlias(new Alias(alias, false));
+		}
+	}
+
+	
+	/**
+	 * 关联类型
+	 * @author zhaopx
+	 *
+	 */
+	public static enum JoinType {
+		/**
+		 * 内连接
+		 */
+		INNER_JOIN,
+
+		/**
+		 * 左外连接
+		 */
+		LEFT_JOIN,
+
+		/**
+		 * 右外连接
+		 */
+		RIGHT_JOIN,
+
+		/**
+		 * 全连接
+		 */
+		FULL_OUTER_JOIN;
+		
+		
+		public static JoinType joinType(String joinType) {
+			if("INNER_JOIN".equalsIgnoreCase(joinType)) {
+				return INNER_JOIN;
+			} else if("LEFT_JOIN".equalsIgnoreCase(joinType)) {
+				return LEFT_JOIN;
+			} else if("RIGHT_JOIN".equalsIgnoreCase(joinType)) {
+				return RIGHT_JOIN;
+			} else if("FULL_OUTER_JOIN".equalsIgnoreCase(joinType)) {
+				return FULL_OUTER_JOIN;
+			}
+			return INNER_JOIN;
+		}
+	}
+
+}

+ 353 - 0
entity-to-sql/sql/SQLQuery.java

@@ -0,0 +1,353 @@
+package com.primeton.dsp.datarelease.api.sql;
+
+import com.alibaba.druid.sql.SQLUtils;
+import com.google.common.base.Preconditions;
+import com.primeton.dsp.datarelease.api.model.Field;
+import com.primeton.dsp.datarelease.api.model.Table;
+import com.primeton.dsp.datarelease.api.model.WhereCause;
+import net.sf.jsqlparser.expression.Alias;
+import net.sf.jsqlparser.expression.BinaryExpression;
+import net.sf.jsqlparser.expression.DateValue;
+import net.sf.jsqlparser.expression.DoubleValue;
+import net.sf.jsqlparser.expression.Expression;
+import net.sf.jsqlparser.expression.LongValue;
+import net.sf.jsqlparser.expression.StringValue;
+import net.sf.jsqlparser.expression.TimestampValue;
+import net.sf.jsqlparser.expression.operators.conditional.AndExpression;
+import net.sf.jsqlparser.expression.operators.conditional.OrExpression;
+import net.sf.jsqlparser.expression.operators.relational.EqualsTo;
+import net.sf.jsqlparser.expression.operators.relational.ExpressionList;
+import net.sf.jsqlparser.expression.operators.relational.GreaterThan;
+import net.sf.jsqlparser.expression.operators.relational.GreaterThanEquals;
+import net.sf.jsqlparser.expression.operators.relational.InExpression;
+import net.sf.jsqlparser.expression.operators.relational.LikeExpression;
+import net.sf.jsqlparser.expression.operators.relational.MinorThan;
+import net.sf.jsqlparser.expression.operators.relational.MinorThanEquals;
+import net.sf.jsqlparser.expression.operators.relational.MultiExpressionList;
+import net.sf.jsqlparser.expression.operators.relational.NotEqualsTo;
+import net.sf.jsqlparser.schema.Column;
+import net.sf.jsqlparser.statement.select.PlainSelect;
+import net.sf.jsqlparser.statement.select.Select;
+import net.sf.jsqlparser.statement.select.SelectExpressionItem;
+import net.sf.jsqlparser.statement.select.SelectItem;
+import net.sf.jsqlparser.util.SelectUtils;
+import org.apache.commons.lang.StringUtils;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+
+/**
+ *
+ * 单表服务发布,SQL 生成预览
+ *
+ * <pre>
+ *
+ * Created by zhaopx.
+ * User: zhaopx
+ * Date: 2020/3/25
+ * Time: 17:50
+ *
+ * </pre>
+ *
+ * @author zhaopx
+ */
+public abstract class SQLQuery {
+
+
+    /**
+     * 输出 SQL
+     *
+     * @return 返回 sql str
+     */
+    public abstract String show(boolean format);
+
+
+
+    public static class Builder {
+
+
+        /**
+         * 核心 Select
+         */
+        Select select;
+
+
+        public Builder(Table table) {
+            net.sf.jsqlparser.schema.Table t = new net.sf.jsqlparser.schema.Table(table.getTableName());
+            t.setAlias(new Alias(table.getAlias(), false));
+
+            Select select = null;
+
+            // select x from t
+            if(table.getFields().isEmpty()) {
+                select = SelectUtils.buildSelectFromTable(t);
+            } else {
+                List<Field> fields = table.getFields();
+                SelectItem[] items = new SelectItem[fields.size()];
+                int i = 0;
+                for (Field field : fields) {
+                    SelectExpressionItem item = new SelectExpressionItem(new Column(t, field.getFieldName()));
+                    item.setAlias(new Alias(field.getAlias(), true));
+                    items[i] = item;
+                    i++;
+                }
+                select = SelectUtils.buildSelectFromTableAndSelectItems(t, items);
+            }
+
+            this.select = select;
+        }
+
+
+        public Builder(Table table, WhereCause... conds) {
+            this(table);
+
+            // 处理条件
+            where(conds);
+        }
+
+
+        /**
+         * 设置 where 调用条件。调用 where 后就不应该再调用 join 了
+         * @return
+         */
+        public Builder where(WhereCause... wheres) {
+            if(wheres == null || wheres.length == 0) {
+                // 没有可加的条件
+                return this;
+            }
+            PlainSelect ps = (PlainSelect)select.getSelectBody();
+            Expression where = ps.getWhere();
+            net.sf.jsqlparser.schema.Table table = (net.sf.jsqlparser.schema.Table)ps.getFromItem();
+            if(where == null && wheres.length == 1) {
+                // 一个条件,就这样了。
+
+                Expression expr = buildExpression(table, wheres[0]);
+                ps.setWhere(expr);
+            } else if(where == null){
+                // where is null,wheres 第一个不加 and,后续都加 and。
+                Expression firstExpr = buildExpression(table, wheres[0]);
+
+                WhereCause[] whereCauses1toEnd = new WhereCause[wheres.length - 1];
+                System.arraycopy(wheres, 1, whereCauses1toEnd, 0, whereCauses1toEnd.length);
+
+                ps.setWhere(buildWhereCause(table, firstExpr, whereCauses1toEnd));
+
+            } else {
+                // where is not null,第一个条件就需要加 and
+                ps.setWhere(buildWhereCause(table, where, wheres));
+            }
+            return this;
+        }
+
+
+
+        /**
+         * 创建循环的 where 条件
+         * @param wheres 一个或者多个 where
+         * @return
+         */
+        private Expression buildWhereCause(net.sf.jsqlparser.schema.Table table, Expression last, WhereCause... wheres) {
+            if(wheres.length == 1) {
+                Expression expression = buildExpression(table, wheres[0]);
+                BinaryExpression expr = null;
+                if("or".equalsIgnoreCase(wheres[0].getCond())){
+                    expr = new OrExpression(last, expression);
+                } else {
+                    expr = new AndExpression(last, expression);
+                }
+                return expr;
+            }
+
+            Expression addExpr = buildExpression(table, wheres[0]);
+
+            BinaryExpression expr = null;
+            if("or".equalsIgnoreCase(wheres[0].getCond())){
+                expr = new OrExpression(last, addExpr);
+            } else {
+                expr = new AndExpression(last, addExpr);
+            }
+
+            WhereCause[] whereCauses1toEnd = new WhereCause[wheres.length - 1];
+            System.arraycopy(wheres, 1, whereCauses1toEnd, 0, whereCauses1toEnd.length);
+
+            // 递归处理每一个表达式
+            return buildWhereCause(table, expr, whereCauses1toEnd);
+        }
+
+
+
+        /**
+         * 根据 where 条件,把前台选择的条件转为 sql 支持的结构。
+         * @param table 表名称
+         * @param cause 条件
+         * @return
+         */
+        private Expression buildExpression(net.sf.jsqlparser.schema.Table table, WhereCause cause) {
+            String[] mutilValue = cause.getValue() != null ? cause.getValue().split(",") : new String[]{};
+            if (mutilValue.length > 1) {
+                // 多值的情况 select * from table where id in('a', 'b')
+                // 多值的必须是 in 或者 notin 的情况,如果不是,强制改变语句为 in 的条件
+                String opera = cause.getOpera();
+                if(!"in".equalsIgnoreCase(opera) && !"notin".equalsIgnoreCase(opera)) {
+                    cause.setOpera("in");
+                }
+                // 多个值的情况
+                return buildSingleValue(table, cause);
+            }
+            // 单值的条件
+            return buildSingleValue(table, cause);
+        }
+
+
+
+        /**
+         * 编译单值的条件
+         * @param table
+         * @param cause
+         * @return
+         */
+        private Expression buildSingleValue(net.sf.jsqlparser.schema.Table table, WhereCause cause) {
+            Expression valueExpr = null;
+            if(StringUtils.isNotBlank(cause.getToFieldName())) {
+                // 第二个表名称和表字段名都不为 null,则表达式的值为第二个表中的字段
+                // a.AGE > a.AGE2
+                valueExpr = new Column(table, cause.getToFieldName());
+            } else if("INT".equalsIgnoreCase(cause.getType())) {
+                // 表达式为常量,但是值为数值类型,SQL 中数值不加引号
+                valueExpr = new LongValue(cause.getValue());
+            } else if("DOUBLE".equalsIgnoreCase(cause.getType())) {
+                // 表达式为常量,但是值为浮点类型,SQL 中数值不加引号
+                valueExpr = new DoubleValue(cause.getValue());
+            } else if("DATE".equalsIgnoreCase(cause.getType())) {
+                // 表达式为常量,但是值为日期类型,SQL 中数值不加引号
+                // 日期类型为:yyyy-[M]M-[d]d
+                valueExpr = new DateValue(cause.getValue());
+            } else if("DATETIME".equalsIgnoreCase(cause.getType())) {
+                // 表达式为常量,但是值为日期类型,SQL 中数值不加引号
+                // 日期类型为:yyyy-[M]M-[d]d HH:mm:ss
+                valueExpr = new TimestampValue(cause.getValue());
+            } else {
+                // 表达式值为常量,字符串, NAME = 'X'
+                valueExpr = new StringValue(cause.getValue());
+            }
+
+            if("=".equals(cause.getOpera())) {
+                EqualsTo equals = new EqualsTo();
+                equals.setLeftExpression(new Column(table, cause.getFieldName()));
+                equals.setRightExpression(valueExpr);
+                return equals;
+            } else if(">".equals(cause.getOpera())) {
+                GreaterThan greaterThan = new GreaterThan();
+                greaterThan.setLeftExpression(new Column(table, cause.getFieldName()));
+                greaterThan.setRightExpression(valueExpr);
+                return greaterThan;
+            } else if(">=".equals(cause.getOpera())) {
+                GreaterThanEquals greaterThanEquals = new GreaterThanEquals();
+                greaterThanEquals.setLeftExpression(new Column(table, cause.getFieldName()));
+                greaterThanEquals.setRightExpression(valueExpr);
+                return greaterThanEquals;
+            } else if("<".equals(cause.getOpera())) {
+                MinorThan minorThan = new MinorThan();
+                minorThan.setLeftExpression(new Column(table, cause.getFieldName()));
+                minorThan.setRightExpression(valueExpr);
+                return minorThan;
+            } else if("<=".equals(cause.getOpera())) {
+                MinorThanEquals minorThanEquals = new MinorThanEquals();
+                minorThanEquals.setLeftExpression(new Column(table, cause.getFieldName()));
+                minorThanEquals.setRightExpression(valueExpr);
+                return minorThanEquals;
+            } else if("<>".equals(cause.getOpera()) || "!=".equals(cause.getOpera())) {
+                NotEqualsTo notEqualsTo = new NotEqualsTo();
+                notEqualsTo.setLeftExpression(new Column(table, cause.getFieldName()));
+                notEqualsTo.setRightExpression(valueExpr);
+                return notEqualsTo;
+            } else if("in".equalsIgnoreCase(cause.getOpera())) {
+                String[] mutilValue = cause.getValue().split(",");
+                InExpression inExpression = new InExpression();
+                inExpression.setLeftExpression(new Column(table, cause.getFieldName()));
+
+                MultiExpressionList list = new MultiExpressionList();
+                List<Expression> inVals = new ArrayList<>();
+                for (String val : mutilValue) {
+                    inVals.add(new StringValue(val));
+                }
+                list.addExpressionList(new ExpressionList(inVals));
+                inExpression.setRightItemsList(list);
+                return inExpression;
+            } else if("notin".equalsIgnoreCase(cause.getOpera())) {
+                String[] mutilValue = cause.getValue().split(",");
+                InExpression inExpression = new InExpression();
+                inExpression.setLeftExpression(new Column(table, cause.getFieldName()));
+
+                // not 否定条件
+                inExpression.setNot(true);
+
+                MultiExpressionList list = new MultiExpressionList();
+                List<Expression> inVals = new ArrayList<>();
+                for (String val : mutilValue) {
+                    inVals.add(new StringValue(val));
+                }
+                list.addExpressionList(new ExpressionList(inVals));
+                inExpression.setRightItemsList(list);
+                return inExpression;
+            } else if("l".equalsIgnoreCase(cause.getOpera())) {
+                Preconditions.checkNotNull(cause.getValue(), " like value must not be blank");
+                // 如果自带 %,则说明需要匹配值的 %,用转义
+                String likeValue = cause.getValue().replaceAll("%", "\\%");
+                valueExpr = new StringValue("%"+likeValue+"%");
+                LikeExpression likeExpression = new LikeExpression();
+                likeExpression.setLeftExpression(new Column(table, cause.getFieldName()));
+                likeExpression.setRightExpression(valueExpr);
+                return likeExpression;
+            }
+
+
+            EqualsTo equals = new EqualsTo();
+            equals.setLeftExpression(new Column(table, cause.getFieldName()));
+            equals.setRightExpression(valueExpr);
+            return equals;
+        }
+
+
+        /**
+         * 设置 where 调用条件。调用 where 后就不应该再调用 join 了
+         * @return
+         */
+        public Builder where(Collection<WhereCause> wheres) {
+            where(wheres.toArray(new WhereCause[wheres.size()]));
+            return this;
+        }
+
+
+        /**
+         * 创建 SQL
+         *
+         * @return
+         */
+        public SQLQuery build() {
+            return new SQLQuery() {
+                @Override
+                public String show(boolean format) {
+                    if(!format) {
+                        return select.toString();
+                    }
+                    String sql = select.toString();
+                    // 美化一下 SQL
+                    sql = SQLUtils.formatMySql(sql);
+                    return sql;
+                }
+            };
+        }
+    }
+
+
+    public static void main(String[] args) {
+        StringValue valueExpr = new StringValue("%王%");
+        LikeExpression likeExpression = new LikeExpression();
+        likeExpression.setLeftExpression(new Column("NAME"));
+        likeExpression.setRightExpression(valueExpr);
+
+        System.out.println(likeExpression.toString());
+    }
+}

+ 66 - 0
hadoop-auth/AuthPrincipal.java

@@ -0,0 +1,66 @@
+package com.primeton.dsp.datarelease.data.bdata;
+
+import java.io.File;
+
+/**
+ *
+ * Hive Hbase 租户认证信息
+ *
+ * <pre>
+ *
+ * Created by zhaopx.
+ * User: zhaopx
+ * Date: 2020/4/20
+ * Time: 10:52
+ *
+ * </pre>
+ *
+ * @author zhaopx
+ */
public  abstract class AuthPrincipal {


    /**
     * Returns the tenant's principal name.
     * @return the full principal name
     */
    public abstract String getPrincipal();

    /**
     * Returns a human-readable description of the tenant.
     * @return the description string
     */
    public abstract String getPrincipalDesc();

    /**
     * Returns the tenant's user keytab file.
     * @return the keytab file; implementations may return null when absent
     */
    public abstract File getUserKeytabFile();


    /**
     * Returns the tenant's krb5 configuration file.
     * @return the krb5 file; implementations may return null when absent
     */
    public abstract File getKrb5File();


    /**
     * Returns the tenant's hive client properties file (FusionInsight specific).
     * @return the hiveclient.properties file; implementations may return null when absent
     */
    public abstract File getHiveClientFile();


    /** Returns the tenant's core-site.xml; implementations may return null when absent. */
    public abstract File getCoreSite();


    /** Returns the tenant's hdfs-site.xml; implementations may return null when absent. */
    public abstract File getHdfsSite();


    /** Returns the tenant's hive-site.xml; implementations may return null when absent. */
    public abstract File getHiveSite();


    /** Returns the tenant's hbase-site.xml; implementations may return null when absent. */
    public abstract File getHBaseSite();
}

+ 212 - 0
hadoop-auth/AuthPrincipalCreator.java

@@ -0,0 +1,212 @@
+package com.primeton.dsp.datarelease.data.bdata;
+
import lombok.NonNull;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.SystemUtils;

import java.io.File;
import java.util.Arrays;
import java.util.Collections;
import java.util.Set;
import java.util.stream.Collectors;
+
+/**
+ *
+ * Hive HBase 租户验证
+ *
+ * <pre>
+ *
+ * Created by zhaopx.
+ * User: zhaopx
+ * Date: 2020/4/20
+ * Time: 10:53
+ *
+ * </pre>
+ *
+ * @author zhaopx
+ */
+@Slf4j
+public final class AuthPrincipalCreator {
+
+
+    /**
+     * 基础路径
+     */
+    private final String basePath;
+
+
+    public AuthPrincipalCreator(String basePath) {
+        File basePathFile = new File(basePath);
+        if(!basePathFile.exists()) {
+            if(!basePathFile.mkdirs()) {
+                throw new IllegalStateException("目录不存在, 无法创建!Cause: dir: " + basePath + " not found!");
+            }
+        }
+        this.basePath = basePathFile.getAbsolutePath();
+    }
+
+
+    /**
+     * 采用 EOS 8 的外置目录
+     * @return
+     */
+    public static AuthPrincipalCreator useDataReleaseConf(String basePathFirst) {
+        // 采用 传来的 地址
+        String externalDir = basePathFirst;
+        if(StringUtils.isNotBlank(basePathFirst) && new File(basePathFirst).exists()) {
+            // 如果穿来的地址存在则用传来的地址
+            return new AuthPrincipalCreator(externalDir);
+        }
+
+        // 不存在则使用 datarelease_home 下的 conf/principal 的地址
+        externalDir = System.getenv("DATARELEASE_HOME");
+        if(StringUtils.isBlank(externalDir)) {
+            externalDir = System.getProperty("datarelease.home");
+        }
+
+        if(StringUtils.isBlank(externalDir)) {
+            externalDir = "./";
+        }
+        String principalBasePath = new File(externalDir, "conf/principal").getAbsolutePath();
+        log.info("use principal dir: {}", principalBasePath);
+        return new AuthPrincipalCreator(principalBasePath);
+    }
+
+    /**
+     * 返回该基础目录下所有的租户
+     * @return
+     */
+    public final Set<String> listPrincipals() {
+        // 返回基础目录下所有不以点开头的文件
+        return Arrays.stream(new File(basePath).listFiles(it -> !it.getName().startsWith(".")))
+                .map(it->it.getName())
+                .collect(Collectors.toSet());
+    }
+
+
+    /**
+     * 返回租户认证信息
+     * @param principal
+     * @return
+     */
+    public final AuthPrincipal getKerberosPrincipal(@NonNull String principal) {
+        String principalName = principal;
+        if(principal.contains("/") || principal.contains("@")) {
+            // 取第一个字符就是登陆名称
+            principalName = principal.split("/|@", -1)[0];
+        }
+        // 先判断是否存在租户
+        if(!existsPrincipal(principalName)) {
+            throw new IllegalStateException("不存在该租户【" + principal + "】。");
+        }
+        return new KerberosPrincipalImpl(principalName, principal, new File(basePath, principalName).getAbsolutePath());
+    }
+
+
+    /**
+     * 返回是否存在该租户
+     * @param principal
+     * @return
+     */
+    public final boolean existsPrincipal(String principal) {
+        return new File(basePath, principal).exists();
+    }
+
+
+    /**
+     * 返回基础路径
+     * @return
+     */
+    public String getAuthBasePath() {
+        return basePath;
+    }
+}
+
+class KerberosPrincipalImpl extends AuthPrincipal {
+
+
+    /**
+     * 租户名,目录名
+     */
+    String principalName;
+
+
+    /**
+     * 租户全称
+     */
+    String principal;
+
+
+    /**
+     * 租户目录
+     */
+    String principalWork;
+
+
+    public KerberosPrincipalImpl(String principalName, String principal, String principalWork) {
+        this.principalName = principalName;
+        this.principal = principal;
+        this.principalWork = principalWork;
+    }
+
+    @Override
+    public String getPrincipal() {
+        return this.principal;
+    }
+
+    @Override
+    public String getPrincipalDesc() {
+        return "TENANT:"+this.principal;
+    }
+
+    @Override
+    public File getUserKeytabFile() {
+        File userKeytabFile = new File(principalWork, "user.keytab");
+        return userKeytabFile.exists() ? userKeytabFile : null;
+    }
+
+    @Override
+    public File getKrb5File() {
+        if(SystemUtils.IS_OS_WINDOWS) {
+            // windows krb5File 是 ini 结尾
+            File krb5File = new File(principalWork, "krb5.ini");
+            if(krb5File.exists()) {
+                return krb5File;
+            }
+        }
+
+        // 其他系统,如果win系统不存在ini也以 conf 检测一次
+        File krb5File = new File(principalWork, "krb5.conf");
+        return krb5File.exists() ? krb5File : null;
+    }
+
+    @Override
+    public File getHiveClientFile() {
+        File hiveClientPropFile = new File(principalWork, "hiveclient.properties");
+        return hiveClientPropFile.exists() ? hiveClientPropFile : null;
+    }
+
+    @Override
+    public File getCoreSite() {
+        File coreSiteFile = new File(principalWork, "core-site.xml");
+        return coreSiteFile.exists() ? coreSiteFile : null;
+    }
+
+    @Override
+    public File getHdfsSite() {
+        File hdfsSiteFile = new File(principalWork, "hdfs-site.xml");
+        return hdfsSiteFile.exists() ? hdfsSiteFile : null;
+    }
+
+    @Override
+    public File getHiveSite() {
+        File hiveSiteFile = new File(principalWork, "hive-site.xml");
+        return hiveSiteFile.exists() ? hiveSiteFile : null;
+    }
+
+    @Override
+    public File getHBaseSite() {
+        File hbaseSiteFile = new File(principalWork, "hbase-site.xml");
+        return hbaseSiteFile.exists() ? hbaseSiteFile : null;
+    }
+}

+ 102 - 0
hadoop-auth/FIHiveConnectionServiceImpl.java

@@ -0,0 +1,102 @@
+package com.primeton.dsp.datarelease.data.bdata;
+
+
+import com.primeton.dsp.datarelease.server.model.DspHiveResource;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.conf.HiveConf;
+
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.util.Set;
+
+/**
+ *
+ * Hive Kerberos 认证方式获得连接
+ *
+ *
+ * <pre>
+ *
+ * Created by zhaopx.
+ * User: zhaopx
+ * Date: 2020/4/22
+ * Time: 18:02
+ *
+ * </pre>
+ *
+ * @author zhaopx
+ */
+@Slf4j
+public class FIHiveConnectionServiceImpl implements HiveConnectionService {
+
+
+    /**
+     * Hive 数据源
+     */
+    final DspHiveResource hiveResource;
+
+
+
+
+    private HiveHelper hiveHelper;
+
+
+
+    public FIHiveConnectionServiceImpl(DspHiveResource hiveResource) {
+        this.hiveResource = hiveResource;
+    }
+
+    @Override
+    public boolean doAuth() {
+        // 认证传过来
+        AuthPrincipalCreator authPrincipalCreator = AuthPrincipalCreator.useDataReleaseConf(hiveResource.getAuthBasePath());
+        Set<String> principals = authPrincipalCreator.listPrincipals();
+        log.info("find existed principals: {}", principals);
+        AuthPrincipal kerberosPrincipal = authPrincipalCreator.getKerberosPrincipal(hiveResource.getHiveDbUser());
+
+        String userKeytabFile = kerberosPrincipal.getUserKeytabFile().getAbsolutePath();
+        String krb5File = kerberosPrincipal.getKrb5File().getAbsolutePath();
+        String krbUser = kerberosPrincipal.getPrincipal();
+        String hiveclientPropFile = kerberosPrincipal.getHiveClientFile().getAbsolutePath();
+
+        // 分别加载 core、hdfs、hive site 文件
+        Configuration conf = new Configuration();
+
+        try {
+            if (kerberosPrincipal.getCoreSite() != null) {
+                conf.addResource(kerberosPrincipal.getCoreSite().toURL());
+                log.info("add config: {}", kerberosPrincipal.getCoreSite().getAbsolutePath());
+            }
+
+            if (kerberosPrincipal.getHdfsSite() != null) {
+                conf.addResource(kerberosPrincipal.getHdfsSite().toURL());
+                log.info("add config: {}", kerberosPrincipal.getHdfsSite().getAbsolutePath());
+            }
+
+            if (kerberosPrincipal.getHiveSite() != null) {
+                conf.addResource(kerberosPrincipal.getHiveSite().toURL());
+                log.info("add config: {}", kerberosPrincipal.getHiveSite().getAbsolutePath());
+            }
+        } catch (Exception e) {
+            throw new IllegalStateException(e);
+        }
+
+        try {
+            this.hiveHelper = new HiveHelper(conf, hiveclientPropFile, krbUser, userKeytabFile, krb5File);
+            log.info("hive fusioninsight 认证通过。");
+            return true;
+        } catch (Exception e) {
+            throw new SecurityException("FI 认证失败。", e);
+        }
+    }
+
+    @Override
+    public Connection getConnection() throws SQLException {
+        try {
+            Class.forName("org.apache.hive.jdbc.HiveDriver");
+        } catch (ClassNotFoundException e) {
+            throw new SQLException("找不到Hive驱动:org.apache.hive.jdbc.HiveDriver.", e);
+        }
+        return hiveHelper.getPoolConnection();
+    }
+}

+ 141 - 0
hadoop-auth/FiHBaseConnectionServiceImpl.java

@@ -0,0 +1,141 @@
+package com.primeton.dsp.datarelease.data.bdata;
+
+
+import com.primeton.dsp.datarelease.server.model.DspHbaseResource;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.ConnectionFactory;
+import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.exceptions.HBaseException;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.util.Set;
+
+/**
+ *
+ * 华为 FI  HBASE 认证方式获得连接
+ *
+ *
+ * <pre>
+ *
+ * Created by zhaopx.
+ * User: zhaopx
+ * Date: 2020/4/22
+ * Time: 18:02
+ *
+ * </pre>
+ *
+ * @author zhaopx
+ */
+@Slf4j
+public class FiHBaseConnectionServiceImpl implements HBaseConnectionService, Closeable {
+
+
+
+    private static final String ZOOKEEPER_DEFAULT_LOGIN_CONTEXT_NAME = "Client";
+    private static final String ZOOKEEPER_SERVER_PRINCIPAL_KEY = "zookeeper.server.principal";
+    private static final String ZOOKEEPER_DEFAULT_SERVER_PRINCIPAL = "zookeeper/hadoop.hadoop.com";
+
+
+    /**
+     * Hive 数据源
+     */
+    final DspHbaseResource hbaseResource;
+
+    /**
+     * HBase 链接
+     */
+    Connection connection;
+
+
+    public FiHBaseConnectionServiceImpl(DspHbaseResource hbaseResource) {
+        this.hbaseResource = hbaseResource;
+    }
+
+    @Override
+    public boolean doAuth() {
+        //KrbUser = "hadoop/cdh-node1@HADOOP.COM";
+        AuthPrincipalCreator authPrincipalCreator = AuthPrincipalCreator.useDataReleaseConf(hbaseResource.getAuthBasePath());
+        Set<String> principals = authPrincipalCreator.listPrincipals();
+        log.info("find existed principals: {}", principals);
+        AuthPrincipal kerberosPrincipal = authPrincipalCreator.getKerberosPrincipal(hbaseResource.getAuthUser());
+
+        String userKeytab = kerberosPrincipal.getUserKeytabFile().getAbsolutePath();
+        String krb5File = kerberosPrincipal.getKrb5File().getAbsolutePath();
+        String krbUser = kerberosPrincipal.getPrincipal();
+
+        // 分别加载 core、hdfs、hbase site 文件
+        Configuration conf = new Configuration();
+        try {
+            if (kerberosPrincipal.getCoreSite() != null) {
+                conf.addResource(kerberosPrincipal.getCoreSite().toURL());
+                log.info("add config: {}", kerberosPrincipal.getCoreSite().getAbsolutePath());
+            }
+
+            if (kerberosPrincipal.getHdfsSite() != null) {
+                conf.addResource(kerberosPrincipal.getHdfsSite().toURL());
+                log.info("add config: {}", kerberosPrincipal.getHdfsSite().getAbsolutePath());
+            }
+
+            conf.reloadConfiguration();
+
+            Configuration hbaseConf = HBaseConfiguration.create(conf);
+            if (kerberosPrincipal.getHBaseSite() != null) {
+                hbaseConf.addResource(kerberosPrincipal.getHBaseSite().toURL());
+                log.info("add config: {}", kerberosPrincipal.getHBaseSite().getAbsolutePath());
+            }
+            hbaseConf.reloadConfiguration();
+            /*
+             * Huawei Fi Hbase,认证
+             *
+             * if need to connect zk, please provide jaas info about zk. of course,
+             * you can do it as below:
+             * System.setProperty("java.security.auth.login.config", confDirPath +
+             * "jaas.conf"); but the demo can help you more : Note: if this process
+             * will connect more than one zk cluster, the demo may be not proper. you
+             * can contact us for more help
+             */
+
+            LoginUtil.setJaasConf(ZOOKEEPER_DEFAULT_LOGIN_CONTEXT_NAME, krbUser, userKeytab);
+            LoginUtil.setZookeeperServerPrincipal(ZOOKEEPER_SERVER_PRINCIPAL_KEY,
+                    ZOOKEEPER_DEFAULT_SERVER_PRINCIPAL);
+            LoginUtil.login(krbUser, userKeytab, krb5File, hbaseConf);
+            connection = ConnectionFactory.createConnection(hbaseConf);
+            log.info("fi hbase kerberos 认证通过。");
+            return true;
+        } catch (Exception e) {
+            throw new SecurityException(e);
+        }
+    }
+
+    @Override
+    public Admin getConnection() throws HBaseException {
+        try {
+            return connection.getAdmin();
+        } catch (Exception e) {
+            throw new HBaseException("连接 HBase 异常。", e);
+        }
+    }
+
+    @Override
+    public Table getTable(String tableName) throws HBaseException {
+        try {
+            return connection.getTable(TableName.valueOf(tableName));
+        } catch (IOException e) {
+            throw new HBaseException("无法获取Hbase " + tableName + " 表链接。", e);
+        }
+    }
+
+    @Override
+    public void close() throws IOException {
+        log.info("关闭 HBase 连接。");
+        if(connection != null) {
+            connection.close();
+        }
+    }
+}

+ 43 - 0
hadoop-auth/HBaseConnectionFactory.java

@@ -0,0 +1,43 @@
+package com.primeton.dsp.datarelease.data.bdata;
+
+import com.primeton.dsp.datarelease.server.model.DspHbaseResource;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.commons.lang.StringUtils;
+
+/**
+ *
+ * 根据参数不同,生成不动的测试连接实例
+ *
+ * <pre>
+ *
+ * Created by zhaopx.
+ * User: zhaopx
+ * Date: 2020/4/21
+ * Time: 18:02
+ *
+ * </pre>
+ *
+ * @author zhaopx
+ */
+@Slf4j
+public class HBaseConnectionFactory {
+
+
+    /**
+     * 生成不同的测试实例
+     * @return
+     */
+    public static HBaseConnectionService getHBaseInstance(DspHbaseResource hbaseResource) {
+        String authUser = hbaseResource.getAuthUser();
+        if(StringUtils.isBlank(authUser) || "noauth".equalsIgnoreCase(hbaseResource.getAuthType())) {
+            // 无需认证
+            return new SimpleHBaseConnectionServiceImpl(hbaseResource);
+        } else if("kerberos".equalsIgnoreCase(hbaseResource.getAuthType())){
+            // kerberos 认证
+            return new Krb5HBaseConnectionServiceImpl(hbaseResource);
+        } else {
+            //fi 华为
+            return new FiHBaseConnectionServiceImpl(hbaseResource);
+        }
+    }
+}

+ 45 - 0
hadoop-auth/HBaseConnectionService.java

@@ -0,0 +1,45 @@
+package com.primeton.dsp.datarelease.data.bdata;
+
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.exceptions.HBaseException;
+
+
+/**
+ * HBase 测试连接
+ *
+ * <pre>
+ *
+ * Created by zhaopx.
+ * User: zhaopx
+ * Date: 2020/4/21
+ * Time: 17:59
+ *
+ * </pre>
+ *
+ * @author zhaopx
+ */
+public interface HBaseConnectionService {
+
+
+    /**
+     * 安全处理,认证. 认证不通过可能抛出 SecurityException
+     * @return 返回 true 认证通过
+     */
+    default public boolean doAuth() { return true; };
+
+
+    /**
+     * 获取 HBaseAdmin 连接
+     * @return
+     */
+    public Admin getConnection() throws HBaseException;
+
+
+    /**
+     * 获取HBase 指定表的链接
+     * @return
+     * @throws HBaseException
+     */
+    public Table getTable(String tableName) throws HBaseException;
+}

+ 47 - 0
hadoop-auth/HiveConnectionFactory.java

@@ -0,0 +1,47 @@
+package com.primeton.dsp.datarelease.data.bdata;
+
+import com.primeton.dsp.datarelease.server.model.DspHiveResource;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.commons.lang.StringUtils;
+
+/**
+ *
+ * 根据参数不同,生成不动的测试连接实例
+ *
+ * <pre>
+ *
+ * Created by zhaopx.
+ * User: zhaopx
+ * Date: 2020/4/21
+ * Time: 18:02
+ *
+ * </pre>
+ *
+ * @author zhaopx
+ */
+@Slf4j
+public class HiveConnectionFactory {
+
+
+    /**
+     * 生成不同的测试实例
+     * @return
+     */
+    public static HiveConnectionService getHiveInstance(DspHiveResource hiveResource) {
+	    String type = hiveResource.getCollectionType();
+        String authType = hiveResource.getAuthType();
+        String hiveDbUser = hiveResource.getHiveDbUser();
+        if(StringUtils.isBlank(hiveDbUser) || "noauth".equalsIgnoreCase(authType)) {
+            // 默认无认证的方式测试
+            return new SimpleHiveConnectionServiceImpl(hiveResource);
+        } else if ("kerberos".equalsIgnoreCase(authType) && StringUtils.isNotBlank(hiveDbUser)) {
+            return new Krb5HiveConnectionServiceImpl(hiveResource);
+        } else if ("fi".equalsIgnoreCase(authType) && StringUtils.isNotBlank(hiveDbUser)) {
+            return new FIHiveConnectionServiceImpl(hiveResource);
+        }
+
+        // 未知的方式,只能通过简单的来处理一下了。
+        log.info("未知的Hive认证方式:" + authType);
+        return new SimpleHiveConnectionServiceImpl(hiveResource);
+    }
+}

+ 35 - 0
hadoop-auth/HiveConnectionService.java

@@ -0,0 +1,35 @@
+package com.primeton.dsp.datarelease.data.bdata;
+
+import java.sql.Connection;
+import java.sql.SQLException;
+
/**
 * Abstraction over an (optionally authenticated) Hive JDBC connection.
 *
 * <pre>
 *
 * Created by zhaopx.
 * User: zhaopx
 * Date: 2020/4/21
 * Time: 17:59
 *
 * </pre>
 *
 * @author zhaopx
 */
public interface HiveConnectionService {


    /**
     * Performs authentication. Implementations may throw {@link SecurityException}
     * when authentication fails.
     *
     * @return {@code true} when authentication passed
     */
    default boolean doAuth() {
        return true;
    }


    /**
     * Obtains a Hive JDBC connection.
     *
     * @return the JDBC connection
     * @throws SQLException when the connection cannot be obtained
     */
    Connection getConnection() throws SQLException;
}

+ 178 - 0
hadoop-auth/HiveHelper.java

@@ -0,0 +1,178 @@
+package com.primeton.dsp.datarelease.data.bdata;
+
+
+
+import lombok.NonNull;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.hadoop.conf.Configuration;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.ArrayDeque;
+import java.util.ArrayList;
+import java.util.Deque;
+import java.util.List;
+import java.util.Properties;
+
+
+/**
+ * <pre>
+ *
+ * Created by zhaopx.
+ * User: zhaopx
+ * Date: 2020/4/20
+ * Time: 17:56
+ *
+ * </pre>
+ *
+ * @author zhaopx merge
+ */
+@Slf4j
+public class HiveHelper {
+    private static final String ZOOKEEPER_DEFAULT_LOGIN_CONTEXT_NAME = "Client";
+    private static final String ZOOKEEPER_SERVER_PRINCIPAL_KEY = "zookeeper.server.principal";
+    private static final String ZOOKEEPER_DEFAULT_SERVER_PRINCIPAL = "zookeeper/hadoop";
+
+    private String hiveclientPropFile = null;
+    private String krb5File = null;
+    private String userName = null;
+    private String userKeytabFile = null;
+
+    private String url = null;
+
+    private String zkQuorum = null;// zookeeper节点ip和端口列表
+    private String auth = null;
+    private String sasl_qop = null;
+    private String zooKeeperNamespace = null;
+    private String serviceDiscoveryMode = null;
+    private String principal = null;
+
+    private Deque<Connection> pools = new ArrayDeque<Connection>();
+    private List<Connection> allConnection = new ArrayList<Connection>();
+
+
+    private final Configuration conf;
+
+    public HiveHelper(@NonNull Configuration conf,
+                      @NonNull String hiveclientPropFile,
+                      String userName,
+                      String userKeytabFile,
+                      String krb5File) throws IOException {
+        this.conf = conf;
+        this.hiveclientPropFile = hiveclientPropFile;
+        this.userName = userName;
+        this.userKeytabFile = userKeytabFile;
+        this.krb5File = krb5File;
+
+        this.init();
+    }
+
+    private void init() throws IOException {
+        Properties clientInfo = null;
+        InputStream fileInputStream = null;
+        try {
+            clientInfo = new Properties();
+            // "hiveclient.properties"为客户端配置文件,如果使用多实例特性,需要把该文件换成对应实例客户端下的"hiveclient.properties"
+            // "hiveclient.properties"文件位置在对应实例客户端安裝包解压目录下的config目录下
+            File propertiesFile = new File(this.hiveclientPropFile);
+            fileInputStream = new FileInputStream(propertiesFile);
+            clientInfo.load(fileInputStream);
+        } catch (Exception e) {
+            throw new IOException(e);
+        } finally {
+            if (fileInputStream != null) {
+                fileInputStream.close();
+                fileInputStream = null;
+            }
+        }
+        // zkQuorum获取后的格式为"xxx.xxx.xxx.xxx:24002,xxx.xxx.xxx.xxx:24002,xxx.xxx.xxx.xxx:24002";
+        // "xxx.xxx.xxx.xxx"为集群中ZooKeeper所在节点的业务IP,端口默认是24002
+        zkQuorum = clientInfo.getProperty("zk.quorum");
+        auth = clientInfo.getProperty("auth");
+        sasl_qop = clientInfo.getProperty("sasl.qop");
+        zooKeeperNamespace = clientInfo.getProperty("zooKeeperNamespace");
+        serviceDiscoveryMode = clientInfo.getProperty("serviceDiscoveryMode");
+        principal = clientInfo.getProperty("principal");
+
+        // 拼接JDBC URL
+        StringBuilder sBuilder = new StringBuilder("jdbc:hive2://").append(zkQuorum).append("/");
+        if ("KERBEROS".equalsIgnoreCase(auth)) {
+            // 设置客户端的keytab和krb5文件路径
+
+            LoginUtil.setJaasConf(ZOOKEEPER_DEFAULT_LOGIN_CONTEXT_NAME, this.userName, this.userKeytabFile);
+            LoginUtil.setZookeeperServerPrincipal(ZOOKEEPER_SERVER_PRINCIPAL_KEY, ZOOKEEPER_DEFAULT_SERVER_PRINCIPAL);
+
+            // 安全模式
+            this.conf.set("hadoop.security.authentication", "kerberos");
+            // Zookeeper登录认证
+            LoginUtil.login(this.userName, this.userKeytabFile, this.krb5File, this.conf);
+
+            sBuilder.append(";serviceDiscoveryMode=").append(serviceDiscoveryMode).append(";zooKeeperNamespace=")
+                    .append(zooKeeperNamespace).append(";sasl.qop=").append(sasl_qop).append(";auth=").append(auth)
+                    .append(";principal=").append(principal).append(";");
+        } else {
+            // 普通模式
+            sBuilder.append(";serviceDiscoveryMode=").append(serviceDiscoveryMode).append(";zooKeeperNamespace=")
+                    .append(zooKeeperNamespace).append(";auth=none");
+        }
+
+        this.url = sBuilder.toString();
+    }
+
+    public Connection getConnection() throws SQLException {
+        return DriverManager.getConnection(url, "", "");
+    }
+
+    public synchronized Connection getPoolConnection() throws SQLException{
+        if(!pools.isEmpty()) {
+            return pools.removeLast();
+        }
+
+        Connection conn = getConnection();
+        allConnection.add(conn);
+
+        return conn;
+    }
+
+    public synchronized void returnConnection(Connection conn) {
+        pools.addFirst(conn);
+    }
+
+    public synchronized void closePoolConnection() {
+        for(Connection conn : allConnection) {
+            close(conn);
+        }
+    }
+
+    public static void close(Connection conn) {
+        if(conn != null) {
+            try {
+                conn.close();
+            } catch (SQLException e) {
+                log.error("关闭Hive连接失败.", e);
+            }
+        }
+    }
+
+    public static void close(Statement statment, ResultSet rs) {
+        if(rs != null) {
+            try {
+                rs.close();
+            } catch (SQLException e) {}
+        }
+
+        if(statment != null) {
+            try {
+                statment.close();
+            } catch (SQLException e) {}
+        }
+    }
+
+}

+ 38 - 0
hadoop-auth/KerberosUtil.java

@@ -0,0 +1,38 @@
+package com.primeton.dsp.datarelease.data.bdata;
+
+import lombok.extern.slf4j.Slf4j;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.security.UserGroupInformation;
+
+import java.io.IOException;
+
+
+/**
+ *
+ * Kerberos 认证
+ *
+ * @author zhaopx
+ */
+@Slf4j
+public class KerberosUtil {
+
+
+    /**
+     * 开始登陆,如果登陆失败抛出 SecurityException
+     * @param conf
+     * @param principal
+     * @param userKeytabFile
+     * @param krb5File
+     */
+    public static void loginKerberos(Configuration conf, String principal, String userKeytabFile, String krb5File) {
+        System.setProperty("java.security.krb5.conf", krb5File);
+        try {
+            UserGroupInformation.setConfiguration(conf);
+            UserGroupInformation.loginUserFromKeytab(principal, userKeytabFile);
+            log.info("kerberos 认证成功!");
+        } catch (IOException e) {
+            log.error("kerberos 认证失败!", e);
+            throw new SecurityException("kerberos 认证失败!", e);
+        }
+    }
+}

+ 122 - 0
hadoop-auth/Krb5HBaseConnectionServiceImpl.java

@@ -0,0 +1,122 @@
+package com.primeton.dsp.datarelease.data.bdata;
+
+
+import com.primeton.dsp.datarelease.server.model.DspHbaseResource;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.ConnectionFactory;
+import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.exceptions.HBaseException;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.util.Set;
+
+/**
+ *
+ * Hive Kerberos 认证方式获得连接
+ *
+ *
+ * <pre>
+ *
+ * Created by zhaopx.
+ * User: zhaopx
+ * Date: 2020/4/22
+ * Time: 18:02
+ *
+ * </pre>
+ *
+ * @author zhaopx
+ */
+@Slf4j
+public class Krb5HBaseConnectionServiceImpl implements HBaseConnectionService, Closeable {
+
+
+    /**
+     * Hive 数据源
+     */
+    final DspHbaseResource hbaseResource;
+
+    /**
+     * HBase 链接
+     */
+    Connection connection;
+
+
+    public Krb5HBaseConnectionServiceImpl(DspHbaseResource hbaseResource) {
+        this.hbaseResource = hbaseResource;
+    }
+
+    @Override
+    public boolean doAuth() {
+        //KrbUser = "hadoop/cdh-node1@HADOOP.COM";
+        AuthPrincipalCreator authPrincipalCreator = AuthPrincipalCreator.useDataReleaseConf(hbaseResource.getAuthBasePath());
+        Set<String> principals = authPrincipalCreator.listPrincipals();
+        log.info("find existed principals: {}", principals);
+        AuthPrincipal kerberosPrincipal = authPrincipalCreator.getKerberosPrincipal(hbaseResource.getAuthUser());
+
+        String userKeytab = kerberosPrincipal.getUserKeytabFile().getAbsolutePath();
+        String krb5File = kerberosPrincipal.getKrb5File().getAbsolutePath();
+        String krbUser = kerberosPrincipal.getPrincipal();
+
+        // 分别加载 core、hdfs、hbase site 文件
+        Configuration conf = new Configuration();
+        try {
+            if (kerberosPrincipal.getCoreSite() != null) {
+                conf.addResource(kerberosPrincipal.getCoreSite().toURL());
+                log.info("add config: {}", kerberosPrincipal.getCoreSite().getAbsolutePath());
+            }
+
+            if (kerberosPrincipal.getHdfsSite() != null) {
+                conf.addResource(kerberosPrincipal.getHdfsSite().toURL());
+                log.info("add config: {}", kerberosPrincipal.getHdfsSite().getAbsolutePath());
+            }
+
+            conf.reloadConfiguration();
+            Configuration hbaseConf = HBaseConfiguration.create(conf);
+            if (kerberosPrincipal.getHBaseSite() != null) {
+                hbaseConf.addResource(kerberosPrincipal.getHBaseSite().toURL());
+                log.info("add config: {}", kerberosPrincipal.getHBaseSite().getAbsolutePath());
+            }
+            hbaseConf.reloadConfiguration();
+
+            // Kerberos 认证
+            KerberosUtil.loginKerberos(hbaseConf, krbUser, userKeytab, krb5File);
+            connection = ConnectionFactory.createConnection(hbaseConf);
+            log.info("hbase kerberos 认证通过。");
+            return true;
+        } catch (Exception e) {
+            throw new SecurityException("HBase Kerberos 认证异常。", e);
+        }
+    }
+
+    @Override
+    public Admin getConnection() throws HBaseException {
+        try {
+            return connection.getAdmin();
+        } catch (Exception e) {
+            throw new HBaseException("连接 HBase 异常。", e);
+        }
+    }
+
+    @Override
+    public Table getTable(String tableName) throws HBaseException {
+        try {
+            return connection.getTable(TableName.valueOf(tableName));
+        } catch (IOException e) {
+            throw new HBaseException("无法获取Hbase " + tableName + " 表链接。", e);
+        }
+    }
+
+    @Override
+    public void close() throws IOException {
+        log.info("关闭 HBase 连接。");
+        if(connection != null) {
+            connection.close();
+        }
+    }
+}

+ 100 - 0
hadoop-auth/Krb5HiveConnectionServiceImpl.java

@@ -0,0 +1,100 @@
+package com.primeton.dsp.datarelease.data.bdata;
+
+
+import com.primeton.dsp.datarelease.server.model.DspHiveResource;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.conf.HiveConf;
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.SQLException;
+import java.util.Set;
+
+/**
+ *
+ * Hive Kerberos 认证方式获得连接
+ *
+ *
+ * <pre>
+ *
+ * Created by zhaopx.
+ * User: zhaopx
+ * Date: 2020/4/22
+ * Time: 18:02
+ *
+ * </pre>
+ *
+ * @author zhaopx
+ */
+@Slf4j
+public class Krb5HiveConnectionServiceImpl implements HiveConnectionService {
+
+
+    /**
+     * Hive 数据源
+     */
+    final DspHiveResource hiveResource;
+
+
+
+    String hiveUrl;
+
+    public Krb5HiveConnectionServiceImpl(DspHiveResource hiveResource) {
+        this.hiveResource = hiveResource;
+    }
+
+    @Override
+    public boolean doAuth() {
+        //KrbUser = "hadoop/cdh-node1@HADOOP.COM";
+        // 认证传过来
+        AuthPrincipalCreator authPrincipalCreator = AuthPrincipalCreator.useDataReleaseConf(hiveResource.getAuthBasePath());
+        Set<String> principals = authPrincipalCreator.listPrincipals();
+        log.info("find existed principals: {}", principals);
+        AuthPrincipal kerberosPrincipal = authPrincipalCreator.getKerberosPrincipal(hiveResource.getHiveDbUser());
+
+        String userKeytab = kerberosPrincipal.getUserKeytabFile().getAbsolutePath();
+        String krb5File = kerberosPrincipal.getKrb5File().getAbsolutePath();
+        String krbUser = kerberosPrincipal.getPrincipal();
+        StringBuffer buffer = new StringBuffer(hiveResource.getHiveUrl());
+        buffer.append(";principal=").append(krbUser);
+        hiveUrl = buffer.toString();
+        log.info("HIVE_URL : " + hiveUrl);
+
+        // 分别加载 core、hdfs、hive site 文件
+        Configuration conf = new Configuration();
+        try {
+            if (kerberosPrincipal.getCoreSite() != null) {
+                conf.addResource(kerberosPrincipal.getCoreSite().toURL());
+                log.info("add config: {}", kerberosPrincipal.getCoreSite().getAbsolutePath());
+            }
+
+            if (kerberosPrincipal.getHdfsSite() != null) {
+                conf.addResource(kerberosPrincipal.getHdfsSite().toURL());
+                log.info("add config: {}", kerberosPrincipal.getHdfsSite().getAbsolutePath());
+            }
+
+            if (kerberosPrincipal.getHiveSite() != null) {
+                conf.addResource(kerberosPrincipal.getHiveSite().toURL());
+                log.info("add config: {}", kerberosPrincipal.getHiveSite().getAbsolutePath());
+            }
+        } catch (Exception e) {
+            throw new IllegalStateException(e);
+        }
+
+        // Kerberos 认证
+        KerberosUtil.loginKerberos(conf, krbUser, userKeytab, krb5File);
+        log.info("hive kerberos 认证通过。");
+        return true;
+    }
+
+    @Override
+    public Connection getConnection() throws SQLException {
+        try {
+            Class.forName("org.apache.hive.jdbc.HiveDriver");
+        } catch (ClassNotFoundException e) {
+            throw new SQLException("找不到Hive驱动:org.apache.hive.jdbc.HiveDriver.", e);
+        }
+        return DriverManager.getConnection(hiveUrl, "", "");
+    }
+}

+ 554 - 0
hadoop-auth/LoginUtil.java

@@ -0,0 +1,554 @@
+package com.primeton.dsp.datarelease.data.bdata;
+
+
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authentication.util.KerberosUtil;
+import org.apache.log4j.Logger;
+
+import javax.security.auth.login.AppConfigurationEntry;
+import javax.security.auth.login.AppConfigurationEntry.LoginModuleControlFlag;
+import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ *
+ * Hadoop 认证实现类
+ *
+ * <pre>
+ *
+ * Created by zhaopx.
+ * User: zhaopx
+ * Date: 2020/4/20
+ * Time: 17:57
+ *
+ * </pre>
+ *
+ * @author zhaopx
+ */
+public class LoginUtil {
+
+    public enum Module {
+        STORM("StormClient"), KAFKA("KafkaClient"), ZOOKEEPER("Client");
+
+        private String name;
+
+        private Module(String name) {
+            this.name = name;
+        }
+
+        public String getName() {
+            return name;
+        }
+    }
+
    /** Class logger. */
    private static final Logger LOG = Logger.getLogger(LoginUtil.class);

    /**
     * line operator string
     */
    private static final String LINE_SEPARATOR = System
            .getProperty("line.separator");

    /**
     * jaas file postfix
     */
    private static final String JAAS_POSTFIX = ".jaas.conf";

    /**
     * IBM jdk login module
     */
    private static final String IBM_LOGIN_MODULE = "com.ibm.security.auth.module.Krb5LoginModule required";

    /**
     * oracle jdk login module
     */
    private static final String SUN_LOGIN_MODULE = "com.sun.security.auth.module.Krb5LoginModule required";

    /**
     * java security login file path
     */
    public static final String JAVA_SECURITY_LOGIN_CONF_KEY = "java.security.auth.login.config";

    /** System property the JDK reads to locate krb5.conf. */
    private static final String JAVA_SECURITY_KRB5_CONF_KEY = "java.security.krb5.conf";

    /** System property naming the ZooKeeper server principal. */
    private static final String ZOOKEEPER_SERVER_PRINCIPAL_KEY = "zookeeper.server.principal";

    // Diagnostic hints logged when a Kerberos login fails (see loginHadoop).
    private static final String LOGIN_FAILED_CAUSE_PASSWORD_WRONG = "(wrong password) keytab file and user not match, you can kinit -k -t keytab user in client server to check";

    private static final String LOGIN_FAILED_CAUSE_TIME_WRONG = "(clock skew) time of local server and remote server not match, please check ntp to remote server";

    private static final String LOGIN_FAILED_CAUSE_AES256_WRONG = "(aes256 not support) aes256 not support by default jdk/jre, need copy local_policy.jar and US_export_policy.jar from remote server in path /opt/Bigdata/jdk/jre/lib/security";

    private static final String LOGIN_FAILED_CAUSE_PRINCIPAL_WRONG = "(no rule) principal format not support by default, need add property hadoop.security.auth_to_local(in core-site.xml) value RULE:[1:$1] RULE:[2:$1]";

    private static final String LOGIN_FAILED_CAUSE_TIME_OUT = "(time out) can not connect to kdc server or there is fire wall in the network";

    /** True on an IBM JDK, whose Krb5LoginModule uses different option names. */
    private static final boolean IS_IBM_JDK = System.getProperty("java.vendor")
            .contains("IBM");
+
+    public synchronized static void login(String userPrincipal,
+                                          String userKeytabPath, String krb5ConfPath, Configuration conf)
+            throws IOException {
+        // 1.check input parameters
+        if ((userPrincipal == null) || (userPrincipal.length() <= 0)) {
+            LOG.error("input userPrincipal is invalid.");
+            throw new IOException("input userPrincipal is invalid.");
+        }
+
+        if ((userKeytabPath == null) || (userKeytabPath.length() <= 0)) {
+            LOG.error("input userKeytabPath is invalid.");
+            throw new IOException("input userKeytabPath is invalid.");
+        }
+
+        if ((krb5ConfPath == null) || (krb5ConfPath.length() <= 0)) {
+            LOG.error("input krb5ConfPath is invalid.");
+            throw new IOException("input krb5ConfPath is invalid.");
+        }
+
+        if ((conf == null)) {
+            LOG.error("input conf is invalid.");
+            throw new IOException("input conf is invalid.");
+        }
+
+        // 2.check file exsits
+        File userKeytabFile = new File(userKeytabPath);
+        if (!userKeytabFile.exists()) {
+            LOG.error("userKeytabFile(" + userKeytabFile.getAbsolutePath()
+                    + ") does not exsit.");
+            throw new IOException("userKeytabFile("
+                    + userKeytabFile.getAbsolutePath() + ") does not exsit.");
+        }
+        if (!userKeytabFile.isFile()) {
+            LOG.error("userKeytabFile(" + userKeytabFile.getAbsolutePath()
+                    + ") is not a file.");
+            throw new IOException("userKeytabFile("
+                    + userKeytabFile.getAbsolutePath() + ") is not a file.");
+        }
+
+        File krb5ConfFile = new File(krb5ConfPath);
+        if (!krb5ConfFile.exists()) {
+            LOG.error("krb5ConfFile(" + krb5ConfFile.getAbsolutePath()
+                    + ") does not exsit.");
+            throw new IOException("krb5ConfFile("
+                    + krb5ConfFile.getAbsolutePath() + ") does not exsit.");
+        }
+        if (!krb5ConfFile.isFile()) {
+            LOG.error("krb5ConfFile(" + krb5ConfFile.getAbsolutePath()
+                    + ") is not a file.");
+            throw new IOException("krb5ConfFile("
+                    + krb5ConfFile.getAbsolutePath() + ") is not a file.");
+        }
+
+        // 3.set and check krb5config
+        setKrb5Config(krb5ConfFile.getAbsolutePath());
+        setConfiguration(conf);
+
+        // 4.login and check for hadoop
+        loginHadoop(userPrincipal, userKeytabFile.getAbsolutePath());
+        LOG.info("Login success!!!!!!!!!!!!!!");
+    }
+
    /** Installs the Hadoop configuration into UserGroupInformation (JVM-global state). */
    private static void setConfiguration(Configuration conf) throws IOException {
        UserGroupInformation.setConfiguration(conf);
    }

    /**
     * Decides whether a fresh Kerberos login is required. Returns false when the
     * current UGI user already holds keytab-based credentials for {@code principal};
     * throws when security is disabled, the logged-in user is not from a keytab,
     * or a different principal is already logged in.
     *
     * NOTE(review): private and not referenced anywhere in this class's visible
     * code — confirm external use before removing.
     */
    private static boolean checkNeedLogin(String principal) throws IOException {
        if (!UserGroupInformation.isSecurityEnabled()) {
            LOG.error("UserGroupInformation is not SecurityEnabled, please check if core-site.xml exists in classpath.");
            throw new IOException(
                    "UserGroupInformation is not SecurityEnabled, please check if core-site.xml exists in classpath.");
        }
        UserGroupInformation currentUser = UserGroupInformation
                .getCurrentUser();
        if ((currentUser != null) && (currentUser.hasKerberosCredentials())) {
            if (checkCurrentUserCorrect(principal)) {
                // message lacks a space before "has logined" — runtime string left as-is
                LOG.info("current user is " + currentUser + "has logined.");
                if (!currentUser.isFromKeytab()) {
                    LOG.error("current user is not from keytab.");
                    throw new IOException("current user is not from keytab.");
                }
                return false;
            } else {
                LOG.error("current user is "
                        + currentUser
                        + "has logined. please check your enviroment , especially when it used IBM JDK or kerberos for OS count login!!");
                throw new IOException("current user is " + currentUser
                        + " has logined. And please check your enviroment!!");
            }
        }

        return true;
    }
+
+    public static void setKrb5Config(String krb5ConfFile) throws IOException {
+        System.setProperty(JAVA_SECURITY_KRB5_CONF_KEY, krb5ConfFile);
+        String ret = System.getProperty(JAVA_SECURITY_KRB5_CONF_KEY);
+        if (ret == null) {
+            LOG.error(JAVA_SECURITY_KRB5_CONF_KEY + " is null.");
+            throw new IOException(JAVA_SECURITY_KRB5_CONF_KEY + " is null.");
+        }
+        if (!ret.equals(krb5ConfFile)) {
+            LOG.error(JAVA_SECURITY_KRB5_CONF_KEY + " is " + ret + " is not "
+                    + krb5ConfFile + ".");
+            throw new IOException(JAVA_SECURITY_KRB5_CONF_KEY + " is " + ret
+                    + " is not " + krb5ConfFile + ".");
+        }
+    }
+
+    public static void setJaasFile(String principal, String keytabPath)
+            throws IOException {
+        String jaasPath = new File(System.getProperty("java.io.tmpdir"))
+                + File.separator + System.getProperty("user.name")
+                + JAAS_POSTFIX;
+
+        // windows路径下分隔符替换
+        jaasPath = jaasPath.replace("\\", "\\\\");
+        keytabPath = keytabPath.replace("\\", "\\\\");
+        // 删除jaas文件
+        deleteJaasFile(jaasPath);
+        writeJaasFile(jaasPath, principal, keytabPath);
+        System.setProperty(JAVA_SECURITY_LOGIN_CONF_KEY, jaasPath);
+    }
+
+    private static void writeJaasFile(String jaasPath, String principal,
+                                      String keytabPath) throws IOException {
+        FileWriter writer = new FileWriter(new File(jaasPath));
+        try {
+            writer.write(getJaasConfContext(principal, keytabPath));
+            writer.flush();
+        } catch (IOException e) {
+            throw new IOException("Failed to create jaas.conf File");
+        } finally {
+            writer.close();
+        }
+    }
+
+    private static void deleteJaasFile(String jaasPath) throws IOException {
+        File jaasFile = new File(jaasPath);
+        if (jaasFile.exists()) {
+            if (!jaasFile.delete()) {
+                throw new IOException("Failed to delete exists jaas file.");
+            }
+        }
+    }
+
+    private static String getJaasConfContext(String principal, String keytabPath) {
+        Module[] allModule = Module.values();
+        StringBuilder builder = new StringBuilder();
+        for (Module modlue : allModule) {
+            builder.append(getModuleContext(principal, keytabPath, modlue));
+        }
+        return builder.toString();
+    }
+
+    private static String getModuleContext(String userPrincipal,
+                                           String keyTabPath, Module module) {
+        StringBuilder builder = new StringBuilder();
+        if (IS_IBM_JDK) {
+            builder.append(module.getName()).append(" {")
+                    .append(LINE_SEPARATOR);
+            builder.append(IBM_LOGIN_MODULE).append(LINE_SEPARATOR);
+            builder.append("credsType=both").append(LINE_SEPARATOR);
+            builder.append("principal=\"" + userPrincipal + "\"").append(
+                    LINE_SEPARATOR);
+            builder.append("useKeytab=\"" + keyTabPath + "\"").append(
+                    LINE_SEPARATOR);
+            builder.append("debug=true;").append(LINE_SEPARATOR);
+            builder.append("};").append(LINE_SEPARATOR);
+        } else {
+            builder.append(module.getName()).append(" {")
+                    .append(LINE_SEPARATOR);
+            builder.append(SUN_LOGIN_MODULE).append(LINE_SEPARATOR);
+            builder.append("useKeyTab=true").append(LINE_SEPARATOR);
+            builder.append("keyTab=\"" + keyTabPath + "\"").append(
+                    LINE_SEPARATOR);
+            builder.append("principal=\"" + userPrincipal + "\"").append(
+                    LINE_SEPARATOR);
+            builder.append("useTicketCache=false").append(LINE_SEPARATOR);
+            builder.append("storeKey=true").append(LINE_SEPARATOR);
+            builder.append("debug=true;").append(LINE_SEPARATOR);
+            builder.append("};").append(LINE_SEPARATOR);
+        }
+
+        return builder.toString();
+    }
+
+    public static void setJaasConf(String loginContextName, String principal,
+                                   String keytabFile) throws IOException {
+        if ((loginContextName == null) || (loginContextName.length() <= 0)) {
+            LOG.error("input loginContextName is invalid.");
+            throw new IOException("input loginContextName is invalid.");
+        }
+
+        if ((principal == null) || (principal.length() <= 0)) {
+            LOG.error("input principal is invalid.");
+            throw new IOException("input principal is invalid.");
+        }
+
+        if ((keytabFile == null) || (keytabFile.length() <= 0)) {
+            LOG.error("input keytabFile is invalid.");
+            throw new IOException("input keytabFile is invalid.");
+        }
+
+        File userKeytabFile = new File(keytabFile);
+        if (!userKeytabFile.exists()) {
+            LOG.error("userKeytabFile(" + userKeytabFile.getAbsolutePath()
+                    + ") does not exsit.");
+            throw new IOException("userKeytabFile("
+                    + userKeytabFile.getAbsolutePath() + ") does not exsit.");
+        }
+
+        javax.security.auth.login.Configuration
+                .setConfiguration(new JaasConfiguration(loginContextName,
+                        principal, userKeytabFile.getAbsolutePath()));
+
+        javax.security.auth.login.Configuration conf = javax.security.auth.login.Configuration
+                .getConfiguration();
+        if (!(conf instanceof JaasConfiguration)) {
+            LOG.error("javax.security.auth.login.Configuration is not JaasConfiguration.");
+            throw new IOException(
+                    "javax.security.auth.login.Configuration is not JaasConfiguration.");
+        }
+
+        AppConfigurationEntry[] entrys = conf
+                .getAppConfigurationEntry(loginContextName);
+        if (entrys == null) {
+            LOG.error("javax.security.auth.login.Configuration has no AppConfigurationEntry named "
+                    + loginContextName + ".");
+            throw new IOException(
+                    "javax.security.auth.login.Configuration has no AppConfigurationEntry named "
+                            + loginContextName + ".");
+        }
+
+        boolean checkPrincipal = false;
+        for (int i = 0; i < entrys.length; i++) {
+            if (entrys[i].getOptions().get("principal").equals(principal)) {
+                checkPrincipal = true;
+            }
+
+        }
+
+        if (!checkPrincipal) {
+            LOG.error("AppConfigurationEntry named " + loginContextName
+                    + " does not have principal value of " + principal + ".");
+            throw new IOException("AppConfigurationEntry named "
+                    + loginContextName + " does not have principal value of "
+                    + principal + ".");
+        }
+
+    }
+
+    public static void setZookeeperServerPrincipal(String zkServerPrincipal)
+            throws IOException {
+        System.setProperty(ZOOKEEPER_SERVER_PRINCIPAL_KEY, zkServerPrincipal);
+        String ret = System.getProperty(ZOOKEEPER_SERVER_PRINCIPAL_KEY);
+        if (ret == null) {
+            LOG.error(ZOOKEEPER_SERVER_PRINCIPAL_KEY + " is null.");
+            throw new IOException(ZOOKEEPER_SERVER_PRINCIPAL_KEY + " is null.");
+        }
+        if (!ret.equals(zkServerPrincipal)) {
+            LOG.error(ZOOKEEPER_SERVER_PRINCIPAL_KEY + " is " + ret
+                    + " is not " + zkServerPrincipal + ".");
+            throw new IOException(ZOOKEEPER_SERVER_PRINCIPAL_KEY + " is " + ret
+                    + " is not " + zkServerPrincipal + ".");
+        }
+    }
+
+    public static void setZookeeperServerPrincipal(String zkServerPrincipalKey,
+                                                   String zkServerPrincipal) throws IOException {
+        System.setProperty(zkServerPrincipalKey, zkServerPrincipal);
+        String ret = System.getProperty(zkServerPrincipalKey);
+        if (ret == null) {
+            LOG.error(zkServerPrincipalKey + " is null.");
+            throw new IOException(zkServerPrincipalKey + " is null.");
+        }
+        if (!ret.equals(zkServerPrincipal)) {
+            LOG.error(zkServerPrincipalKey + " is " + ret + " is not "
+                    + zkServerPrincipal + ".");
+            throw new IOException(zkServerPrincipalKey + " is " + ret
+                    + " is not " + zkServerPrincipal + ".");
+        }
+    }
+
+    private static void loginHadoop(String principal, String keytabFile)
+            throws IOException {
+        try {
+            UserGroupInformation.loginUserFromKeytab(principal, keytabFile);
+        } catch (IOException e) {
+            LOG.error("login failed with " + principal + " and " + keytabFile
+                    + ".");
+            LOG.error("perhaps cause 1 is " + LOGIN_FAILED_CAUSE_PASSWORD_WRONG
+                    + ".");
+            LOG.error("perhaps cause 2 is " + LOGIN_FAILED_CAUSE_TIME_WRONG
+                    + ".");
+            LOG.error("perhaps cause 3 is " + LOGIN_FAILED_CAUSE_AES256_WRONG
+                    + ".");
+            LOG.error("perhaps cause 4 is "
+                    + LOGIN_FAILED_CAUSE_PRINCIPAL_WRONG + ".");
+            LOG.error("perhaps cause 5 is " + LOGIN_FAILED_CAUSE_TIME_OUT + ".");
+
+            throw e;
+        }
+    }
+
    /**
     * Verifies that the UGI login took effect: the login user must exist, be
     * the current user, hold Kerberos credentials, and be keytab-based.
     *
     * NOTE(review): private and not referenced anywhere in this class's
     * visible code — confirm external use before removing.
     *
     * @throws IOException when any of the checks fails
     */
    private static void checkAuthenticateOverKrb() throws IOException {
        UserGroupInformation loginUser = UserGroupInformation.getLoginUser();
        UserGroupInformation currentUser = UserGroupInformation
                .getCurrentUser();
        if (loginUser == null) {
            LOG.error("current user is " + currentUser
                    + ", but loginUser is null.");
            throw new IOException("current user is " + currentUser
                    + ", but loginUser is null.");
        }
        if (!loginUser.equals(currentUser)) {
            LOG.error("current user is " + currentUser + ", but loginUser is "
                    + loginUser + ".");
            throw new IOException("current user is " + currentUser
                    + ", but loginUser is " + loginUser + ".");
        }
        if (!loginUser.hasKerberosCredentials()) {
            LOG.error("current user is " + currentUser
                    + " has no Kerberos Credentials.");
            throw new IOException("current user is " + currentUser
                    + " has no Kerberos Credentials.");
        }
        if (!UserGroupInformation.isLoginKeytabBased()) {
            LOG.error("current user is " + currentUser
                    + " is not Login Keytab Based.");
            throw new IOException("current user is " + currentUser
                    + " is not Login Keytab Based.");
        }
    }
+
+    private static boolean checkCurrentUserCorrect(String principal)
+            throws IOException {
+        UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
+        if (ugi == null) {
+            LOG.error("current user still null.");
+            throw new IOException("current user still null.");
+        }
+
+        String defaultRealm = null;
+        try {
+            defaultRealm = KerberosUtil.getDefaultRealm();
+        } catch (Exception e) {
+            LOG.warn("getDefaultRealm failed.");
+            throw new IOException(e);
+        }
+
+        if ((defaultRealm != null) && (defaultRealm.length() > 0)) {
+            StringBuilder realm = new StringBuilder();
+            StringBuilder principalWithRealm = new StringBuilder();
+            realm.append("@").append(defaultRealm);
+            if (!principal.endsWith(realm.toString())) {
+                principalWithRealm.append(principal).append(realm);
+                principal = principalWithRealm.toString();
+            }
+        }
+
+        return principal.equals(ugi.getUserName());
+    }
+
    /**
     * copy from hbase zkutil 0.94&0.98 A JAAS configuration that defines the
     * login modules that we want to use for login.
     */
    private static class JaasConfiguration extends
            javax.security.auth.login.Configuration {
        // Extra options applied to every entry (currently only "debug" when enabled).
        private static final Map<String, String> BASIC_JAAS_OPTIONS = new HashMap<String, String>();

        static {
            // Turn on JAAS debug output when HBASE_JAAS_DEBUG=true in the environment.
            String jaasEnvVar = System.getenv("HBASE_JAAS_DEBUG");
            if (jaasEnvVar != null && "true".equalsIgnoreCase(jaasEnvVar)) {
                BASIC_JAAS_OPTIONS.put("debug", "true");
            }
        }

        // Keytab login options; option names differ between IBM and Oracle/Sun JDKs.
        // NOTE(review): this map is static but mutated per-instance by
        // initKerberosOption(), so the most recently constructed instance's
        // principal/keytab win — confirm only one JaasConfiguration is live at a time.
        private static final Map<String, String> KEYTAB_KERBEROS_OPTIONS = new HashMap<String, String>();

        static {
            if (IS_IBM_JDK) {
                KEYTAB_KERBEROS_OPTIONS.put("credsType", "both");
            } else {
                KEYTAB_KERBEROS_OPTIONS.put("useKeyTab", "true");
                KEYTAB_KERBEROS_OPTIONS.put("useTicketCache", "false");
                KEYTAB_KERBEROS_OPTIONS.put("doNotPrompt", "true");
                KEYTAB_KERBEROS_OPTIONS.put("storeKey", "true");
            }

            KEYTAB_KERBEROS_OPTIONS.putAll(BASIC_JAAS_OPTIONS);
        }

        // Single keytab-based Krb5LoginModule entry built from the shared options map.
        private static final AppConfigurationEntry KEYTAB_KERBEROS_LOGIN = new AppConfigurationEntry(
                KerberosUtil.getKrb5LoginModuleName(),
                LoginModuleControlFlag.REQUIRED, KEYTAB_KERBEROS_OPTIONS);

        private static final AppConfigurationEntry[] KEYTAB_KERBEROS_CONF = new AppConfigurationEntry[] { KEYTAB_KERBEROS_LOGIN };

        // Previously installed JAAS configuration; used as a fallback for other contexts.
        private javax.security.auth.login.Configuration baseConfig;

        // Login context name this configuration answers for.
        private final String loginContextName;

        private final boolean useTicketCache;

        private final String keytabFile;

        private final String principal;

        /**
         * Creates a keytab-based configuration; falls back to the ticket cache
         * when no keytab file is given.
         */
        public JaasConfiguration(String loginContextName, String principal,
                                 String keytabFile) throws IOException {
            this(loginContextName, principal, keytabFile, keytabFile == null
                    || keytabFile.length() == 0);
        }

        private JaasConfiguration(String loginContextName, String principal,
                                  String keytabFile, boolean useTicketCache) throws IOException {
            try {
                this.baseConfig = javax.security.auth.login.Configuration
                        .getConfiguration();
            } catch (SecurityException e) {
                // no pre-existing configuration accessible; serve only our own context
                this.baseConfig = null;
            }
            this.loginContextName = loginContextName;
            this.useTicketCache = useTicketCache;
            this.keytabFile = keytabFile;
            this.principal = principal;

            initKerberosOption();
            LOG.info("JaasConfiguration loginContextName=" + loginContextName
                    + " principal=" + principal + " useTicketCache="
                    + useTicketCache + " keytabFile=" + keytabFile);
        }

        // Pushes this instance's principal/keytab into the shared static options map.
        private void initKerberosOption() throws IOException {
            if (!useTicketCache) {
                if (IS_IBM_JDK) {
                    KEYTAB_KERBEROS_OPTIONS.put("useKeytab", keytabFile);
                } else {
                    KEYTAB_KERBEROS_OPTIONS.put("keyTab", keytabFile);
                    KEYTAB_KERBEROS_OPTIONS.put("useKeyTab", "true");
                    KEYTAB_KERBEROS_OPTIONS.put("useTicketCache",
                            useTicketCache ? "true" : "false");
                }
            }
            KEYTAB_KERBEROS_OPTIONS.put("principal", principal);
        }

        /** Returns our keytab entry for the configured context; otherwise delegates to the base config. */
        @Override
        public AppConfigurationEntry[] getAppConfigurationEntry(String appName) {
            if (loginContextName.equals(appName)) {
                return KEYTAB_KERBEROS_CONF;
            }
            if (baseConfig != null) {
                return baseConfig.getAppConfigurationEntry(appName);
            }
            return (null);
        }
    }
+}

+ 33 - 0
hadoop-auth/PooledDataSource.java

@@ -0,0 +1,33 @@
+/*******************************************************************************************
+ *	Copyright (c) 2016, zzg.zhou(11039850@qq.com)
+ * 
+ *  Monalisa is free software: you can redistribute it and/or modify
+ *	it under the terms of the GNU Lesser General Public License as published by
+ *	the Free Software Foundation, either version 3 of the License, or
+ *	(at your option) any later version.
+
+ *	This program is distributed in the hope that it will be useful,
+ *	but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *	MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *	GNU Lesser General Public License for more details.
+
+ *	You should have received a copy of the GNU Lesser General Public License
+ *	along with this program.  If not, see <http://www.gnu.org/licenses/>.
+ *******************************************************************************************/
+package com.primeton.dsp.datarelease.data.bdata;
+
+
+import java.io.Closeable;
+import java.util.Properties;
+
+import javax.sql.DataSource;
+
+/**
+ *  @author zhaopx
+ */
/**
 * A closeable, configurable JDBC {@link DataSource} backed by a connection pool.
 */
public interface PooledDataSource extends DataSource, Closeable {

    /** Applies additional pool/connection configuration properties. */
    void setProperties(Properties properties);

    /**
     * Configures idle-connection validation — presumably the idle threshold in
     * seconds and the SQL used to validate; implementations may ignore it.
     */
    void setIdleValidationQuery(int idleInSeconds, String validationQuery);
}

+ 210 - 0
hadoop-auth/SimpleDataSource.java

@@ -0,0 +1,210 @@
+package com.primeton.dsp.datarelease.data.bdata;
+
import lombok.extern.slf4j.Slf4j;

import java.io.IOException;
import java.io.PrintWriter;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.SQLFeatureNotSupportedException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
+
+/**
+ *  
+ * @author zhaopx
+ */
+
+@Slf4j
+public class SimpleDataSource implements PooledDataSource {	 
+
	// Idle connections, mapped to the time they were returned to the pool.
	private ConcurrentMap<Connection, Date> pool = new ConcurrentHashMap<Connection, Date>();


	// Pool sizing: hard cap, warm-up size, and permit wait time (seconds).
	private int maxSize;
	private int minSize;
	private int waitTime;

	// Bounds the number of physically created connections to maxSize.
	private Semaphore semaphore;

	// Snapshot of the configuration properties this pool was built with.
	private final Properties connProps=new Properties();


	// Factory that performs the (possibly kerberized) Hive connection setup.
	private HiveConnectionService connectionService;

	/**
	 * Builds the pool from "pool.max" (default 15), "pool.min" (default 3) and
	 * "pool.waitTime" (default 5, in seconds), then warms up pool.min connections.
	 */
	public SimpleDataSource(HiveConnectionService connectionService, Properties poolProperties) {
		this.connectionService = connectionService;
		connProps.putAll(poolProperties);

		maxSize  = Integer.parseInt(poolProperties.getProperty("pool.max", "15"));
		minSize  = Integer.parseInt(poolProperties.getProperty("pool.min", "3"));
		waitTime = Integer.parseInt(poolProperties.getProperty("pool.waitTime", "5"));
		initConnections(poolProperties);
	}
+
+	private void initConnections(Properties poolProperties) {
+
+		log.info("Initializing simple data source{ pool.max = " + maxSize + ", pool.min = " + minSize + "}");
+		semaphore = new Semaphore(maxSize, false);
+
+		if (minSize > 0 && minSize < maxSize) {
+			try {
+				List<Connection> connections = new ArrayList<Connection>();
+				for (int i = 0; i < minSize; i++) {
+					connections.add(getConnection());
+				}
+				for (Connection conn : connections) {
+					conn.close();
+				}
+			} catch (SQLException e) {
+				throw new RuntimeException(e);
+			}
+		}
+	}
+
	/**
	 * Closes every pooled connection and empties the pool. All closes are
	 * attempted; if any fail, the last failure is rethrown as an IOException.
	 */
	public void close() throws IOException {
		Exception ex = null;
		for (Connection conn : pool.keySet()) {
			try {
				conn.close();
			} catch (Exception e) { ex = e; }
		}
		pool.clear();
		if(ex != null) {
			throw new IOException(ex);
		}
	}

	/**
	 * Invoked when a proxied connection is "closed": returns the real
	 * connection to the pool, or physically closes it and releases a permit.
	 *
	 * NOTE(review): the pool.size() <= maxSize test admits up to maxSize + 1
	 * pooled entries — confirm whether "<" was intended.
	 */
	private void closeConnection(Connection realConnection) throws SQLException {
		synchronized (pool) {
			if (pool.size() <= maxSize) {
				pool.put(realConnection, new Date());
				return;
			}
		}

		try {
			realConnection.close();
		} finally {
			semaphore.release();
		}
	}
+
	/** Borrows a connection using the pool's configured credentials. */
	public Connection getConnection() throws SQLException {
		return getConnection(null, null);
	}

	/**
	 * Borrows an idle pooled connection when available; otherwise creates a new
	 * one, bounded by the semaphore (pool.max), waiting up to waitTime seconds
	 * for a permit.
	 *
	 * @throws SQLException when a new physical connection cannot be created
	 */
	public Connection getConnection(String username, String password) throws SQLException {
		synchronized (pool) {
			if (!pool.isEmpty()) {
				// reuse any idle pooled connection, restored to auto-commit
				Connection realConn = pool.keySet().iterator().next();
				pool.remove(realConn);

				realConn.setAutoCommit(true);

				return getProxyConnection(realConn);
			}
		}

		try {
			if(semaphore.tryAcquire(waitTime,TimeUnit.SECONDS)) {
				return getProxyConnection(getRealConnection(username, password));
			}else {
				throw new RuntimeException("Connection pool is full: "+maxSize);
			}
		}catch(SQLException e) {
			// creation failed: give the permit back before propagating
			semaphore.release();
			throw e;
		} catch (InterruptedException e) {
			// NOTE(review): interrupt status is not restored here — consider
			// Thread.currentThread().interrupt() before wrapping.
			throw new RuntimeException(e);
		}
	}
+	
+	private Connection getProxyConnection(final Connection realConnection) {		
+		InvocationHandler handler = new InvocationHandler() {
+			public Object invoke(Object proxy, Method method, Object[] params) throws Exception {
+				Object ret = null;
+				if ("close".equals(method.getName())) {
+					closeConnection(realConnection);
+				}else if ("unwrap".equals(method.getName())) {
+					ret=realConnection;
+				} else {
+					ret = method.invoke(realConnection, params);
+				}
+				return ret;
+			}
+		};
+		return (Connection) Proxy.newProxyInstance(Connection.class.getClassLoader(), new Class[] { Connection.class }, handler);
+	}
+
+	
+
+	protected Connection getRealConnection(String username, String password) throws SQLException {
+		try {
+			return connectionService.getConnection();
+		} catch (Exception e) {
+			throw new RuntimeException(e);
+		}
+	}
+
	/** Merges additional configuration properties into this pool's snapshot. */
	public void setProperties(Properties properties){
		this.connProps.putAll(properties);
	}

	/** Not supported: always returns null. */
	public PrintWriter getLogWriter() throws SQLException {
		return null;
	}

	/** Not supported: ignored. */
	public void setLogWriter(PrintWriter out) throws SQLException {
	}

	/** Not supported: ignored. */
	public void setLoginTimeout(int seconds) throws SQLException {

	}

	/** Not supported: always 0. */
	public int getLoginTimeout() throws SQLException {
		return 0;
	}

	/** Not supported: always returns null. */
	public java.util.logging.Logger getParentLogger() throws SQLFeatureNotSupportedException {
		return null;
	}

	// NOTE(review): JDBC expects unwrap to return the wrapped object or throw;
	// this stub always returns null — confirm no caller relies on it.
	public <T> T unwrap(Class<T> iface) throws SQLException {
		return null;
	}

	/** Not supported: always false. */
	public boolean isWrapperFor(Class<?> iface) throws SQLException {
		return false;
	}


	/** Idle validation is not implemented for this simple pool. */
	public void setIdleValidationQuery(int idleInSeconds,String validationQuery){
		//do noting
	}

	/** @return hard cap on physical connections ("pool.max"). */
	public int getMaxSize() {
		return maxSize;
	}

	/** @return warm-up connection count ("pool.min"). */
	public int getMinSize() {
		return minSize;
	}

	/** @return seconds to wait for a free permit ("pool.waitTime"). */
	public int getWaitTime() {
		return waitTime;
	}

	/** @return the configuration properties this pool was created with. */
	public Properties getConnProps() {
		return connProps;
	}
+}

+ 140 - 0
hadoop-auth/SimpleHBaseConnectionServiceImpl.java

@@ -0,0 +1,140 @@
+package com.primeton.dsp.datarelease.data.bdata;
+
+
+import com.primeton.dsp.datarelease.server.model.DspHbaseResource;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.ConnectionFactory;
+import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.exceptions.HBaseException;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.util.Set;
+
+/**
+ *
+ * HBase 无认证,连接 ZK 获得连接
+ *
+ *
+ * <pre>
+ *
+ * Created by zhaopx.
+ * User: zhaopx
+ * Date: 2020/4/21
+ * Time: 18:02
+ *
+ * </pre>
+ *
+ * @author zhaopx
+ */
+@Slf4j
+public class SimpleHBaseConnectionServiceImpl implements HBaseConnectionService, Closeable {
+
+
+    /**
+     * Hive 数据源
+     */
+    final DspHbaseResource hbaseResource;
+
+
+    /**
+     * HBase 链接
+     */
+    final Connection connection;
+
+
+    public SimpleHBaseConnectionServiceImpl(DspHbaseResource hbaseResource) {
+        this.hbaseResource = hbaseResource;
+
+        Configuration hbaseConf = null;
+        if(StringUtils.isNotBlank(hbaseResource.getAuthUser())) {
+            AuthPrincipalCreator authPrincipalCreator = AuthPrincipalCreator.useDataReleaseConf(hbaseResource.getAuthBasePath());
+            AuthPrincipal kerberosPrincipal = authPrincipalCreator.getKerberosPrincipal(hbaseResource.getAuthUser());
+
+            // 分别加载 core、hdfs、hbase site 文件
+            Configuration conf = new Configuration();
+            try {
+                if (kerberosPrincipal.getCoreSite() != null) {
+                    conf.addResource(kerberosPrincipal.getCoreSite().toURL());
+                    log.info("add config: {}", kerberosPrincipal.getCoreSite().getAbsolutePath());
+                }
+
+                if (kerberosPrincipal.getHdfsSite() != null) {
+                    conf.addResource(kerberosPrincipal.getHdfsSite().toURL());
+                    log.info("add config: {}", kerberosPrincipal.getHdfsSite().getAbsolutePath());
+                }
+
+                conf.reloadConfiguration();
+
+                hbaseConf = HBaseConfiguration.create(conf);
+                if (kerberosPrincipal.getHBaseSite() != null) {
+                    hbaseConf.addResource(kerberosPrincipal.getHBaseSite().toURL());
+                    log.info("add config: {}", kerberosPrincipal.getHBaseSite().getAbsolutePath());
+                }
+                hbaseConf.reloadConfiguration();
+            } catch (Exception e) {
+                throw new IllegalStateException(e);
+            }
+        } else {
+            hbaseConf = getConf();
+        }
+
+        try {
+            this.connection = ConnectionFactory.createConnection(hbaseConf);
+        } catch (Exception e) {
+            throw new IllegalStateException(e);
+        }
+    }
+
+
+    public Configuration getConf() {
+        Configuration conf = HBaseConfiguration.create(new Configuration()); //配置类
+        if(StringUtils.isNotBlank(hbaseResource.getHbaseMaster())) conf.set("hbase.master", hbaseResource.getHbaseMaster());
+        if(StringUtils.isNotBlank(hbaseResource.getHbaseZookeeperQuorum())) conf.set("hbase.zookeeper.quorum", hbaseResource.getHbaseZookeeperQuorum());
+        if(StringUtils.isNotBlank(hbaseResource.getZookeeperClientPort())) conf.set("hbase.zookeeper.property.clientPort", hbaseResource.getZookeeperClientPort());
+        if(StringUtils.isNotBlank(hbaseResource.getZnode())) conf.set("zookeeper.znode.parent", hbaseResource.getZnode());
+
+        conf.setInt("hbase.client.retries.number", 2);
+
+        return conf;
+
+    }
+
+    @Override
+    public boolean doAuth() {
+        log.info("hbase 无需认证,通过。");
+        return true;
+    }
+
+    @Override
+    public Admin getConnection() throws HBaseException {
+        try {
+            return connection.getAdmin();
+        } catch (Exception e) {
+            throw new HBaseException("连接 HBase 异常。", e);
+        }
+    }
+
+    @Override
+    public Table getTable(String tableName) throws HBaseException {
+        try {
+            return connection.getTable(TableName.valueOf(tableName));
+        } catch (IOException e) {
+            throw new HBaseException("无法获取Hbase " + tableName + " 表链接。", e);
+        }
+    }
+
+    @Override
+    public void close() throws IOException {
+        log.info("关闭 HBase 连接。");
+        if(connection != null) {
+            connection.close();
+        }
+    }
+}

+ 61 - 0
hadoop-auth/SimpleHiveConnectionServiceImpl.java

@@ -0,0 +1,61 @@
+package com.primeton.dsp.datarelease.data.bdata;
+
+
+import com.primeton.dsp.datarelease.server.model.DspHiveResource;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.commons.lang.StringUtils;
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.SQLException;
+
+/**
+ *
+ * Hive 无认证,以账号和密码的方式获得连接
+ *
+ *
+ * <pre>
+ *
+ * Created by zhaopx.
+ * User: zhaopx
+ * Date: 2020/4/21
+ * Time: 18:02
+ *
+ * </pre>
+ *
+ * @author zhaopx
+ */
+@Slf4j
+public class SimpleHiveConnectionServiceImpl implements HiveConnectionService {
+
+
+    /**
+     * Hive 数据源
+     */
+    final DspHiveResource hiveResource;
+
+
+    public SimpleHiveConnectionServiceImpl(DspHiveResource hiveResource) {
+        this.hiveResource = hiveResource;
+    }
+
+    @Override
+    public Connection getConnection() throws SQLException {
+        String hUrl = "jdbc:hive2://ip:port/default";
+        if(StringUtils.isNotBlank(hiveResource.getHiveUrl())) {
+            hUrl = hiveResource.getHiveUrl();
+        } else {
+            hUrl = hUrl.replace("ip", hiveResource.getHiveIp());
+            hUrl = hUrl.replace("port", hiveResource.getHivePort()+"");
+            hUrl = hUrl.replace("default", hiveResource.getHiveDbName());
+        }
+
+        log.info("测试连接:{}", hUrl);
+        try {
+            Class.forName("org.apache.hive.jdbc.HiveDriver");
+        } catch (ClassNotFoundException e) {
+            throw new SQLException("找不到Hive驱动:org.apache.hive.jdbc.HiveDriver.", e);
+        }
+        return DriverManager.getConnection(hUrl, hiveResource.getHiveDbUser() , hiveResource.getHivePassword());
+    }
+}

+ 51 - 0
java-commons-cache/CacheEvict.java

@@ -0,0 +1,51 @@
+package com.primeton.dgs.kernel.core.cache;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Inherited;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
/**
 *
 * Cache eviction policy for a method: entries named here are removed
 * before the cacheability of the call itself is evaluated.
 *
 *
 * <pre>
 *
 * Created by zhaopx.
 * User: zhaopx
 * Date: 2019/11/8
 * Time: 17:45
 *
 * </pre>
 *
 * @author zhaopx
 */
@Inherited
@Target(ElementType.METHOD)
@Retention(RetentionPolicy.RUNTIME)
public @interface CacheEvict {

    /**
     *
     * Exact cache keys to evict; several keys may be removed in one call.
     *
     * Each key supports ${var} variable substitution.
     */
    String[] keys() default {};

    /**
     * Prefix-based (fuzzy) eviction: every key starting with this prefix is
     * removed. Blank (the default) disables prefix eviction.
     * NOTE(review): the original doc's first line ("trigger condition")
     * looked like a copy-paste leftover from a different attribute.
     * @return the key prefix to evict, or "" for none
     */
    String keyPrefix() default "";

    /**
     * true clears the entire cache; defaults to false.
     *
     * @return whether to clear all entries
     */
    boolean allEntries() default false;
}

+ 39 - 0
java-commons-cache/Cacheable.java

@@ -0,0 +1,39 @@
+package com.primeton.dgs.kernel.core.cache;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Inherited;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
/**
 * Marks a method whose return value should be cached.
 *
 * <pre>
 *
 * Created by zhaopx.
 * User: zhaopx
 * Date: 2019/9/24
 * Time: 14:27
 *
 * </pre>
 *
 * @author zhaopx
 */
@Inherited
@Target(ElementType.METHOD)
@Retention(RetentionPolicy.RUNTIME)
public @interface Cacheable {


    /**
     * Cache key template; supports ${var} variable substitution.
     * @return the cache key template
     */
    String key();


    /**
     * Expiry in seconds. 0 or -1 (the default) means never expire.
     * @return the expiry in seconds
     */
    int expire() default -1;
}

+ 102 - 0
java-commons-cache/ICache.java

@@ -0,0 +1,102 @@
+/** Copyright 2009 by primeton Corporation.
+ *   
+ * All rights reserved.
+ *
+ * This software is the confidential and proprietary information of
+ * primeton Corporation ('Confidential Information').  You
+ * shall not disclose such Confidential Information and shall use
+ * it only in accordance with the terms of the license agreement
+ * you entered into with primeton.
+ */
+package com.primeton.dgs.kernel.core.cache;
+
+import javax.validation.constraints.NotNull;
+import java.io.Closeable;
+
/**
 * Generic cache abstraction implemented by the concrete backends
 * (Ehcache / memcached / redis).
 *
 * @author xiongbin
 *
 * @author zhaopx Modify:2019/09/29 made generic; deprecated replace; added remove/size
 * @Version 2012-8-28
 */
public interface ICache<T> extends Closeable {

	/**
	 * Stores a value in the cache without an expiry time.
	 * @param key cache key
	 * @param value value to store
	 */
	void add(@NotNull String key, @NotNull T value);
	
	/**
	 * Stores a value that expires after {@code exp} seconds.
	 * @param key cache key
	 * @param exp expiry time in seconds
	 * @param value value to store
	 */
	void add(@NotNull String key, int exp, @NotNull T value);
	
	/**
	 * Replaces a cached value.
	 *
	 * Use {@link #add(String, Object)} instead.
	 * @param key cache key
	 * @param value value to store
	 */
	@Deprecated
	void replace(@NotNull String key, @NotNull T value);
	
	/**
	 * Use {@link #add(String, int, Object)} instead.
	 * @param key cache key
	 * @param exp expiry time in seconds
	 * @param value value to store
	 */
	@Deprecated
	void replace(@NotNull String key, int exp, @NotNull T value);
	
	/**
	 * Fetches a cached value.
	 * @param key cache key
	 * @return the cached value, or null when absent
	 */
	T get(@NotNull String key);


	/**
	 * Removes one entry regardless of its expiry time.
	 * @param key cache key
	 * @return the removed value, or null when absent
	 */
	T remove(@NotNull String key);


	/**
	 * Removes every entry whose key starts with the given prefix
	 * (an implicit "*" is appended).
	 * @param prefix key prefix
	 * @return the number of removed entries
	 */
	int removeByPrefix(@NotNull String prefix);

	/**
	 * Clears the whole cache.
	 */
	void clear();


	/**
	 *
	 * add @author zhaopx at: 2019-08-20 17:00
	 *
	 * Returns the number of cached entries.
	 * @return a value >= 0 for the real size, or -1 when the backend cannot report it
	 */
	int size();
	
}

+ 26 - 0
java-commons-cache/impl/AbstractCacheService.java

@@ -0,0 +1,26 @@
+package com.primeton.dgs.kernel.core.cache.impl;
+
+import org.springframework.context.ApplicationContext;
+
+import com.primeton.dgs.kernel.core.cache.ICacheKeyCreator;
+import com.primeton.dgs.kernel.core.cache.ICacheService;
+import com.primeton.dgs.kernel.core.common.SpringContextHelper;
+
+/**
+ * 需要进行缓存管理的服务父类
+ * @author liliang
+ * @version 2013-11-26
+ */
+public abstract class AbstractCacheService implements ICacheService{
+
+	public ICacheKeyCreator getKeyCreator()throws Exception{
+		ApplicationContext context = SpringContextHelper.getSpringContext();
+		return (ICacheKeyCreator)context.getBean(getKeyBeanName());
+	}
+	
+	/**
+	 * 获取key创建者的bean name
+	 * @return
+	 */
+	public abstract String getKeyBeanName();
+}

+ 149 - 0
java-commons-cache/impl/CacheFactory.java

@@ -0,0 +1,149 @@
+package com.primeton.dgs.kernel.core.cache.impl;
+
+import com.primeton.dgs.kernel.core.cache.ICache;
+import org.apache.commons.lang.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.FactoryBean;
+import org.springframework.context.ApplicationContext;
+import org.springframework.context.support.ClassPathXmlApplicationContext;
+
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Optional;
+import java.util.Properties;
+
+/**
+ * <pre>
+ *
+ * Created by zhaopx.
+ * User: zhaopx
+ * Date: 2019/11/14
+ * Time: 14:22
+ *
+ * </pre>
+ *
+ * @author zhaopx
+ */
+public class CacheFactory implements FactoryBean  {
+
+
+    /**
+     * logger
+     */
+    private static Logger logger = LoggerFactory.getLogger(CacheFactory.class);
+
+    /**
+     * 配置
+     */
+    private final Properties config;
+
+
+    /**
+     * Cache
+     */
+    private final ICache cache;
+
+    /**
+     * 默认是 ehcache
+     * @param config
+     */
+    public CacheFactory(Properties config) {
+        this(config == null ? "ehcache" : config.getProperty("type", "ehcache"), config);
+    }
+
+    public CacheFactory(String cacheType, Properties props) {
+        this.config = Optional.ofNullable(props).orElse(new Properties());
+        if(StringUtils.isBlank(cacheType) || "mem".equalsIgnoreCase(cacheType) ||
+                "ehcache".equalsIgnoreCase(cacheType)) {
+            logger.info("use ehcache.");
+            this.cache = initEhcache();
+        } else if("redis".equalsIgnoreCase(cacheType)) {
+            String host = this.config.getProperty("host");
+            if(StringUtils.isBlank(host)) {
+                throw new IllegalArgumentException("redis cache host must not be blank.");
+            }
+            int port = Integer.parseInt(this.config.getProperty("port", "3306"));
+            int db = Integer.parseInt(this.config.getProperty("db", "0"));
+            logger.info("use redis cache {}:{}/{}", host, port, db);
+            this.cache = initRedisCache(host, port, db);
+        } else if("memcache".equalsIgnoreCase(cacheType)) {
+            String address = this.config.getProperty("address");
+            if(StringUtils.isBlank(address)) {
+                throw new IllegalArgumentException("memcache address must not be blank. Usage: host1:port,host2:port");
+            }
+            logger.info("use memcache address {}", address);
+            this.cache = initMemCache(address);
+        } else {
+            throw new IllegalArgumentException("unknown cache type " + cacheType);
+        }
+    }
+
+    /**
+     * 初始化 MemCache
+     * @return
+     */
+    private ICache initMemCache(String hosts) {
+        List<InetSocketAddress> addresses = new ArrayList<>();
+        String[] addrs = hosts.split(",");
+        for (String addr : addrs) {
+            String[] hostAndPort = addr.split(":");
+            addresses.add(new InetSocketAddress(StringUtils.trim(hostAndPort[0]),
+                    Integer.parseInt(StringUtils.trim(hostAndPort[1]))));
+        }
+        net.rubyeye.xmemcached.XMemcachedClientBuilder clientBuilder =
+                new net.rubyeye.xmemcached.XMemcachedClientBuilder(addresses);
+        clientBuilder.setConnectionPoolSize(2);
+        clientBuilder.setCommandFactory(new net.rubyeye.xmemcached.command.TextCommandFactory());
+        clientBuilder.setSessionLocator(new net.rubyeye.xmemcached.impl.KetamaMemcachedSessionLocator());
+        clientBuilder.setTranscoder(new net.rubyeye.xmemcached.transcoders.SerializingTranscoder());
+        try {
+            return new Memcached(clientBuilder.build());
+        } catch (IOException e) {
+            throw new IllegalStateException(e);
+        }
+    }
+
+    /**
+     * 初始化 Redis 客户端
+     * @param host
+     * @param port
+     * @param db
+     * @return
+     */
+    private ICache initRedisCache(String host, int port, int db) {
+        return new Redised(host, port, db);
+    }
+
+    /**
+     * 初始化 EHCache 客户端
+     * @return
+     */
+    private ICache initEhcache() {
+        return new Ehcache();
+    }
+
+    @Override
+    public Object getObject() throws Exception {
+        return cache;
+    }
+
+    @Override
+    public Class getObjectType() {
+        return ICache.class;
+    }
+
+    @Override
+    public boolean isSingleton() {
+        return true;
+    }
+
+
+    public static void main(String[] args) {
+        ApplicationContext context = new ClassPathXmlApplicationContext("/spring/corebean/context-cache.xml");
+        ICache cache = (ICache)context.getBean("cache");
+        System.out.println(cache.size());
+    }
+}

+ 254 - 0
java-commons-cache/impl/CacheInterceptorImpl.java

@@ -0,0 +1,254 @@
+/** Copyright 2009 by primeton Corporation.
+ *   
+ * All rights reserved.
+ *
+ * This software is the confidential and proprietary information of
+ * primeton Corporation ('Confidential Information').  You
+ * shall not disclose such Confidential Information and shall use
+ * it only in accordance with the terms of the license agreement
+ * you entered into with primeton.
+ */
+package com.primeton.dgs.kernel.core.cache.impl;
+
+import java.lang.reflect.Method;
+import java.security.MessageDigest;
+import java.util.HashMap;
+import java.util.Map;
+
+import com.primeton.dgs.kernel.core.cache.CacheEvict;
+import com.primeton.dgs.kernel.core.cache.Cacheable;
+import com.primeton.dgs.kernel.core.util.StringFullUtils;
+import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.SystemUtils;
+import org.aspectj.lang.ProceedingJoinPoint;
+
+import com.primeton.dgs.kernel.core.cache.ICache;
+import com.primeton.dgs.kernel.core.cache.ICacheInterceptor;
+import com.primeton.dgs.kernel.core.cache.ICacheKeyCreator;
+import org.aspectj.lang.Signature;
+import org.aspectj.lang.reflect.MethodSignature;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * @author xiongbin
+ * @author zhaopx 缓存拦截器
+ * @Version 2012-8-28
+ */
+public class CacheInterceptorImpl implements ICacheInterceptor {
+	private static final Logger log = LoggerFactory.getLogger(CacheInterceptorImpl.class);
+
+    /**
+     * 缓存
+     */
+	private ICache<Object> cache;
+
+
+    /**
+     * 缓存的 Key 生成逻辑管理器
+     */
+	private DefaultCacheKeyCreatorManager manager;
+
+
+	public Object add(ProceedingJoinPoint jp){
+		try {
+			Object[] args = jp.getArgs();
+			// 通过 AOP 拦截的类全名,获得该类的 ICacheKeyCreator,缓存 ID 生成器
+			// ICacheKeyCreator 一定是非空的
+			ICacheKeyCreator creator = manager.getCacheKeyCreator(jp.getTarget().getClass().getName());
+
+			// 获取到该方法需要缓存的 Name
+			String methodName = jp.getSignature().getName();
+
+			// 通过参数生成缓存 ID
+			String key = getKey(creator.getCacheKey(args));
+			if(StringUtils.isNotBlank(key)){
+				final String cacheKey = methodName + "_" + key;
+				synchronized (this) {
+					Object result = cache.get(cacheKey);
+					if(result == null){
+						result = jp.proceed();
+						// 调用取得返回值缓存,如果是从缓存中取出,无须继续放入缓存,减少
+						cache.add(cacheKey, creator.getExpTime(methodName), result);
+					}
+					return result;
+				}
+			}
+			// 无参数函数无法缓存,或者缓存的 Key 无法生成,不缓存
+			return jp.proceed();
+		} catch (Throwable e) {
+			log.error("AOP 调用接口异常。", e);
+			throw new IllegalStateException(e);
+		}
+	}
+
+
+	/**
+	 * 根据 接口的注解支持缓存
+     *
+     * add zhaopx at: 2019/09/24
+	 * @param jp
+	 * @return
+	 */
+	public Object cachedAnno(ProceedingJoinPoint jp){
+		try {
+			Object[] args = jp.getArgs();
+			Class<?> aClass = jp.getTarget().getClass();
+            Signature signature = jp.getSignature();
+            String methodName = signature.getName();
+            MethodSignature msig = null;
+            if (!(signature instanceof MethodSignature)) {
+                throw new IllegalArgumentException("该注解只能用于方法");
+            }
+            msig = (MethodSignature) signature;
+            Class[] parameterTypes = msig.getParameterTypes();
+            Method cachedMethod = aClass.getMethod(msig.getName(), parameterTypes);
+
+			// 方法参数,构成一个 Map,用于拼接 Cache key
+			final Map<String, String> argsMap = new HashMap<>(parameterTypes.length);
+			argsMap.put("methodName", methodName);
+			argsMap.put("className", aClass.getSimpleName());
+			for (int i = 0; i < args.length; i++) {
+				String argValue = (args[i] == null ? "" : String.valueOf(args[i]));
+				argsMap.put("arg" + i, argValue);
+			}
+
+			// JDK8 才执行
+			if(SystemUtils.IS_JAVA_1_8) {
+				int i = 0;
+				// 这样的方式导入,否则 JDK7 一下报错
+				java.lang.reflect.Parameter[] parameters = cachedMethod.getParameters();
+				for (java.lang.reflect.Parameter parameter : parameters) {
+					String argValue = (args[i] == null ? "" : String.valueOf(args[i]));
+					argsMap.put(parameter.getName(), argValue);
+					i++;
+				}
+			}
+
+            // 删除缓存的策略
+			CacheEvict cacheEvictAnno = cachedMethod.getAnnotation(CacheEvict.class);
+			if(cacheEvictAnno != null) {
+				if(cacheEvictAnno.allEntries()) {
+					// 删除所有缓存键
+					cache.clear();
+				} else {
+					String[] keys = cacheEvictAnno.keys();
+					for (String key : keys) {
+						// 依次删除所有缓存 Key
+						// 宏替换,替换 ${} 内的变量
+						key = StringFullUtils.getFullString(key, argsMap);
+						cache.remove(key);
+						log.warn("remove cache key {}", key);
+					}
+					// 前缀删除,当前还看怎么支持
+					if(StringUtils.isNotBlank(cacheEvictAnno.keyPrefix())) {
+						try {
+							cache.removeByPrefix(cacheEvictAnno.keyPrefix());
+						} catch (Exception e) {}
+					}
+				}
+			}
+
+            // 获取方法注解
+            Cacheable cachedAnno = cachedMethod.getAnnotation(Cacheable.class);
+            if(cachedAnno == null) {
+                // 接口上没有 Cacheable 的注解,不进行缓存
+                // log.info("execute no cache method {}.{}", aClass.getName(), methodName);
+                // 无参数函数无法缓存,或者缓存的 Key 无法生成,不缓存
+                return jp.proceed();
+            } else {
+            	final long start = System.currentTimeMillis();
+                String key = cachedAnno.key();
+                // 宏替换,替换 ${} 内的变量
+                key = StringFullUtils.getFullString(key, argsMap);
+                Object result = cache.get(key);
+                if(result == null){
+                    log.info("no cache key {}, execute method {}.{}", key, aClass.getName(), methodName);
+                    result = jp.proceed();
+                    if(cachedAnno.expire() > 0) {
+                        // 有缓存过期时间的
+                        cache.add(key, cachedAnno.expire(), result);
+                    } else {
+                        cache.add(key, result);
+                    }
+                } else {
+                    log.info("cached key {}, return result from cache(exe {}, cost time {} ms).",
+							key,
+							methodName,
+							(System.currentTimeMillis() - start));
+                }
+                return result;
+            }
+		} catch (Throwable e) {
+			log.error("AOP 调用接口异常。", e);
+			throw new IllegalStateException(e.getMessage());
+		}
+	}
+
+	private String getKey(String key) {
+		if(key.matches("[*:]")){
+			return "";
+		}
+		
+		System.out.println("memcachedKEY:"+key);
+		try {
+			key = encryptMD5(key);
+		} catch (Exception e) {
+			log.error("生成 md5 异常。", e);
+		}
+		return key;
+	}
+
+	public void refresh() {
+		try {
+			cache.clear();
+		} catch (Exception e) {
+			log.error("清除缓存异常。", e);
+		}
+	}
+
+
+    public DefaultCacheKeyCreatorManager getManager() {
+        return manager;
+    }
+
+    public void setManager(DefaultCacheKeyCreatorManager manager) {
+        this.manager = manager;
+    }
+
+    public ICache getCache() {
+        return cache;
+    }
+
+    public void setCache(ICache cache) {
+        this.cache = cache;
+    }
+
+
+    /**
+	 * 使用MD5加密
+	 * @param originalText 明文
+	 * @return 密文
+	 * @throws Exception JDK不支持MD5加密算法
+	 */
+	private String encryptMD5(String originalText) throws Exception {
+		MessageDigest md5 = MessageDigest.getInstance("MD5");
+		md5.update(originalText.getBytes());
+		byte[] digest = md5.digest();
+		
+		StringBuffer sb = new StringBuffer();
+		for (int i = 0; i < digest.length; i++) {
+			String s = Integer.toHexString(digest[i] & 0XFF);
+			if (s.length() == 1) {
+				sb.append(i).append(s);
+			} else {
+				sb.append(s);
+			}
+			if (i < digest.length-1) {
+				sb.append(i);
+			}
+		}
+		return sb.toString().toUpperCase();
+	}
+
+}

+ 159 - 0
java-commons-cache/impl/Ehcache.java

@@ -0,0 +1,159 @@
+/** Copyright 2009 by primeton Corporation.
+ *   
+ * All rights reserved.
+ *
+ * This software is the confidential and proprietary information of
+ * primeton Corporation ('Confidential Information').  You
+ * shall not disclose such Confidential Information and shall use
+ * it only in accordance with the terms of the license agreement
+ * you entered into with primeton.
+ */
+package com.primeton.dgs.kernel.core.cache.impl;
+
+import net.sf.ehcache.CacheManager;
+import net.sf.ehcache.Element;
+
+import com.primeton.dgs.kernel.core.cache.ICache;
+import net.sf.ehcache.search.Attribute;
+import net.sf.ehcache.search.Query;
+import net.sf.ehcache.search.Result;
+import net.sf.ehcache.search.Results;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.net.URL;
+import java.util.List;
+
+/**
+ * @author xiongbin
+ * @Version 2012-9-4
+ */
+public class Ehcache implements ICache<Object> {
+
+
+	private static Logger logger = LoggerFactory.getLogger(Ehcache.class);
+
+
+	/**
+	 * EHCache
+	 */
+	private final net.sf.ehcache.Ehcache cache;
+	
+	public Ehcache(){
+        URL resource = Ehcache.class.getResource("/ehcache.xml");
+        CacheManager manager = CacheManager.create(resource);
+		net.sf.ehcache.Ehcache ehcache = manager.getCache("Cache");
+		if(ehcache==null){
+			manager.addCache("Cache");
+			ehcache = manager.getEhcache("Cache");
+		}
+		this.cache = ehcache;
+	}
+
+	/* (non-Javadoc)
+	 * @see com.primeton.dgs.kernel.common.cache.ICache#add(java.lang.String, java.lang.Object)
+	 */
+	@Override
+	public void add(String key, Object value) {
+		Element element = new Element(key, value);
+		cache.put(element);
+	}
+
+	/* (non-Javadoc)
+	 * @see com.primeton.dgs.kernel.common.cache.ICache#add(java.lang.String, java.lang.Integer, java.lang.Object)
+	 */
+	@Override
+	public void add(String key, int exp, Object value) {
+		Element element = new Element(key, value);
+		element.setTimeToLive(exp);
+		cache.put(element);
+	}
+
+	/* (non-Javadoc)
+	 * @see com.primeton.dgs.kernel.common.cache.ICache#replace(java.lang.String, java.lang.Object)
+	 */
+	@Override
+	public void replace(String key, Object value) {
+		cache.put(new Element(key, value));
+	}
+
+	/* (non-Javadoc)
+	 * @see com.primeton.dgs.kernel.common.cache.ICache#replace(java.lang.String, java.lang.Integer, java.lang.Object)
+	 */
+	@Override
+	public void replace(String key, int exp, Object value) {
+		add(key, exp, value);
+	}
+
+	/* (non-Javadoc)
+	 * @see com.primeton.dgs.kernel.common.cache.ICache#get(java.lang.String)
+	 */
+	@Override
+	public Object get(String key) {
+		Element element = cache.get(key);
+		if(element != null){
+			return element.getValue();
+		}
+		return null;
+	}
+
+
+	/**
+	 * 移除某条缓存
+	 * @param key 缓存的 KEY
+	 * @return
+	 */
+	@Override
+	public Object remove(String key) {
+		Element element = cache.get(key);
+		cache.remove(key);
+		logger.info("remove cache key: {}", key);
+		if(element != null){
+			return element.getObjectValue();
+		}
+		return null;
+	}
+
+
+	@Override
+	public int removeByPrefix(String prefix) {
+        Attribute<Object> keySearch = cache.getSearchAttribute("key");
+		//创建一个用于查询的Query对象
+		Query query = cache.createQuery();
+		//给当前query添加一个筛选条件——可查询属性name的值等于“name1”
+		query.addCriteria(keySearch.ilike(prefix+"*"));
+
+		// ehcache 模糊删除
+		query.includeAttribute(keySearch);
+		query.includeKeys();
+		Results result = query.execute();
+		List<Result> keys = result.all();
+		for (Result key : keys) {
+			remove((String)key.getKey());
+		}
+		return result.size();
+	}
+
+	/* (non-Javadoc)
+	 * @see com.primeton.dgs.kernel.common.cache.ICache#clear()
+	 */
+	@Override
+	public void clear() {
+		cache.removeAll();
+	}
+
+    /**
+     * 获得缓存队列的长度
+     * @return 返回大于等于 0 则代表真实长度,返回 -1 代表无法获得
+     */
+    @Override public int size() {
+        return cache.getSize();
+    }
+
+
+	@Override
+	public void close() throws IOException {
+		cache.flush();
+	}
+}

+ 144 - 0
java-commons-cache/impl/Memcached.java

@@ -0,0 +1,144 @@
+/** Copyright 2009 by primeton Corporation.
+ *   
+ * All rights reserved.
+ *
+ * This software is the confidential and proprietary information of
+ * primeton Corporation ('Confidential Information').  You
+ * shall not disclose such Confidential Information and shall use
+ * it only in accordance with the terms of the license agreement
+ * you entered into with primeton.
+ */
+package com.primeton.dgs.kernel.core.cache.impl;
+
+import net.rubyeye.xmemcached.MemcachedClient;
+
+import com.primeton.dgs.kernel.core.cache.ICache;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.io.Serializable;
+
+/**
+ * @author xiongbin
+ * @Version 2012-8-28
+ */
+public class Memcached implements ICache<Serializable> {
+
+
+
+	private static Logger logger = LoggerFactory.getLogger(Memcached.class);
+
+
+	private MemcachedClient memcachedClient;
+
+    public Memcached() {
+    }
+
+    public Memcached(MemcachedClient memcachedClient) {
+        this.memcachedClient = memcachedClient;
+    }
+
+    @Override
+	public void add(String key, Serializable value) {
+		try {
+			memcachedClient.add(key, 0, value);
+		} catch (Exception e) {
+			throw new IllegalStateException(e);
+		}
+	}
+
+	@Override
+	public void replace(String key, Serializable value) {
+		try {
+			memcachedClient.replace(key, 0, value);
+		} catch (Exception e) {
+			throw new IllegalStateException(e);
+		}
+	}
+
+	@Override
+	public Serializable get(String key) {
+		try {
+		    return memcachedClient.get(key);
+		} catch (Exception e) {
+			throw new IllegalStateException(e);
+		}
+	}
+
+
+    /**
+     * 移除某条缓存
+     * @param key 缓存的 KEY
+     * @return
+     */
+    @Override
+    public Serializable remove(String key) {
+        try {
+            Serializable obj = memcachedClient.get(key);
+            memcachedClient.delete(key);
+            return obj;
+        } catch (Exception e) {
+            throw new IllegalStateException(e);
+        }
+    }
+
+
+
+	@Override
+	public int removeByPrefix(String prefix) {
+		logger.warn("can not support removeByPrefix.");
+		return 0;
+	}
+
+
+	@Override
+	public void clear() {
+		try {
+		    memcachedClient.flushAll();
+		} catch (Exception e) {
+			throw new IllegalStateException(e);
+		}
+	}
+
+	@Override
+	public void add(String key, int exp, Serializable value) {
+		try {
+		    memcachedClient.add(key, exp, value);
+		} catch (Exception e) {
+			throw new IllegalStateException(e);
+		}
+	}
+
+	@Override
+	public void replace(String key, int exp, Serializable value) {
+		try {
+			memcachedClient.replace(key, exp, value);
+		} catch (Exception e) {
+			throw new IllegalStateException(e);
+		}
+	}
+
+	/**
+	 * 获得缓存队列的长度
+	 * @return 返回大于等于 0 则代表真实长度,返回 -1 代表无法获得
+	 */
+	@Override public int size() {
+		// memcache 无法获取当前长度
+		return -1;
+	}
+
+
+	@Override
+	public void close() throws IOException {
+		memcachedClient.shutdown();
+	}
+
+	public MemcachedClient getMemcachedClient() {
+        return memcachedClient;
+    }
+
+    public void setMemcachedClient(MemcachedClient memcachedClient) {
+        this.memcachedClient = memcachedClient;
+    }
+}

+ 158 - 0
java-commons-cache/impl/Redised.java

@@ -0,0 +1,158 @@
+package com.primeton.dgs.kernel.core.cache.impl;
+
+import com.primeton.dgs.kernel.core.cache.ICache;
+import com.primeton.dgs.kernel.core.cache.serde.Serializer;
+import com.primeton.dgs.kernel.core.cache.serde.JdkSerializer;
+import com.primeton.dgs.kernel.core.cache.serde.StringSerializer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import redis.clients.jedis.Jedis;
+
+import java.io.IOException;
+import java.io.Serializable;
+import java.util.Set;
+
+/**
+ * <pre>
+ *
+ * Created by zhaopx.
+ * User: zhaopx
+ * Date: 2019/8/21
+ * Time: 18:55
+ *
+ * </pre>
+ *
+ * @author zhaopx
+ */
+public class Redised implements ICache<Serializable> {
+
+
+    private static Logger logger = LoggerFactory.getLogger(Redised.class);
+
+
+
+    /**
+     * Redis 的 Java Client
+     */
+    Jedis jedis;
+
+
+    /**
+     * redis db index
+     */
+    private final int db;
+
+    /**
+     * KEY 序列化
+     */
+    private final Serializer<String> KEY_SERDE = new StringSerializer();
+
+    /**
+     * Redis Cache 序列化方式
+     */
+    private final Serializer<Serializable> VALUE_SERDE = new JdkSerializer();
+
+
+    public Redised() {
+        this("localhost", 6379, 0);
+    }
+
+
+    public Redised(String host, int port){
+        this(host, port, 0);
+    }
+
+    public Redised(String host, int port, int db){
+        this.db = db;
+        jedis = new Jedis(host, port);
+        jedis.select(db);
+    }
+
+    @Override
+    public void add(String key, Serializable value) {
+        if(value == null) {
+            return;
+        }
+        jedis.set(KEY_SERDE.serialize(key), VALUE_SERDE.serialize(value));
+    }
+
+    @Override
+    public void add(String key, int exp, Serializable value) {
+        byte[] keybytes = KEY_SERDE.serialize(key);
+        jedis.set(keybytes, VALUE_SERDE.serialize(value));
+        jedis.expire(keybytes, exp);
+    }
+
+    @Override
+    public void replace(String key, Serializable value) {
+        jedis.set(KEY_SERDE.serialize(key), VALUE_SERDE.serialize(value));
+    }
+
+    @Override
+    public void replace(String key, int exp, Serializable value) {
+        add(key, exp, value);
+    }
+
+    @Override
+    public Serializable get(String key) {
+        if(key == null) {
+            return null;
+        }
+        byte[] bytes = jedis.get(KEY_SERDE.serialize(key));
+        if(bytes == null) {
+            return null;
+        }
+        return VALUE_SERDE.deserialize(bytes);
+    }
+
+
+    /**
+     * 移除某条缓存
+     * @param key 缓存的 KEY
+     * @return
+     */
+    @Override
+    public Serializable remove(String key) {
+        if(key == null) {
+            return null;
+        }
+        logger.info("remove cache key: {}", key);
+        return jedis.del(KEY_SERDE.serialize(key));
+    }
+
+
+    @Override
+    public int removeByPrefix(String prefix) {
+        Set<String> keys = jedis.keys(prefix+"*");
+        if(keys != null && keys.size() > 0) {
+            for (String key : keys) {
+                remove(key);
+            }
+        }
+        return keys == null ? 0 : keys.size();
+    }
+
+    @Override
+    public void clear() {
+        jedis.flushDB();
+    }
+
+    @Override
+    public int size() {
+        return jedis.dbSize().intValue();
+    }
+
+    @Override
+    public void close() throws IOException {
+        jedis.close();
+    }
+
+    public static void main(String[] args) {
+        Redised redis = new Redised("localhost", 6379);
+        int size = redis.size();
+        System.out.println(size);
+
+        Set<String> keys = redis.jedis.keys("listMetadataTreeNode*");
+        System.out.println(keys);
+    }
+}

+ 63 - 0
java-commons-cache/serde/JdkSerializer.java

@@ -0,0 +1,63 @@
+/*
+ * Copyright 2010-2011 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.primeton.dgs.kernel.core.cache.serde;
+
+
+import java.io.*;
+
+/**
+ * Java Serialization Redis serializer.
+ * Delegates to the default (Java based) serializer input Spring 3.
+ *
+ * @author zhaopx
+ */
+public class JdkSerializer implements Serializer<Serializable> {
+
+    private static final long serialVersionUID = 1L;
+
+    public JdkSerializer() {
+
+    }
+
+
+    @Override
+    public Serializable deserialize(byte[] bytes) {
+        try (ObjectInputStream in = new ObjectInputStream(new ByteArrayInputStream(bytes));){
+            Serializable o = (Serializable)in.readObject();
+            in.close();
+            return o;
+        } catch (Exception ex) {
+            throw new IllegalArgumentException("Cannot deserialize", ex);
+        }
+    }
+
+    @Override
+    public byte[] serialize(Serializable object) {
+        if (object == null) {
+            return new byte[0];
+        }
+        try {
+            ByteArrayOutputStream out = new ByteArrayOutputStream();
+            ObjectOutputStream outputStream = new ObjectOutputStream(out);
+            outputStream.writeObject(object);
+            outputStream.flush();
+            outputStream.close();
+            return out.toByteArray();
+        } catch (Exception ex) {
+            throw new IllegalArgumentException("Cannot serialize", ex);
+        }
+    }
+}

+ 45 - 0
java-commons-cache/serde/Serializer.java

@@ -0,0 +1,45 @@
+/*
+ * Copyright 2010-2011 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.primeton.dgs.kernel.core.cache.serde;
+
+import java.io.Serializable;
+
/**
 * Basic interface for serialization and deserialization of objects to byte arrays (binary data).
 * <p/>
 * Implementations should handle null objects and empty arrays on both the
 * serialization and the deserialization side. Note that Redis does not accept
 * null keys or values but can return null replies (for non-existing keys).
 *
 * @param <T> the type this serializer handles
 * @author zhaopx
 */
public interface Serializer<T> extends Serializable {

    /**
     * Serialize the given object to binary data.
     *
     * @param t object to serialize
     * @return the equivalent binary data
     */
    byte[] serialize(T t);

    /**
     * Deserialize an object from the given binary data.
     *
     * @param bytes object binary representation
     * @return the equivalent object instance
     */
    T deserialize(byte[] bytes);
}

+ 46 - 0
java-commons-cache/serde/StringSerializer.java

@@ -0,0 +1,46 @@
+package com.primeton.dgs.kernel.core.cache.serde;
+
+
+import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
+
+/**
+ * <pre>
+ *
+ * Created by zhaopx.
+ * User: zhaopx
+ * Date: 2019/11/14
+ * Time: 11:47
+ *
+ * </pre>
+ *
+ * @author zhaopx
+ */
+public class StringSerializer implements Serializer<String> {
+
+
+    /**
+     * 默认的编码
+     */
+    final Charset defaultCharset;
+
+
+    public StringSerializer() {
+        this(StandardCharsets.UTF_8);
+    }
+
+
+    public StringSerializer(Charset charset) {
+        this.defaultCharset = charset;
+    }
+
+    @Override
+    public byte[] serialize(String s) {
+        return s.getBytes(defaultCharset);
+    }
+
+    @Override
+    public String deserialize(byte[] bytes) {
+        return new String(bytes);
+    }
+}

+ 61 - 0
java-message-mq/Event.java

@@ -0,0 +1,61 @@
+package com.primeton.dgs.kernel.core.message;
+
+
+import java.util.HashMap;
+import java.util.Map;
+
/**
 * A message envelope: an immutable topic and payload plus a small mutable
 * header map for metadata.
 *
 * <pre>
 *
 * Created by zhaopx.
 * User: zhaopx
 * Date: 2019/11/11
 * Time: 16:02
 *
 * </pre>
 *
 * @author zhaopx
 */
public class Event<T> {

    /** Destination channel/topic of this event. */
    private final String topic;

    /** The message body. */
    private final T payload;

    /** Event headers; small expected size, hence the low initial capacity. */
    private final Map<String, Object> headers = new HashMap<>(4);

    public Event(String topic, T payload) {
        this.topic = topic;
        this.payload = payload;
    }

    /** Attach (or overwrite) one header entry. */
    public void addHeader(String key, Object value) {
        headers.put(key, value);
    }

    /** @return the live header map (mutations are visible to this event) */
    public Map<String, Object> getHeader() {
        return headers;
    }

    /** @return the topic this event targets */
    public String getTopic() {
        return topic;
    }

    /** @return the message body */
    public T getPayload() {
        return payload;
    }
}

+ 27 - 0
java-message-mq/MessageConsumer.java

@@ -0,0 +1,27 @@
+package com.primeton.dgs.kernel.core.message;
+
/**
 * Callback contract for components that want to receive messages published
 * through the message service.
 *
 * <pre>
 *
 * Created by zhaopx.
 * User: zhaopx
 * Date: 2019/11/11
 * Time: 13:55
 *
 * </pre>
 *
 * @author zhaopx
 */
public interface MessageConsumer {

    /**
     * Invoked for every message whose topic matches one of this consumer's
     * subscriptions.
     *
     * @param topic   the topic the message was published on
     * @param message the message payload
     */
    void onMessage(String topic, Object message);
}

+ 37 - 0
java-message-mq/MessageProvider.java

@@ -0,0 +1,37 @@
+package com.primeton.dgs.kernel.core.message;
+
+/**
+ *
+ * 发行消息,发送总是单例模式进行的。
+ *
+ * @see {@link MessageService#getMessageProvider()} 获得消息发送提供者
+ *
+ * <pre>
+ *
+ * Created by zhaopx.
+ * User: zhaopx
+ * Date: 2019/11/11
+ * Time: 13:53
+ *
+ * </pre>
+ *
+ * @author zhaopx
+ */
+public interface MessageProvider {
+
+
+    /**
+     * 发送消息
+     * @param topic
+     * @param message
+     */
+    public void send(String topic, Object message);
+
+
+    /**
+     * 发送消息
+     * @param event 消息实体
+     */
+    public void send(Event<Object> event);
+
+}

+ 242 - 0
java-message-mq/MessageService.java

@@ -0,0 +1,242 @@
+package com.primeton.dgs.kernel.core.message;
+
+import com.google.common.collect.HashMultimap;
+import com.google.common.collect.Multimap;
+import org.apache.commons.lang.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Collection;
+import java.util.Queue;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.locks.ReentrantLock;
+
+/**
+ *
+ * 消息服务。  实现类似 JMS 的消息功能
+ *
+ *
+ * <pre>
+ *
+ * Created by zhaopx.
+ * User: zhaopx
+ * Date: 2019/11/11
+ * Time: 13:53
+ *
+ * </pre>
+ *
+ * @author zhaopx
+ */
+public class MessageService implements Runnable {
+
+
+    /**
+     * 在 Spring 中注入的 Bean 名称
+     */
+    public static final String BEAN_NAME = "messageService";
+
+    private static Logger logger = LoggerFactory.getLogger(MessageService.class);
+
+
+    /**
+     * Consumer 和 topic 的映射
+     */
+    private final Multimap<MessageConsumer, String> CONSUMER_AND_TOPIC_MAP = HashMultimap.create();
+
+
+    /**
+     * 内部线程
+     */
+    private final Thread CONSUMER_THREAD = new Thread(this);
+
+
+    /**
+     * 消息队列
+     */
+    private final Queue<Event> MESSAGE_QUEUE = new LinkedBlockingQueue<>();
+
+
+    /**
+     * 第一次启动,无需等待
+     */
+    private boolean hasMessage = true;
+
+
+    /**
+     * 防止遍历 Consumer 时,对 Consumer add, remove
+     */
+    private final ReentrantLock CONSUMER_LOCK = new ReentrantLock();
+
+    /**
+     * 当前系统状态
+     */
+    private boolean shutdown = false;
+
+    /**
+     * 初始化,线程
+     */
+    public void init() {
+        CONSUMER_THREAD.setDaemon(true);
+        CONSUMER_THREAD.setName("message_consumer_thread");
+        CONSUMER_THREAD.start();
+    }
+
+    /**
+     * 提供监听服务,阻塞当前线程
+     */
+    public void serve() {
+        try {
+            CONSUMER_THREAD.join();
+        } catch (InterruptedException e) {
+            // 当前线程执行中断、向上传导
+            Thread.currentThread().interrupt();
+        }
+    }
+
+
+    @Override
+    public void run() {
+        // 只要没有 shutdown,一直执行
+        while (!shutdown) {
+            if(!hasMessage) {
+                synchronized (CONSUMER_THREAD) {
+                    try {
+                        // 无消息,等待 5s
+                        CONSUMER_THREAD.wait(5 * 1000L);
+                    } catch (InterruptedException e) {
+                        logger.error("Interrupted!", e);
+                        Thread.currentThread().interrupt();
+                    }
+                }
+            }
+            Event message = MESSAGE_QUEUE.poll();
+
+            // 是否还有消息,如果没有消息下一次等待时间会偏长
+            hasMessage = (message != null);
+            if(message == null) {
+                continue;
+            }
+
+            // 获取到消息
+            CONSUMER_LOCK.lock();
+            try {
+                for (MessageConsumer consumer : CONSUMER_AND_TOPIC_MAP.keySet()) {
+                    Collection<String> topics = CONSUMER_AND_TOPIC_MAP.get(consumer);
+                    String topic = StringUtils.upperCase(message.getTopic());
+                    // topic 符合 Consumer 的 topic 订阅,则广播
+                    if(topic != null && topics.contains(topic)){
+                        // 广播消息
+                        try {
+                            consumer.onMessage(message.getTopic(), message.getPayload());
+                        } catch (Exception e) {
+                            logger.error(consumer.getClass().getName() + " 处理消息异常。", e);
+                        }
+                    }
+                }
+            } catch (Exception e) {
+                logger.error("消息处理异常。", e);
+            } finally {
+                CONSUMER_LOCK.unlock();
+            }
+        }
+    }
+
+    /**
+     * 停止消息服务
+     */
+    public void shutdown() {
+        shutdown = true;
+        // 新的消息,唤醒 Consumer 线程
+        synchronized (CONSUMER_THREAD) {
+            CONSUMER_THREAD.notifyAll();
+        }
+        CONSUMER_THREAD.interrupt();
+        try {
+            CONSUMER_THREAD.join();
+        } catch (InterruptedException e) {
+            logger.error("Interrupted!", e);
+            Thread.currentThread().interrupt();
+        }
+    }
+
+
+    /**
+     * 发消息
+     * @param event
+     */
+    public void sendMessage(Event event) {
+        this.MESSAGE_QUEUE.offer(event);
+
+        // 新的消息,唤醒 Consumer 线程
+        synchronized (CONSUMER_THREAD) {
+            CONSUMER_THREAD.notify();
+        }
+    }
+
+
+    /**
+     * 获得一个消息发送提供者
+     * @return
+     */
+    public MessageProvider getMessageProvider() {
+        return new DefaultMessageProvider();
+    }
+
+
+    /**
+     * 添加 Consumer
+     * @param topic
+     * @param consumer
+     */
+    public void addMessageConsumer(String topic, MessageConsumer consumer) {
+        CONSUMER_LOCK.lock();
+        try {
+            // 改变为大写
+            CONSUMER_AND_TOPIC_MAP.put(consumer, topic.toUpperCase());
+        } finally {
+            CONSUMER_LOCK.unlock();
+        }
+    }
+
+
+    /**
+     * 移除 消息 监听者
+     * @param consumer
+     * @param consumer
+     */
+    public void removeMessageConsumer(MessageConsumer consumer) {
+        CONSUMER_LOCK.lock();
+        try {
+            CONSUMER_AND_TOPIC_MAP.removeAll(consumer);
+        } finally {
+            CONSUMER_LOCK.unlock();
+        }
+    }
+
+
+    class DefaultMessageProvider implements MessageProvider {
+
+        @Override
+        public void send(String topic, Object message) {
+            sendMessage(new Event(topic, message));
+        }
+
+        @Override
+        public void send(Event<Object> event) {
+            sendMessage(event);
+        }
+    }
+
+
+    public static void main(String[] args) throws InterruptedException {
+        MessageService messageService = new MessageService();
+        messageService.init();
+        messageService.addMessageConsumer("hello", ((topic, message) -> System.out.println(message)));
+        messageService.getMessageProvider().send("hello", "AAAAAAAa");
+        messageService.getMessageProvider().send("hello", "BBBBBBBB");
+        Thread.sleep(3000);
+        messageService.getMessageProvider().send("hello", "CCCCCCCC");
+        Thread.sleep(100000);
+    }
+}
+