
Scala database access and a simple data source implementation

zhzhenqin, 9 months ago
Parent commit: 1e495cc007

+ 20 - 0
java-simple-dspool/PooledDataSource.java

@@ -0,0 +1,20 @@
+package com.yiidata.dataops.common.bigdata;
+
+
+import java.io.Closeable;
+import java.util.Properties;
+
+import javax.sql.DataSource;
+
+/**
+ *
+ * Hive DataSource implementation
+ *
+ *  @author zhaopx
+ */
+public interface PooledDataSource extends DataSource, Closeable {
+
+	public void setProperties(Properties properties);
+	
+	public void setIdleValidationQuery(int idleInSeconds, String validationQuery);
+}
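
A minimal usage sketch (not part of the commit, shown in Scala): any PooledDataSource implementation is both a javax.sql.DataSource and a Closeable, so one object hands out connections and tears the pool down. The property key, validation query and function name below are illustrative only; the idle/validation semantics are inferred from the parameter names.

  import java.util.Properties

  def exercisePool(ds: PooledDataSource): Unit = {
    val extra = new Properties()
    extra.setProperty("connect.timeout", "30")   // hypothetical key, for illustration only
    ds.setProperties(extra)
    ds.setIdleValidationQuery(300, "SELECT 1")   // e.g. validate connections idle for 5 minutes

    val conn = ds.getConnection                  // javax.sql.DataSource contract
    try {
      val st = conn.createStatement()
      try st.execute("SELECT 1") finally st.close()
    } finally {
      conn.close()                               // hand the connection back to the pool
    }
    ds.close()                                   // Closeable: shut the whole pool down
  }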

+ 209 - 0
java-simple-dspool/SimpleDataSource.java

@@ -0,0 +1,209 @@
+package com.yiidata.dataops.common.bigdata;
+
+import lombok.extern.slf4j.Slf4j;
+
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.lang.reflect.InvocationHandler;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+import java.lang.reflect.Proxy;
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.sql.SQLFeatureNotSupportedException;
+import java.util.Date;
+import java.util.Properties;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
+import java.util.concurrent.Semaphore;
+import java.util.concurrent.TimeUnit;
+
+/**
+ *  
+ * @author zhaopx
+ */
+
+@Slf4j
+public class SimpleDataSource implements PooledDataSource {	 
+
+	private ConcurrentMap<Connection, Date> pool = new ConcurrentHashMap<Connection, Date>();
+	
+
+	private int maxSize;
+	private int minSize;
+	private int waitTime;
+	
+	private Semaphore semaphore;
+
+	private final Properties connProps=new Properties();
+
+
+	private HiveConnectionService connectionService;
+	
+	public SimpleDataSource(HiveConnectionService connectionService, Properties poolProperties) {
+		this.connectionService = connectionService;
+		connProps.putAll(poolProperties);
+
+		maxSize  = Integer.parseInt(poolProperties.getProperty("pool.max", "15"));
+		minSize  = Integer.parseInt(poolProperties.getProperty("pool.min", "3"));
+		waitTime = Integer.parseInt(poolProperties.getProperty("pool.waitTime", "5"));
+		initConnections(poolProperties);
+	}
+
+	private void initConnections(Properties poolProperties) {
+		log.info("Initializing simple data source{ pool.max = " + maxSize + ", pool.min = " + minSize + "}");
+		semaphore = new Semaphore(maxSize, false);
+		if (minSize > 0 && minSize < maxSize) {
+			try {
+				// obtain and release one connection to validate the pool configuration
+				Connection conn = getConnection();
+				if(conn!=null){
+					conn.close();
+				}
+			} catch (SQLException e) {
+				throw new IllegalArgumentException(e);
+			}
+		}
+	}
+
+	public void close() throws IOException {
+		Exception ex = null;
+		for (Connection conn : pool.keySet()) {
+			try {
+				conn.close();
+			} catch (Exception e) { ex = e; }
+		}
+		pool.clear();
+		log.info("closed data source{ pool.max = " + maxSize + ", pool.min = " + minSize + "}");
+		if (ex != null) {
+			throw new IOException(ex);
+		}
+	}
+	
+	private void closeConnection(Connection realConnection) throws SQLException {
+		synchronized (pool) {
+			if (pool.size() <= maxSize) {
+				pool.put(realConnection, new Date());
+				return;
+			}
+		}
+			
+		try {
+			realConnection.close();
+		} finally {
+			semaphore.release();
+		}
+	}
+
+	public Connection getConnection() throws SQLException {
+		return getConnection(null, null);
+	}
+
+	public Connection getConnection(String username, String password) throws SQLException {		 
+		synchronized (pool) {
+			if (!pool.isEmpty()) {
+				Connection realConn = pool.keySet().iterator().next();
+				pool.remove(realConn);
+
+				// Hive JDBC does not support setting AutoCommit
+				//realConn.setAutoCommit(true);
+
+				return getProxyConnection(realConn);
+			}
+		}
+		 	
+		try {
+			if (semaphore.tryAcquire(waitTime, TimeUnit.SECONDS)) {
+				return getProxyConnection(getRealConnection(username, password));
+			} else {
+				throw new IllegalArgumentException("Connection pool is full: " + maxSize);
+			}
+		} catch (SQLException e) {
+			// creating the real connection failed; return the permit acquired above
+			semaphore.release();
+			throw e;
+		} catch (InterruptedException e) {
+			log.error(e.getMessage(), e);
+			Thread.currentThread().interrupt();
+			throw new SQLException("Interrupted while waiting for a pooled connection", e);
+		}
+	}
+	
+	private Connection getProxyConnection(final Connection realConnection) {
+		InvocationHandler handler = new InvocationHandler() {
+			public Object invoke(Object proxy, Method method, Object[] params) throws Throwable {
+				Object ret = null;
+				if ("close".equals(method.getName())) {
+					closeConnection(realConnection);
+				} else if ("unwrap".equals(method.getName())) {
+					ret = realConnection;
+				} else {
+					try {
+						ret = method.invoke(realConnection, params);
+					} catch (InvocationTargetException e) {
+						// rethrow the real cause (e.g. SQLException) instead of the reflective wrapper
+						throw e.getTargetException();
+					}
+				}
+				return ret;
+			}
+		};
+		return (Connection) Proxy.newProxyInstance(Connection.class.getClassLoader(), new Class[] { Connection.class }, handler);
+	}
+
+	
+
+	protected Connection getRealConnection(String username, String password) throws SQLException {
+		try {
+			return connectionService.getConnection();
+		} catch (Exception e) {
+			throw new SQLException(e);
+		}
+	}
+
+	public void setProperties(Properties properties){
+		this.connProps.putAll(properties);
+	}
+	
+	public PrintWriter getLogWriter() throws SQLException {
+		return null;
+	}
+
+	public void setLogWriter(PrintWriter out) throws SQLException {
+		throw new UnsupportedOperationException();
+	}
+
+	public void setLoginTimeout(int seconds) throws SQLException {
+		throw new UnsupportedOperationException();
+	}
+
+	public int getLoginTimeout() throws SQLException {
+		return 0;
+	}
+
+	public java.util.logging.Logger getParentLogger() throws SQLFeatureNotSupportedException {
+		return null;
+	}
+
+	public <T> T unwrap(Class<T> iface) throws SQLException {
+		return null;
+	}
+
+	public boolean isWrapperFor(Class<?> iface) throws SQLException {
+		return false;
+	}
+
+
+	public void setIdleValidationQuery(int idleInSeconds, String validationQuery){
+		// do nothing: idle validation is not implemented in this simple pool
+	}
+
+	public int getMaxSize() {
+		return maxSize;
+	}
+
+	public int getMinSize() {
+		return minSize;
+	}
+
+	public int getWaitTime() {
+		return waitTime;
+	}
+
+	public Properties getConnProps() {
+		return connProps;
+	}
+}
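
A rough wiring sketch (not part of the commit): the pool.* keys below mirror the defaults parsed in the SimpleDataSource constructor, and HiveConnectionService is the collaborator referenced above but not included in this commit, so it is stubbed out here.

  import java.util.Properties

  val poolProps = new Properties()
  poolProps.setProperty("pool.max", "10")        // upper bound on physical connections (default 15)
  poolProps.setProperty("pool.min", "2")         // 0 < min < max triggers the warm-up check
  poolProps.setProperty("pool.waitTime", "5")    // seconds to wait for a free slot (default 5)

  val hiveService: HiveConnectionService = ???   // provided elsewhere; not part of this commit
  val ds = new SimpleDataSource(hiveService, poolProps)

  val conn = ds.getConnection
  try {
    // run statements against Hive here
  } finally {
    conn.close()   // proxied close: the physical connection is returned to the pool
  }
  ds.close()       // closes every cached physical connection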

+ 340 - 0
scala-sql-runner/JdbcDFWriter.scala

@@ -0,0 +1,340 @@
+package com.yiidata.amc.jdbc
+
+import java.util
+import java.util.Properties
+
+import com.google.common.collect.ImmutableList.Builder
+import com.google.common.collect.{UnmodifiableIterator, ImmutableList}
+import org.apache.commons.dbcp.BasicDataSource
+import org.apache.spark.api.java.function.VoidFunction
+import org.apache.spark.rdd.RDD
+
+
+
+/**
+  *
+  * <pre>
+  * new JdbcDFWriter("update HOLIDAY_TABLE_1 set STATUS = '1' where DATETIME = ? and HOLIDAY = ? ",
+  *    Array("DATETIME", "HOLIDAY"), select).save(pro)
+  * </pre>
+  *
+  *
+  * <p>
+  * JdbcRDDWriter and JdbcDFWriter resolve column values differently: JdbcDFWriter looks values up by column name, while JdbcRDDWriter uses column indexes.
+  *
+  * The number of PstateSetters must equal the number of ? placeholders in the SQL.
+  *
+  * </p>
+  *
+  * Created by ZhenQin on 2018/2/6 0006-18:01
+  * Vendor: yiidata.com
+  *
+  */
+class JdbcDFWriter(sql:String, columnsNames:Array[String], df:org.apache.spark.sql.DataFrame){
+
+  {
+    val arrays: util.List[String] = new util.ArrayList[String](columnsNames.length)
+    columnsNames.foreach(c=>{arrays.add(c)})
+
+    val split: Array[String] = (sql + " ").split("\\?")
+    if(columnsNames.length != (split.length -1)){
+      throw new IllegalArgumentException("The number of ? placeholders in the SQL must equal the number of columnsNames, and they must correspond in order.")
+    }
+
+    // if any requested column is missing from the DataFrame, fail fast
+    df.schema.foreach(s=>{arrays.remove(s.name)})
+    if(!arrays.isEmpty){
+      throw new IllegalArgumentException("unknown field: " + arrays.toString)
+    }
+  }
+
+  /**
+    * Save this DataFrame to the database.
+    *
+    * @param props database connection properties
+    */
+  def save(props:Properties):Unit = {
+    val builder: Builder[PstateSetter] = ImmutableList.builder()
+
+    // note: placeholder order matters here
+    val fieldMap:util.Map[String, PstateSetter] = new util.HashMap[String, PstateSetter](df.schema.length)
+    df.schema.foreach(f=>{
+      f.dataType match {
+        case org.apache.spark.sql.types.IntegerType => fieldMap.put(f.name, new IntPstateSetter(f.name))
+        case org.apache.spark.sql.types.LongType => fieldMap.put(f.name, new LongPstateSetter(f.name))
+        case org.apache.spark.sql.types.DoubleType => fieldMap.put(f.name, new DoublePstateSetter(f.name))
+        case org.apache.spark.sql.types.FloatType => fieldMap.put(f.name, new FloatPstateSetter(f.name))
+        case org.apache.spark.sql.types.ShortType => fieldMap.put(f.name, new ShortPstateSetter(f.name))
+        case org.apache.spark.sql.types.ByteType => fieldMap.put(f.name, new BytePstateSetter(f.name))
+        case org.apache.spark.sql.types.BooleanType => fieldMap.put(f.name, new BoolPstateSetter(f.name))
+        case org.apache.spark.sql.types.StringType => fieldMap.put(f.name, new StringPstateSetter(f.name))
+        case org.apache.spark.sql.types.BinaryType => fieldMap.put(f.name, new StringPstateSetter(f.name))
+        case org.apache.spark.sql.types.TimestampType => fieldMap.put(f.name, new TimestampPstateSetter(f.name))
+        case org.apache.spark.sql.types.DateType => fieldMap.put(f.name, new DatePstateSetter(f.name))
+        case t: org.apache.spark.sql.types.DecimalType => fieldMap.put(f.name, new DecimalPstateSetter(f.name))
+        case _ => None
+      }
+    })
+
+    // add each parameter column in placeholder order
+    for(col <- columnsNames){
+      builder.add(fieldMap.get(col))
+    }
+
+    df.javaRDD.foreachPartition(new JdbcPartitionFunction(sql, builder.build(), props))
+  }
+}
+
+
+/**
+  * Writes an RDD to the database via JDBC.
+  *
+  * <pre>
+  * new JdbcRDDWriter("update HOLIDAY_TABLE_1 set STATUS = '1' where DATETIME = ? and HOLIDAY = ? ",
+  *    Array(new StringPstateSetter("$1"), new StringPstateSetter("$2")), rdd)
+  * </pre>
+  *
+  * $1 takes the second column of each RDD line; indexing starts at $0.
+  * $2 takes the third column of each RDD line.
+  *
+  * JdbcRDDWriter and JdbcDFWriter resolve column values differently: JdbcDFWriter looks values up by column name, while JdbcRDDWriter uses column indexes.
+  *
+  * Each RDD element should be an Array(), so values can be fetched with array(0), array(1), and so on.
+  *
+  * The number of PstateSetters must equal the number of ? placeholders in the SQL.
+  *
+  * @param sql the update SQL to execute
+  * @param columnTypes parameter setters, one per ? placeholder
+  * @param rdd the RDD to write
+  */
+class JdbcRDDWriter(sql:String, columnTypes:Array[PstateSetter], @transient rdd:RDD[Array[Any]]) extends Serializable {
+
+  {
+    val split: Array[String] = (sql + " ").split("\\?")
+    if(columnTypes.length != (split.length -1)){
+      throw new IllegalArgumentException("The number of ? placeholders in the SQL must equal the number of columnTypes, and they must correspond in order.")
+    }
+
+  }
+
+  /**
+    * Save this RDD to the database.
+    *
+    * @param props database connection properties
+    */
+  def save(props:Properties):Unit = {
+    val builder: Builder[PstateSetter] = ImmutableList.builder()
+    rdd.foreachPartition(iter =>{
+      val batchSize = Integer.parseInt(props.getProperty("batchsize", "2000"))
+      val dataSource = new BasicDataSource()
+      dataSource.setMaxActive(1)
+      dataSource.setMinIdle(1)
+      dataSource.setInitialSize(1)
+      dataSource.setDriverClassName(props.getProperty("driver"))
+      dataSource.setUrl(props.getProperty("url"))
+      dataSource.setUsername(props.getProperty("user"))
+      dataSource.setPassword(props.getProperty("password"))
+      val conn = dataSource.getConnection
+      conn.setAutoCommit(false)
+      val st = conn.prepareStatement(sql)
+
+      try {
+        var counter = 0
+        while (iter.hasNext) {
+          val line = iter.next()
+          var i = 1
+
+          for (pstateSetter <- columnTypes) {
+            val v = line(pstateSetter.index)
+            pstateSetter.setValue(st, i, v)
+            i += 1
+          }
+
+          counter += 1
+          st.addBatch()
+
+          // execute once per full batch
+          if (counter >= batchSize) {
+            st.executeBatch()
+            st.clearBatch()
+            counter = 0
+          }
+        }
+
+        // flush the final partial batch
+        if(counter % batchSize > 0) {
+          st.executeBatch()
+        }
+        conn.commit();
+      } finally {
+        // close resources
+        try {
+          st.close()
+          conn.close()
+
+          dataSource.close()
+        } catch {
+          case e:Exception => e.printStackTrace()
+        }
+      }
+    })
+  }
+}
+
+
+/**
+  * Writes an in-memory array of rows to the database via JDBC.
+  *
+  * <pre>
+  * new JdbcArrayWriter("update HOLIDAY_TABLE_1 set STATUS = '1' where DATETIME = ? and HOLIDAY = ? ",
+  *    Array(new StringPstateSetter("$1"), new StringPstateSetter("$2")), array)
+  * </pre>
+  *
+  * $1 takes the second column of each row; indexing starts at $0.
+  * $2 takes the third column of each row.
+  *
+  *
+  * Each element of the outer Array() should be an Array[Any], so values can be fetched with array(0), array(1), and so on.
+  *
+  * The number of PstateSetters must equal the number of ? placeholders in the SQL.
+  *
+  * @param sql the update SQL to execute
+  * @param columnTypes parameter setters, one per ? placeholder
+  * @param list the rows to write
+  */
+class JdbcArrayWriter(sql:String, columnTypes:Array[PstateSetter], @transient list:Array[Array[Any]]) extends Serializable {
+
+  {
+    val split: Array[String] = (sql + " ").split("\\?")
+    if(columnTypes.length != (split.length -1)){
+      throw new IllegalArgumentException("The number of ? placeholders in the SQL must equal the number of columnTypes, and they must correspond in order.")
+    }
+
+  }
+
+  /**
+    * Save this array to the database.
+    *
+    * @param props database connection properties
+    */
+  def save(props:Properties):Unit = {
+    val builder: Builder[PstateSetter] = ImmutableList.builder()
+    val batchSize = Integer.parseInt(props.getProperty("batchsize", "2000"))
+    val dataSource = new BasicDataSource()
+    dataSource.setMaxActive(1)
+    dataSource.setMinIdle(1)
+    dataSource.setInitialSize(1)
+    dataSource.setDriverClassName(props.getProperty("driver"))
+    dataSource.setUrl(props.getProperty("url"))
+    dataSource.setUsername(props.getProperty("user"))
+    dataSource.setPassword(props.getProperty("password"))
+    val conn = dataSource.getConnection
+    conn.setAutoCommit(false)
+    val st = conn.prepareStatement(sql)
+    try {
+      var counter = 0
+      val iter = list.iterator
+      while (iter.hasNext) {
+        val line = iter.next()
+        var i = 1
+
+        for (pstateSetter <- columnTypes) {
+          val v = line(pstateSetter.index)
+          pstateSetter.setValue(st, i, v)
+          i += 1
+        }
+
+        counter += 1
+        st.addBatch()
+
+        // execute once per full batch
+        if (counter >= batchSize) {
+          st.executeBatch()
+          st.clearBatch()
+          counter = 0
+        }
+      }
+
+      // flush the final partial batch
+      if (counter % batchSize > 0) {
+        st.executeBatch()
+      }
+      conn.commit()
+    } finally {
+      // close resources
+      try {
+        st.close()
+        conn.close()
+
+        dataSource.close()
+      } catch {
+        case e: Exception => e.printStackTrace()
+      }
+    }
+  }
+}
+
+
+class JdbcPartitionFunction(sql:String, ps:ImmutableList[PstateSetter], pw:Properties)
+  extends VoidFunction[util.Iterator[org.apache.spark.sql.Row]] {
+
+  /**
+    * Execute the batch once every batchSize rows.
+    */
+  val batchSize = Integer.parseInt(pw.getProperty("batchsize", "2000"))
+
+
+  override def call(iter: util.Iterator[org.apache.spark.sql.Row]): Unit = {
+    val dataSource = new BasicDataSource()
+    dataSource.setMaxActive(1)
+    dataSource.setMinIdle(1)
+    dataSource.setInitialSize(1)
+    dataSource.setDriverClassName(pw.getProperty("driver"))
+    dataSource.setUrl(pw.getProperty("url"))
+    dataSource.setUsername(pw.getProperty("user"))
+    dataSource.setPassword(pw.getProperty("password"))
+    val conn = dataSource.getConnection
+    conn.setAutoCommit(false)
+    val st = conn.prepareStatement(sql)
+    var counter = 0
+    try {
+      while(iter.hasNext()) {
+        val row = iter.next()
+        var i = 1
+        val iterator: UnmodifiableIterator[PstateSetter] = ps.iterator()
+        while(iterator.hasNext) {
+          val pstateSetter: PstateSetter = iterator.next()
+          pstateSetter.setValue(st, i, row)
+          i += 1
+        }
+
+        counter += 1
+        st.addBatch()
+
+        // execute once per full batch
+        if(counter >= batchSize){
+          st.executeBatch()
+          st.clearBatch()
+          counter = 0
+        }
+      }
+
+      // flush the final partial batch
+      if(counter % batchSize > 0) {
+        st.executeBatch()
+      }
+      conn.commit();
+    } finally {
+      // close resources
+      try {
+        st.close()
+        conn.close()
+
+        dataSource.close()
+      } catch {
+        case e:Exception => e.printStackTrace()
+      }
+    }
+  }
+}
+
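
For reference, save() reads the connection settings from fixed property keys (driver, url, user, password, batchsize). A sketch of a call for both writers; the driver class, JDBC URL and the someDataFrame/someRdd values are placeholders, not part of the commit:

  import java.util.Properties

  val props = new Properties()
  props.setProperty("driver", "com.mysql.jdbc.Driver")         // illustrative driver class
  props.setProperty("url", "jdbc:mysql://db-host:3306/amc")    // hypothetical JDBC URL
  props.setProperty("user", "amc")
  props.setProperty("password", "***")
  props.setProperty("batchsize", "2000")                       // rows per executeBatch()

  val someDataFrame: org.apache.spark.sql.DataFrame = ???      // placeholder
  val someRdd: org.apache.spark.rdd.RDD[Array[Any]] = ???      // placeholder

  // DataFrame path: parameters are resolved by column name
  new JdbcDFWriter(
    "update HOLIDAY_TABLE_1 set STATUS = '1' where DATETIME = ? and HOLIDAY = ? ",
    Array("DATETIME", "HOLIDAY"), someDataFrame).save(props)

  // RDD path: parameters are resolved by $-prefixed column index
  new JdbcRDDWriter(
    "update HOLIDAY_TABLE_1 set STATUS = '1' where DATETIME = ? and HOLIDAY = ? ",
    Array(new StringPstateSetter("$1"), new StringPstateSetter("$2")), someRdd).save(props)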

+ 52 - 0
scala-sql-runner/JdbcReader.scala

@@ -0,0 +1,52 @@
+package com.yiidata.amc.jdbc
+
+import java.util
+import java.util.Properties
+
+import org.apache.commons.dbcp.BasicDataSource
+import org.apache.commons.dbutils.QueryRunner
+import org.apache.commons.dbutils.handlers.MapListHandler
+
+
+
+/**
+ * <p>
+ * Reads data from the database.
+ * </p>
+ *
+ * Created by ZhenQin on 2018/2/10 0010-10:35
+ * Vendor: yiidata.com
+ * @author ZhenQin
+ */
+trait JdbcReader[T] {
+
+  def read(sql:String, ps:Properties):T
+}
+
+/**
+  * Reads table data via JDBC.
+  *
+  * @author ZhenQin
+  *
+  */
+class JdbcRowReader extends JdbcReader[util.List[util.Map[String, AnyRef]]] {
+  override def read(sql:String, ps: Properties): util.List[util.Map[String, AnyRef]] = {
+    val dataSource = new BasicDataSource()
+    dataSource.setMaxActive(1)
+    dataSource.setMinIdle(1)
+    dataSource.setInitialSize(1)
+    dataSource.setDriverClassName(ps.getProperty("driver"))
+    dataSource.setUrl(ps.getProperty("url"))
+    dataSource.setUsername(ps.getProperty("user"))
+    dataSource.setPassword(ps.getProperty("password"))
+
+    val qr = new QueryRunner(dataSource)
+
+    try {
+      val list: util.List[util.Map[String, AnyRef]] = qr.query(sql, new MapListHandler())
+      list
+    } finally {
+      dataSource.close()
+    }
+  }
+}
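
A short usage sketch (same property keys as the writers); each returned element is a java.util.Map keyed by column name. The driver class and URL below are illustrative only:

  import java.util.Properties
  import scala.collection.JavaConverters._

  val props = new Properties()
  props.setProperty("driver", "org.postgresql.Driver")          // illustrative driver class
  props.setProperty("url", "jdbc:postgresql://db-host/amc")     // hypothetical JDBC URL
  props.setProperty("user", "amc")
  props.setProperty("password", "***")

  val rows = new JdbcRowReader().read("select * from HOLIDAY_TABLE_1", props)
  rows.asScala.foreach(row => println(row.get("HOLIDAY")))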

+ 332 - 0
scala-sql-runner/PstateSetter.scala

@@ -0,0 +1,332 @@
+package com.yiidata.amc.jdbc
+
+import java.math.BigDecimal
+import java.sql.{Types, Timestamp, Date, PreparedStatement}
+
+import org.apache.spark.sql.Row
+
+/**
+  *
+  * <p>
+  * Type-matched PreparedStatement value setters for JDBC writes, with null-value checks.
+  * </p>
+  *
+  * Created by ZhenQin on 2018/2/6 0006-17:17
+  * Vendor: yiidata.com
+  *
+  */
+abstract class PstateSetter(fieldName:String) extends Serializable {
+
+
+  /**
+    * 修改的字段名称
+    */
+  val field:String = fieldName
+
+
+  /**
+    * Column index this field refers to; RDD/array indexing starts at 0.
+    */
+  val index = try {
+    Integer.parseInt(fieldName.replaceAll("\\$", ""))
+  } catch {
+    case e:NumberFormatException => 0
+  }
+
+  /**
+    * Set the parameter value.
+    *
+    * @param ps the PreparedStatement to populate
+    * @param num the 1-based parameter position
+    * @param value the current row (Spark Row) or a raw column value
+    */
+  def setValue(ps:PreparedStatement, num:Int, value: Any): Unit
+
+}
+
+
+
+
+class IntPstateSetter(fieldName:String) extends PstateSetter(fieldName) {
+
+  override def setValue(ps:PreparedStatement, num:Int, value: Any): Unit = {
+    if(value.isInstanceOf[Row]){
+      val row = value.asInstanceOf[Row]
+      val i = row.fieldIndex(fieldName)
+
+      // the column is null in this row
+      if(row.isNullAt(i)){
+        ps.setNull(num, Types.INTEGER)
+      } else {
+        ps.setInt(num, row.getInt(i))
+      }
+      return
+    }
+
+    // the raw value is null
+    if(value == null){
+      ps.setNull(num, Types.INTEGER)
+    } else {
+      ps.setInt(num, value.asInstanceOf[Int])
+    }
+
+  }
+}
+
+
+class StringPstateSetter(fieldName:String) extends PstateSetter(fieldName) {
+
+  override def setValue(ps:PreparedStatement, num:Int, value: Any): Unit = {
+    if(value.isInstanceOf[Row]){
+      val row = value.asInstanceOf[Row]
+      val i = row.fieldIndex(fieldName)
+
+      // the column is null in this row
+      if(row.isNullAt(i)){
+        ps.setNull(num, Types.VARCHAR)
+      } else {
+        ps.setString(num, row.getString(i))
+      }
+      return
+    }
+    // the raw value is null
+    if(value == null){
+      ps.setNull(num, Types.VARCHAR)
+    } else {
+      ps.setString(num, value.asInstanceOf[String])
+    }
+  }
+}
+
+
+
+class LongPstateSetter(fieldName:String) extends PstateSetter(fieldName) {
+
+  override def setValue(ps:PreparedStatement, num:Int, value: Any): Unit = {
+    if(value.isInstanceOf[Row]){
+      val row = value.asInstanceOf[Row]
+      val i = row.fieldIndex(fieldName)
+
+      // the column is null in this row
+      if(row.isNullAt(i)){
+        ps.setNull(num, Types.BIGINT)
+      } else {
+        ps.setLong(num, row.getLong(i))
+      }
+      return
+    }
+    // the raw value is null
+    if(value == null){
+      ps.setNull(num, Types.BIGINT)
+    } else {
+      ps.setLong(num, value.asInstanceOf[Long])
+    }
+  }
+}
+
+
+class DatePstateSetter(fieldName:String) extends PstateSetter(fieldName) {
+
+  override def setValue(ps:PreparedStatement, num:Int, value: Any): Unit = {
+    if(value.isInstanceOf[Row]){
+      val row = value.asInstanceOf[Row]
+      val i = row.fieldIndex(fieldName)
+
+      // the column is null in this row
+      if(row.isNullAt(i)){
+        ps.setNull(num, Types.DATE)
+      } else {
+        ps.setDate(num, row.getDate(i))
+      }
+      return
+    }
+    // the raw value is null
+    if(value == null){
+      ps.setNull(num, Types.DATE)
+    } else {
+      ps.setDate(num, value.asInstanceOf[Date])
+    }
+  }
+}
+
+
+class TimestampPstateSetter(fieldName:String) extends PstateSetter(fieldName) {
+
+  override def setValue(ps:PreparedStatement, num:Int, value: Any): Unit = {
+    if(value.isInstanceOf[Row]){
+      val row = value.asInstanceOf[Row]
+      val i = row.fieldIndex(fieldName)
+
+      // the column is null in this row
+      if(row.isNullAt(i)){
+        ps.setNull(num, Types.TIMESTAMP)
+      } else {
+        ps.setTimestamp(num, row.getTimestamp(i))
+      }
+      return
+    }
+    // the raw value is null
+    if(value == null){
+      ps.setNull(num, Types.TIMESTAMP)
+    } else {
+      ps.setTimestamp(num, value.asInstanceOf[Timestamp])
+    }
+  }
+}
+
+
+class DoublePstateSetter(fieldName:String) extends PstateSetter(fieldName) {
+
+  override def setValue(ps:PreparedStatement, num:Int, value: Any): Unit = {
+    if(value.isInstanceOf[Row]){
+      val row = value.asInstanceOf[Row]
+      val i = row.fieldIndex(fieldName)
+
+      // the column is null in this row
+      if(row.isNullAt(i)){
+        ps.setNull(num, Types.DOUBLE)
+      } else {
+        ps.setDouble(num, row.getDouble(i))
+      }
+      return
+    }
+    // the raw value is null
+    if(value == null){
+      ps.setNull(num, Types.DOUBLE)
+    } else {
+      ps.setDouble(num, value.asInstanceOf[Double])
+    }
+  }
+}
+
+
+class FloatPstateSetter(fieldName:String) extends PstateSetter(fieldName) {
+
+  override def setValue(ps:PreparedStatement, num:Int, value: Any): Unit = {
+    if(value.isInstanceOf[Row]){
+      val row = value.asInstanceOf[Row]
+      val i = row.fieldIndex(fieldName)
+
+      // the column is null in this row
+      if(row.isNullAt(i)){
+        ps.setNull(num, Types.FLOAT)
+      } else {
+        ps.setFloat(num, row.getFloat(i))
+      }
+      return
+    }
+    // the raw value is null
+    if(value == null){
+      ps.setNull(num, Types.FLOAT)
+    } else {
+      ps.setFloat(num, value.asInstanceOf[Float])
+    }
+  }
+}
+
+
+class BytePstateSetter(fieldName:String) extends PstateSetter(fieldName) {
+
+  override def setValue(ps:PreparedStatement, num:Int, value: Any): Unit = {
+    if(value.isInstanceOf[Row]){
+      val row = value.asInstanceOf[Row]
+      val i = row.fieldIndex(fieldName)
+
+      // the column is null in this row
+      if(row.isNullAt(i)){
+        ps.setNull(num, Types.TINYINT)
+      } else {
+        ps.setByte(num, row.getByte(i))
+      }
+      return
+    }
+    // the raw value is null
+    if(value == null){
+      ps.setNull(num, Types.TINYINT)
+    } else {
+      ps.setByte(num, value.asInstanceOf[Byte])
+    }
+
+  }
+}
+
+
+
+class ShortPstateSetter(fieldName:String) extends PstateSetter(fieldName) {
+
+  override def setValue(ps:PreparedStatement, num:Int, value: Any): Unit = {
+    if(value.isInstanceOf[Row]){
+      val row = value.asInstanceOf[Row]
+      val i = row.fieldIndex(fieldName)
+
+      // the column is null in this row
+      if(row.isNullAt(i)){
+        ps.setNull(num, Types.SMALLINT)
+      } else {
+        ps.setShort(num, row.getShort(i))
+      }
+      return
+    }
+    // the raw value is null
+    if(value == null){
+      ps.setNull(num, Types.SMALLINT)
+    } else {
+      ps.setShort(num, value.asInstanceOf[Short])
+    }
+
+  }
+}
+
+
+
+class BoolPstateSetter(fieldName:String) extends PstateSetter(fieldName) {
+
+  override def setValue(ps:PreparedStatement, num:Int, value: Any): Unit = {
+    if(value.isInstanceOf[Row]){
+      val row = value.asInstanceOf[Row]
+      val i = row.fieldIndex(fieldName)
+
+      // the column is null in this row
+      if(row.isNullAt(i)){
+        ps.setNull(num, Types.BOOLEAN)
+      } else {
+        ps.setBoolean(num, row.getBoolean(i))
+      }
+      return
+    }
+    // the raw value is null
+    if(value == null){
+      ps.setNull(num, Types.BOOLEAN)
+    } else {
+      ps.setBoolean(num, value.asInstanceOf[Boolean])
+    }
+  }
+}
+
+/**
+  * High-precision integer and floating-point values (DECIMAL).
+  * @param fieldName
+  */
+class DecimalPstateSetter (fieldName:String) extends PstateSetter(fieldName) {
+
+  override def setValue(ps:PreparedStatement, num:Int, value: Any): Unit = {
+    if(value.isInstanceOf[Row]){
+      val row = value.asInstanceOf[Row]
+      val i = row.fieldIndex(fieldName)
+
+      // the column is null in this row
+      if(row.isNullAt(i)){
+        ps.setNull(num, Types.DECIMAL)
+      } else {
+        ps.setBigDecimal(num, row.getDecimal(i))
+      }
+      return
+    }
+    // the raw value is null
+    if(value == null){
+      ps.setNull(num, Types.DECIMAL)
+    } else {
+      ps.setBigDecimal(num, value.asInstanceOf[BigDecimal])
+    }
+  }
+}
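
Every setter above follows the same two-branch pattern: when the value is a Spark Row the column is resolved by name (DataFrame path), otherwise the raw value is cast directly (RDD/array path, where fieldName is a $-prefixed index). A hypothetical setter for java.sql.Time, written the same way, might look like this sketch (not part of the commit):

  import java.sql.{PreparedStatement, Time, Types}
  import org.apache.spark.sql.Row

  class TimePstateSetter(fieldName: String) extends PstateSetter(fieldName) {

    override def setValue(ps: PreparedStatement, num: Int, value: Any): Unit = value match {
      // DataFrame path: look the column up by name and honour SQL NULL
      case row: Row =>
        val i = row.fieldIndex(fieldName)
        if (row.isNullAt(i)) ps.setNull(num, Types.TIME)
        else ps.setTime(num, row.getAs[Time](i))
      // RDD / array path: the value was already extracted by index
      case null => ps.setNull(num, Types.TIME)
      case t    => ps.setTime(num, t.asInstanceOf[Time])
    }
  }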