// This program follows the JDBC write example from the Spark official documentation.
/**
 * Writes a JSON log file into ClickHouse through Spark's JDBC sink.
 *
 * The connection target, table, and input path were previously hard-coded;
 * they are now parameters with defaults matching the original values, so
 * existing `WriteToCK()` callers are unaffected.
 *
 * @param jdbcUrl   JDBC connection URL of the target database
 * @param table     destination table name
 * @param inputPath path of the JSON file to load
 */
def WriteToCK(jdbcUrl: String = "jdbcUrl",
              table: String = "表名",
              inputPath: String = "D://a.log"): Unit = {
  // Required on Windows so Spark/Hadoop can locate winutils.exe.
  System.setProperty("hadoop.home.dir", "D:\\softWare\\ruanjian\\spark\\hadoop-2.7.7")

  val spark = SparkSession
    .builder()
    .master("local")
    // Preserve column-name case so JSON fields map 1:1 onto table columns.
    .config("spark.sql.caseSensitive", "true")
    .getOrCreate()
  spark.sparkContext.setLogLevel("warn")

  try {
    val dataFrame = spark.read.json(inputPath)

    // NOTE(review): `username` / `password` come from the enclosing scope —
    // confirm they are initialized before this method runs.
    val connectionProperties = new Properties()
    connectionProperties.setProperty("user", username)
    connectionProperties.setProperty("password", password)

    dataFrame.write
      .mode("append")
      .option("batchsize", "50000")     // larger batches reduce JDBC round-trips
      .option("isolationLevel", "NONE") // skip transaction isolation (ClickHouse has none)
      .option("numPartitions", "1")     // single writer: one JDBC connection for the insert
      .jdbc(jdbcUrl, table, connectionProperties)
  } finally {
    // Release the local SparkSession even if the read or write fails
    // (the original leaked it on any exception).
    spark.stop()
  }
}