Reading MySQL with Spark
import org.apache.spark.SparkConf;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

import java.util.Properties;

public class FromMySQL {
    public static void main(String[] args) {
        // Run locally with 2 cores
        SparkConf conf = new SparkConf().setMaster("local[2]").setAppName("spark_mysql");
        SparkSession sparkSession = SparkSession.builder().config(conf).getOrCreate();

        // JDBC connection properties
        Properties properties = new Properties();
        properties.setProperty("user", "root");
        properties.setProperty("password", "123456");
        properties.setProperty("driver", "com.mysql.cj.jdbc.Driver");
        // Note: numPartitions alone does not parallelize the read; it only takes
        // effect together with partitionColumn/lowerBound/upperBound
        properties.setProperty("numPartitions", "2");

        // Load the words1 table, keep the word/count columns, and filter count >= 3
        Dataset<Row> words1 = sparkSession.read()
                .jdbc("jdbc:mysql://localhost:3306/test", "words1", properties)
                .select("word", "count")
                .where("count >= 3");

        words1.show();
        words1.printSchema();

        sparkSession.stop();
    }
}
Result: words1.show() prints the filtered word/count rows (count >= 3), and words1.printSchema() prints the schema.
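Note that with the single-table read above, Spark still reads the whole table through one JDBC connection: the numPartitions option only splits the read when partitionColumn, lowerBound, and upperBound are also provided. Below is a minimal sketch of such a partitioned read, assuming (hypothetically) that words1 has a numeric id column roughly in the range 1 to 1000; the class name, column, and bounds are illustrative, not part of the original example.

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

public class FromMySQLPartitioned {
    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder()
                .master("local[2]")
                .appName("spark_mysql_partitioned")
                .getOrCreate();

        // Partitioned JDBC read: Spark issues one query per partition,
        // splitting the id range [1, 1000] into 2 slices.
        // The "id" column and its bounds are assumptions about the table.
        Dataset<Row> words1 = spark.read()
                .format("jdbc")
                .option("url", "jdbc:mysql://localhost:3306/test")
                .option("dbtable", "words1")
                .option("user", "root")
                .option("password", "123456")
                .option("driver", "com.mysql.cj.jdbc.Driver")
                .option("partitionColumn", "id")   // assumed numeric column
                .option("lowerBound", "1")
                .option("upperBound", "1000")
                .option("numPartitions", "2")
                .load();

        // Should report 2 partitions when the bounds are set as above
        System.out.println("partitions: " + words1.rdd().getNumPartitions());
        words1.show();
        spark.stop();
    }
}

The bounds only control how the id range is split across partitions; rows outside [1, 1000] are still read, just assigned to the edge partitions.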