// Spark reads from and writes to a MySQL database

import org.apache.spark.SparkConf;
import org.apache.spark.sql.*;
import java.util.Properties;


/**
 * Demo: read the {@code words} table from MySQL through Spark's JDBC data
 * source, query it via a temp view, and write the result back to a new table
 * {@code wordtest} (Spark creates the table automatically if it is missing).
 *
 * <p>Connection details (host, user, password) are hard-coded for this demo.
 */
public class SparkSqlJdbc {

    public static void main(String[] args) {

        SparkConf conf = new SparkConf();
        conf.setAppName("mytext");
        conf.setMaster("local");
        // Bypasses Spark's minimum executor-memory check for local testing.
        conf.set("spark.testing.memory", "2147480000");
        SparkSession session = SparkSession.builder().config(conf).getOrCreate();
        String url = "jdbc:mysql://192.168.0.116:3306/bigdata";

        try {
            // Database connection settings for the JDBC read.
            Dataset<Row> df = session.read().format("jdbc")
                    .option("url", url)
                    .option("user", "root")
                    .option("password", "root")
                    .option("dbtable", "words")
                    .option("driver", "com.mysql.jdbc.Driver")
                    .load();

            df.createTempView("users");
            Dataset<Row> df2 = session.sql("select * from users ");
            df2.show();

            // Write the data back to MySQL; a new table is created if absent.
            Properties prop = new Properties();
            prop.put("user", "root");
            prop.put("password", "root");
            // Fixed typo: key was "dirver", so the driver option was ignored.
            prop.put("driver", "com.mysql.jdbc.Driver");
            df2.write().jdbc(url, "wordtest", prop);
        } catch (AnalysisException e) {
            e.printStackTrace();
        } finally {
            // Always release Spark resources, even on failure.
            session.stop();
        }
    }
}