Spark 数据库的写入与读取
Spark 对数据库的读取与写入
使用 MySQL 数据库的写法：
/**
 * Reads a MySQL table into a DataFrame over JDBC.
 *
 * Fixes vs. the previous version:
 *  - `var _spark: SparkSession = _` is not valid for a local variable
 *    (locals must be initialized) and would have been null regardless;
 *    we reuse the active session via `getOrCreate()` instead.
 *  - The body ended in a `var` definition, so the block evaluated to
 *    `Unit` while the signature promised `DataFrame`; the loaded frame
 *    is now returned directly.
 *  - Connection settings come from `SparkConfig`, consistent with
 *    `output` below.
 *
 * @param tableName name of the MySQL table to read
 * @return a DataFrame backed by the JDBC source (lazily loaded)
 */
def getDataFrame(tableName: String): DataFrame = {
  // Reuse the already-started session; builder().getOrCreate() returns
  // the active one rather than constructing a second session.
  val spark = SparkSession.builder().getOrCreate()
  spark.read
    .format("jdbc")
    .option("driver", "com.mysql.cj.jdbc.Driver")
    .option("url", SparkConfig.host)
    .option("dbtable", tableName)
    .option("user", SparkConfig.user)
    .option("password", SparkConfig.pwd)
    .load()
}
// 写入：使用 JDBC 将 DataFrame 保存到 MySQL 表
/**
 * Writes a DataFrame to a MySQL table over JDBC.
 *
 * @param df        the DataFrame to persist
 * @param tableName destination table name
 * @param mode      save mode (e.g. Append / Overwrite) applied to the write
 */
def output(df: DataFrame, tableName: String, mode: SaveMode): Unit = {
  // Configure the JDBC writer first, then trigger the write once.
  val jdbcWriter = df.write
    .format("jdbc")
    .option("driver", "com.mysql.cj.jdbc.Driver")
    .option("url", SparkConfig.host)
    .option("dbtable", tableName)
    .option("user", SparkConfig.user)
    .option("password", SparkConfig.pwd)
    // Rows flushed per JDBC batch; larger batches reduce round trips.
    .option("batchsize", 50000)
    .mode(mode)
  jdbcWriter.save()
}