import org.apache.spark.sql.SparkSession
object JDBCRead {
  /** Reads the `user` table from MySQL over JDBC with the *generic* reader API
    * (`format("jdbc")` + per-key options) and prints the resulting DataFrame.
    */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[*]").appName("JDBCRead").getOrCreate()
    // NOTE(review): credentials are hard-coded for the demo; externalize them
    // (config file / environment) before any real use.
    val url  = "jdbc:mysql://hadoop102:3306/sparksql"
    val user = "root"
    val pw   = "root"
    // Generic form: declare the source format first, then supply every JDBC
    // parameter as an option. (The unused `import spark.implicits._` from the
    // original was removed — nothing here needs the implicit encoders.)
    val df = spark.read
      .format("jdbc")
      .option("url", url)
      .option("user", user)
      .option("password", pw)
      .option("dbtable", "user")
      .load()
    df.show()
    spark.close()
  }
}
专用写法（JDBC 专用 API：`DataFrameReader.jdbc(url, table, properties)`）
import java.util.Properties
import org.apache.spark.sql.SparkSession
object JDBCRead1 {
  /** Reads the `user` table from MySQL with the *JDBC-specific* reader
    * `DataFrameReader.jdbc(url, table, properties)` and prints it.
    */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[*]").appName("JDBCRead").getOrCreate()
    // NOTE(review): hard-coded credentials — fine for a demo only.
    val url  = "jdbc:mysql://hadoop102:3306/sparksql"
    val user = "root"
    val pw   = "root"
    // BUG FIX: original read `=newProperties()` (missing space), which does not
    // compile — `newProperties` is an unresolved identifier.
    val props = new Properties()
    props.put("user", user)
    props.put("password", pw)
    val df = spark.read.jdbc(url, "user", props)
    df.show()
    spark.close()
  }
}
向 JDBC 写入数据
通用写法（通用 API：`format("jdbc")` + 各项 option + `save()`）
import org.apache.spark.sql.{SaveMode, SparkSession}
object JDBCWrite {
  /** Reads user records from a local JSON file and writes them to the MySQL
    * table `user1602` using the *generic* writer API (`format("jdbc")` + options).
    */
  def main(args: Array[String]): Unit = {
    // Renamed app from the copy-pasted "JDBCRead" to match this object.
    val spark = SparkSession.builder().master("local[*]").appName("JDBCWrite").getOrCreate()
    val df = spark.read.json("D:\\idea\\spark-sql\\input\\user.json")
    // NOTE(review): hard-coded credentials — demo only.
    val url  = "jdbc:mysql://hadoop102:3306/sparksql"
    val user = "root"
    val pw   = "root"
    // BUG FIX: in the original, `.mode(...).save()` was commented out, so the
    // chain built a DataFrameWriter but never wrote anything. Overwrite matches
    // the final mode shown in the original's commented-out code.
    df.write
      .format("jdbc")
      .option("url", url)
      .option("user", user)
      .option("password", pw)
      .option("dbtable", "user1602")
      .mode(SaveMode.Overwrite)
      .save()
    spark.close()
  }
}
专用写法（JDBC 专用 API：`DataFrameWriter.jdbc(url, table, properties)`）
import java.util.Properties
import org.apache.spark.sql.{SaveMode, SparkSession}
object JDBCWrite1 {
  /** Reads user records from a local JSON file and writes them to the MySQL
    * table `user1603` using the *JDBC-specific* writer
    * `DataFrameWriter.jdbc(url, table, properties)`.
    */
  def main(args: Array[String]): Unit = {
    // Renamed app from the copy-pasted "JDBCRead" to match this object.
    val spark = SparkSession.builder().master("local[*]").appName("JDBCWrite1").getOrCreate()
    val df = spark.read.json("D:\\idea\\spark-sql\\input\\user.json")
    // NOTE(review): hard-coded credentials — demo only.
    val url  = "jdbc:mysql://hadoop102:3306/sparksql"
    val user = "root"
    val pw   = "root"
    // BUG FIX: original read `=newProperties()` (missing space), which does not
    // compile — `newProperties` is an unresolved identifier.
    val props = new Properties()
    props.put("user", user)
    props.put("password", pw)
    // NOTE(review): no explicit SaveMode, so the default ErrorIfExists applies —
    // a rerun against an existing `user1603` table will fail. The unused
    // `SaveMode` import suggests `.mode(SaveMode.Append)` may have been intended;
    // confirm before adding it, as that would change behavior.
    df.write.jdbc(url, "user1603", props)
    spark.close()
  }
}