拷贝jar包并启动
[root@hw1 ~]
[root@hw1 bin]
Spark读取mysql文件
scala> val prop = new java.util.Properties
prop: java.util.Properties = {}
scala> prop.setProperty("driver","com.mysql.jdbc.Driver")
res0: Object = null
scala> prop.setProperty("user","root")
res1: Object = null
scala> prop.setProperty("password","root")
res2: Object = null
scala> val jdbcDF = spark.read.jdbc("jdbc:mysql://192.168.56.122:3306/mydemo","users",prop);
scala> jdbcDF.show
+---+--------+----------+
| id|username| birthday|
+---+--------+----------+
| 1|zhangsan|1997-09-08|
| 2| lisi|1993-06-08|
| 3| wangwu|1993-06-01|
+---+--------+----------+
// .mode("append") 指定写入模式为追加（SaveMode.Append）：若目标表已存在则追加数据，而不是报错或覆盖
scala> jdbcDF.filter($"username".endsWith("n")).write.mode("append").jdbc("jdbc:mysql://192.168.56.122:3306/mydemo","xixi",prop)
通过Spark SQL 读取mysql 数据并存储至 hive
DataFrame 类型转换
scala> a.select(a.col("id"),a.col("username"),a.col("birthday").cast("string").as("bir")).write.saveAsTable("hhhh")
[root@hw1 sbin]
scala> spark.stop
scala> import org.apache.spark.sql.SparkSession
scala> val prop = new java.util.Properties
prop: java.util.Properties = {}
scala> prop.setProperty("driver","com.mysql.jdbc.Driver")
res2: Object = null
scala> prop.setProperty("user","root")
res3: Object = null
scala> prop.setProperty("password","root")
res4: Object = null
scala> val spark = SparkSession.builder().appName("spark-hive").enableHiveSupport.getOrCreate
scala> val a = spark.read.jdbc("jdbc:mysql://192.168.56.122:3306/mydemo","users",prop)
scala> a.filter($"username".startsWith("s")).select(a.col("id"),a.col("username"),a.col("birthday").cast("string").as("bir")).show
scala> a.select(a.col("id"),a.col("username"),a.col("birthday").cast("string").as("bir")).show
+---+--------+----------+
| id|username| bir|
+---+--------+----------+
| 1|zhangsan|1997-09-08|
| 2| lisi|1993-06-08|
| 3| wangwu|1993-06-01|
+---+--------+----------+
scala> a.select(a.col("id"),a.col("username"),a.col("birthday").cast("string").as("bir")).write.saveAsTable("hhhh")