Writing Streaming Data to MySQL

outputMysqlApp.scala

import java.sql.{Connection, DriverManager}

import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}

object outputMysqlApp extends App {

  // Configure the entry point: SparkConf and a StreamingContext with a 1-second batch interval
  val conf = new SparkConf().setAppName(getClass.getSimpleName).setMaster("local[2]")
  val ssc = new StreamingContext(conf, Seconds(1))

  // Input DStream: read lines from a socket on localhost:9999
  val lines = ssc.socketTextStream("localhost", 9999)

  // Word count: split each line into words, pair each word with 1, and reduce by key
  val words = lines.flatMap(_.split(" ")).map((_, 1)).reduceByKey(_ + _)



    // Approach 3: open one connection per partition and write each record to MySQL
    words.foreachRDD ( rdd => {
      rdd.foreachPartition(partitionOfRecords => {

        // Note: calling .size on the Iterator would consume it, so check hasNext instead
        if (partitionOfRecords.hasNext) {
          val connection = createNewConnection()
          val statement = connection.createStatement()
          partitionOfRecords.foreach(record => {
            val sql = "insert into wordcount(word, wordcount) values('" + record._1 + "'," + record._2 + ")"
            statement.execute(sql)
          })

          statement.close()
          connection.close()
        }
      })
    })

  // Start the StreamingContext, then receive and process data until termination
  ssc.start()
  ssc.awaitTermination()

  /**
    * Obtain a MySQL database connection.
    * @return the JDBC Connection; note that the method must have a non-Unit return type
    */
  def createNewConnection(): Connection = {
    Class.forName("com.mysql.jdbc.Driver")
    DriverManager.getConnection("jdbc:mysql://192.168.1.100:3306/streaming_mysql","root","root")
  }
}
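
The listing assumes that a table such as wordcount(word, wordcount) already exists in the streaming_mysql database. Opening one connection per partition is already much better than one per record, but the code above still concatenates SQL strings and issues a separate statement for every record. A common refinement, sketched below under the same assumptions (the same table and the createNewConnection() helper from the listing), is to reuse a PreparedStatement and write each partition as a single batch; this is a sketch, not code from the original post.

    // Sketch: batch the inserts of one partition through a PreparedStatement
    words.foreachRDD(rdd => {
      rdd.foreachPartition(partitionOfRecords => {
        if (partitionOfRecords.hasNext) {
          val connection = createNewConnection()
          val statement = connection.prepareStatement(
            "insert into wordcount(word, wordcount) values (?, ?)")
          partitionOfRecords.foreach { case (word, count) =>
            statement.setString(1, word)   // parameter binding avoids quoting problems
            statement.setInt(2, count)
            statement.addBatch()
          }
          statement.executeBatch()         // one round trip per partition
          statement.close()
          connection.close()
        }
      })
    })

To try either version, start a socket source first (for example with nc -lk 9999) and type a few words; each batch's counts should then show up in the wordcount table.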

Reposted from www.cnblogs.com/suixingc/p/jiang-liu-shu-ju-shu-chu-daomysql-zhong.html