Flink (6) Streaming API - Sink

I. Kafka

1. Add the dependency

<!-- https://mvnrepository.com/artifact/org.apache.flink/flink-connector-kafka-0.11 -->
<dependency>
    <groupId>org.apache.flink</groupId>
    <artifactId>flink-connector-kafka-0.11_2.11</artifactId>
    <version>1.7.2</version>
</dependency>
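The programs in this post parse CSV lines into a SensorReading bean from com.flink.bean, which the original does not show. A minimal sketch consistent with how the bean is used below (field names, including the temprature spelling, match the code):

package com.flink.bean

case class SensorReading(id: String, timestamp: Long, temprature: Double)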

2. The full program

package com.flink.sink
import java.util.Properties
import com.flink.bean.SensorReading
import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.connectors.kafka.{FlinkKafkaConsumer011, FlinkKafkaProducer011}

/**
  * @Author :star
  * @Date :2020/7/6 11:03
  * @Version :1.0
  */
object KafkaSink {
  def main(args: Array[String]): Unit = {
    //environment
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    //source
    val properties = new Properties()
    properties.setProperty("bootstrap.servers","hdp-1:9092")
    properties.setProperty("group.id", "consumer-group")
    properties.setProperty("key.deserializer","org.apache.kafka.common.serialization.StringDeserializer")
    properties.setProperty("value.deserializer","org.apache.kafka.common.serialization.StringDeserializer")
    properties.setProperty("auto.offset.reset", "latest")
    val dataStream: DataStream[String] = env.addSource(new FlinkKafkaConsumer011[String]("first", new SimpleStringSchema(), properties))
    //transform
    val sensorData: DataStream[String] = dataStream.map(
      data => {
        val dataArray: Array[String] = data.split(",")
        //toString so the record goes to Kafka as a plain string
        SensorReading(dataArray(0).trim, dataArray(1).trim.toLong, dataArray(2).trim.toDouble).toString
      }
    )
    //sink
    sensorData.addSink(new FlinkKafkaProducer011[String]("hdp-1:9092","flink",new SimpleStringSchema()))
    sensorData.print()
    //execute
    env.execute()
  }
}
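The producer above uses the simple (brokerList, topic, schema) constructor, which gives at-least-once delivery at best. The 0.11 connector also exposes a Semantic-based constructor for transactional writes; the following is a sketch assuming that constructor and the connector's KeyedSerializationSchemaWrapper (exactly-once additionally requires checkpointing to be enabled on the environment):

import org.apache.flink.streaming.util.serialization.KeyedSerializationSchemaWrapper

// Producer-side config; reuses the broker address from above
val producerProps = new Properties()
producerProps.setProperty("bootstrap.servers", "hdp-1:9092")

sensorData.addSink(new FlinkKafkaProducer011[String](
  "flink",                                                  // target topic
  new KeyedSerializationSchemaWrapper[String](new SimpleStringSchema()),
  producerProps,
  FlinkKafkaProducer011.Semantic.EXACTLY_ONCE               // two-phase commit tied to checkpoints
))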

II. Redis

1. Add the dependency

<!-- https://mvnrepository.com/artifact/org.apache.bahir/flink-connector-redis -->
<dependency>
    <groupId>org.apache.bahir</groupId>
    <artifactId>flink-connector-redis_2.11</artifactId>
    <version>1.0</version>
</dependency>

2. The full code

package com.flink.sink

import com.flink.bean.SensorReading
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.connectors.redis.RedisSink
import org.apache.flink.streaming.connectors.redis.common.config.FlinkJedisPoolConfig
import org.apache.flink.streaming.connectors.redis.common.mapper.{RedisCommand, RedisCommandDescription, RedisMapper}

/**
  * @Author :star
  * @Date :2020/7/6 11:52
  * @Version :1.0
  */
// Named RedisSinkDemo so it does not clash with the connector's RedisSink class imported above
object RedisSinkDemo {
  def main(args: Array[String]): Unit = {
    //environment
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    //source
    val dataStream: DataStream[String] = env.readTextFile("D:\\projects\\FlinkProjects\\FlinkStudy\\src\\main\\resources\\sensor.txt")
    //transform
    val sensorData: DataStream[SensorReading] = dataStream.map(
      data => {
        val dataArray: Array[String] = data.split(",")
        SensorReading(dataArray(0).trim, dataArray(1).trim.toLong, dataArray(2).trim.toDouble)
      }
    )
    val conf =  new FlinkJedisPoolConfig.Builder()
        .setHost("hdp-1")
        .setPort(6379)
        .build()
    //sink
    sensorData.addSink(new RedisSink[SensorReading](conf,new MyRedisMapper))
    //execute
    env.execute()
  }
}
// Map each SensorReading to a Redis HSET: hash name "sensor_tem", field = sensor id, value = temperature
class MyRedisMapper extends RedisMapper[SensorReading] {
  override def getCommandDescription: RedisCommandDescription = {
    new RedisCommandDescription(RedisCommand.HSET, "sensor_tem")
  }
  override def getKeyFromData(t: SensorReading): String = t.id
  override def getValueFromData(t: SensorReading): String = t.temprature.toString
}
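With HSET, every record becomes a field in the hash sensor_tem, so redis-cli HGETALL sensor_tem shows the latest temperature per sensor id. If you instead want one top-level key per sensor, the mapper can use the SET command; a sketch (the sensor_tem: key prefix is an assumption for illustration):

// Variant: one Redis string key per sensor, written with SET
class MyRedisSetMapper extends RedisMapper[SensorReading] {
  // SET takes no additional key, so the one-argument description is used
  override def getCommandDescription: RedisCommandDescription =
    new RedisCommandDescription(RedisCommand.SET)
  override def getKeyFromData(t: SensorReading): String = "sensor_tem:" + t.id   // hypothetical key prefix
  override def getValueFromData(t: SensorReading): String = t.temprature.toString
}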

III. Elasticsearch

1. Add the dependency

<dependency>
    <groupId>org.apache.flink</groupId>
    <artifactId>flink-connector-elasticsearch6_2.11</artifactId>
    <version>1.7.2</version>
</dependency>

2. The full code

import java.util

import org.apache.flink.api.common.functions.RuntimeContext
import org.apache.flink.streaming.connectors.elasticsearch.{ElasticsearchSinkFunction, RequestIndexer}
import org.apache.flink.streaming.connectors.elasticsearch6.ElasticsearchSink
import org.apache.http.HttpHost
import org.elasticsearch.client.Requests

val httpHosts = new util.ArrayList[HttpHost]()
httpHosts.add(new HttpHost("localhost", 9200))

val esSinkBuilder = new ElasticsearchSink.Builder[SensorReading](httpHosts, new ElasticsearchSinkFunction[SensorReading] {
  override def process(t: SensorReading, runtimeContext: RuntimeContext, requestIndexer: RequestIndexer): Unit = {
    println("saving data: " + t)
    // wrap the record in a JSON map and index it into index "sensor", type "readingData"
    val json = new util.HashMap[String, String]()
    json.put("data", t.toString)
    val indexRequest = Requests.indexRequest().index("sensor").`type`("readingData").source(json)
    requestIndexer.add(indexRequest)
    println("saved successfully")
  }
})

dataStream.addSink(esSinkBuilder.build())
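By default the sink buffers index requests and writes them in bulk, so records may not show up in Elasticsearch immediately. For a quick test, the builder can be told to flush after every element before build() is called; a sketch using the builder's bulk-flush setting:

// Flush after each element (fine for testing; prefer larger batches in production)
esSinkBuilder.setBulkFlushMaxActions(1)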

IV. Custom Sink (JDBC)

1. Add the dependency

<!-- https://mvnrepository.com/artifact/mysql/mysql-connector-java -->
<dependency>
    <groupId>mysql</groupId>
    <artifactId>mysql-connector-java</artifactId>
    <version>8.0.15</version>
</dependency>

2. The full code

package com.flink.sink

import java.sql.{Connection, DriverManager, PreparedStatement}

import com.flink.bean.SensorReading
import org.apache.flink.configuration.Configuration
import org.apache.flink.streaming.api.functions.sink.{RichSinkFunction, SinkFunction}
import org.apache.flink.streaming.api.scala._

/**
  * @Author :star
  * @Date :2020/7/6 12:55
  * @Version :1.0
  */
object JdbcSink {
  def main(args: Array[String]): Unit = {
    //environment
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    //source
    val dataStream: DataStream[String] = env.readTextFile("D:\\projects\\FlinkProjects\\FlinkStudy\\src\\main\\resources\\sensor.txt")
    //transform
    val sensorData: DataStream[SensorReading] = dataStream.map(
      data => {
        val dataArray: Array[String] = data.split(",")
        SensorReading(dataArray(0).trim, dataArray(1).trim.toLong, dataArray(2).trim.toDouble)
      }
    )
    sensorData.addSink(new JdbcSink)
    env.execute()
  }
}
class JdbcSink extends RichSinkFunction[SensorReading]{
  // JDBC connection and prepared statements
  var conn : Connection = _
  var insertStmt : PreparedStatement = _
  var updateStmt : PreparedStatement = _

  // open(): create the connection and prepare the statements once per parallel instance
  override def open(parameters: Configuration): Unit = {
    super.open(parameters)
    val url : String = "jdbc:mysql://localhost:3306/test?characterEncoding=utf8&useSSL=false&serverTimezone=UTC&rewriteBatchedStatements=true"
    conn = DriverManager.getConnection(url, "root", "root")
    insertStmt = conn.prepareStatement("INSERT INTO sonsor(sonsorId, tmp) VALUES(?, ?)")
    updateStmt = conn.prepareStatement("UPDATE sonsor SET tmp = ? WHERE sonsorId = ?")
  }

  // invoke(): called once per record; try an UPDATE first, INSERT if no row was updated
  override def invoke(value: SensorReading, context: SinkFunction.Context[_]): Unit = {
    updateStmt.setDouble(1, value.temprature)
    updateStmt.setString(2, value.id)
    updateStmt.execute()
    if (updateStmt.getUpdateCount == 0) {
      insertStmt.setString(1, value.id)
      insertStmt.setDouble(2, value.temprature)
      insertStmt.execute()
    }
  }

  // close(): release the statements and the connection
  override def close(): Unit = {
    updateStmt.close()
    insertStmt.close()
    conn.close()
  }
}
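The update-then-insert pattern above needs two round trips in the worst case. With MySQL the same effect can come from a single statement, assuming sonsorId carries a PRIMARY KEY or UNIQUE index (the original post does not show the table's DDL, so this is an assumption); a sketch with a hypothetical upsertStmt field replacing the two statements:

// Prepared once in open(); requires a unique index on sonsorId (assumption)
upsertStmt = conn.prepareStatement(
  "INSERT INTO sonsor(sonsorId, tmp) VALUES(?, ?) ON DUPLICATE KEY UPDATE tmp = VALUES(tmp)")

// invoke() then shrinks to a single execute per record
override def invoke(value: SensorReading, context: SinkFunction.Context[_]): Unit = {
  upsertStmt.setString(1, value.id)
  upsertStmt.setDouble(2, value.temprature)
  upsertStmt.execute()
}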


Reposted from blog.csdn.net/weixin_43233971/article/details/107452214