Kafka serialization and deserialization with Avro and the Confluent Schema Registry

First, add the following dependencies:

<!-- https://mvnrepository.com/artifact/org.apache.avro/avro -->
<dependency>
    <groupId>org.apache.avro</groupId>
    <artifactId>avro</artifactId>
    <version>1.9.0</version>
</dependency>

<dependency>
    <groupId>org.apache.avro</groupId>
    <artifactId>avro-tools</artifactId>
    <version>1.9.0</version>
</dependency>
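
Note that avro-tools is only needed if you want to generate concrete record classes from a schema; the examples in this post use GenericRecord, so it is optional. If you do want generated classes, and assuming the schema has been saved to a file named Customer.avsc (a file name chosen here for illustration), the generator can be invoked like this:

# generate Java classes from an .avsc file into src/main/java
java -jar avro-tools-1.9.0.jar compile schema Customer.avsc ./src/main/java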

The Confluent packages, however, cannot be found on Maven Central and have to be added by hand: what is needed is the jar that provides io.confluent.kafka.serializers.KafkaAvroSerializer, plus its dependencies.
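
Instead of hand-copying jars, they can also be pulled from Confluent's own Maven repository (see the note at the end of this post). A minimal pom sketch, assuming the kafka-avro-serializer artifact; the version is an assumption and should match your Confluent Platform version:

<repositories>
    <repository>
        <id>confluent</id>
        <url>http://packages.confluent.io/maven/</url>
    </repository>
</repositories>

<dependency>
    <groupId>io.confluent</groupId>
    <artifactId>kafka-avro-serializer</artifactId>
    <!-- assumption: match this to your Confluent Platform version -->
    <version>4.1.1</version>
</dependency>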

Next, Confluent needs to be installed on Linux.

I installed version 4.1.1 (the version visible in the startup prompt below).

The tar package can be downloaded from: https://www.confluent.io/previous-versions/

After downloading, extract it.
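
For example (a sketch; the exact archive file name is an assumption, use whatever the download page gives you):

tar -xzf confluent-oss-4.1.1-2.11.tar.gz
cd confluent-4.1.1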

Now start Confluent. (Note: ZooKeeper and Kafka must be started before the Schema Registry!)
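
If you are running a standalone Apache Kafka installation, they are typically brought up like this (a sketch; the paths assume a default Kafka install):

# run from the Kafka installation directory
bin/zookeeper-server-start.sh config/zookeeper.properties
bin/kafka-server-start.sh config/server.properties

With both running, start the Schema Registry: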

[root@confluent confluent-4.1.1]# bin/schema-registry-start etc/schema-registry/schema-registry.properties
# some output omitted...
[2018-06-22 16:10:26,442] INFO Server started, listening for requests... (io.confluent.kafka.schemaregistry.rest.SchemaRegistryMain:45)

Once it has started successfully, we can write the client programs.

Producer:

import java.util.Properties;
import java.util.concurrent.ExecutionException;

import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericRecord;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;

public class AvroTest2 {
    public static void main(String[] args) {
        Properties kafkaProps = new Properties();
        kafkaProps.put("bootstrap.servers", "192.168.123.66:9092");
        // keys are plain strings (matching the consumer's StringDeserializer);
        // values are Avro-encoded through the Schema Registry
        kafkaProps.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        kafkaProps.put("value.serializer", "io.confluent.kafka.serializers.KafkaAvroSerializer");
        kafkaProps.put("schema.registry.url", "http://192.168.123.66:8081");

        // email is a ["null", "string"] union, so the field is genuinely optional
        String schemaString = "{\n" +
                "    \"namespace\": \"CustomerManagerAvro\",\n" +
                "    \"type\": \"record\",\n" +
                "    \"name\": \"Customer\",\n" +
                "    \"fields\": [\n" +
                "        {\"name\": \"id\", \"type\": \"int\"},\n" +
                "        {\"name\": \"name\",  \"type\": \"string\"},\n" +
                "        {\"name\": \"email\", \"type\": [\"null\", \"string\"]}\n" +
                "    ]\n" +
                "}\n";
        Schema.Parser parser = new Schema.Parser();
        Schema schema = parser.parse(schemaString);

        KafkaProducer<String, GenericRecord> producer =
                new KafkaProducer<String, GenericRecord>(kafkaProps);

        GenericData.Record customer = new GenericData.Record(schema);
        customer.put("id", 3);
        customer.put("name", "谢飞");
        customer.put("email", "[email protected]");

        ProducerRecord<String, GenericRecord> record =
                new ProducerRecord<String, GenericRecord>("CustomerTest", customer);
        try {
            producer.send(record).get(); // block until the broker acknowledges
        } catch (InterruptedException e) {
            e.printStackTrace();
        } catch (ExecutionException e) {
            e.printStackTrace();
        } finally {
            producer.close();
        }
    }
}

Consumer:

import java.util.ArrayList;
import java.util.List;
import java.util.Properties;

import org.apache.avro.generic.GenericRecord;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.TopicPartition;

public class DeAvroTest {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "192.168.123.66:9092");
        props.put("group.id", "CountryCounter");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        // the deserializer fetches the writer's schema from the registry,
        // using the schema id embedded in each message
        props.put("value.deserializer", "io.confluent.kafka.serializers.KafkaAvroDeserializer");
        props.put("schema.registry.url", "http://192.168.123.66:8081");
        KafkaConsumer<String, GenericRecord> consumer = new KafkaConsumer<String, GenericRecord>(props);

        // To start from the latest offset, subscribe to the topic instead:
        // consumer.subscribe(Collections.singletonList("CustomerTest"));

        // To read from the beginning of the partition, assign it explicitly and seek:
        TopicPartition tp = new TopicPartition("CustomerTest", 0);
        List<TopicPartition> list = new ArrayList<TopicPartition>();
        list.add(tp);
        consumer.assign(list);          // assign() takes the full list of partitions
        consumer.seekToBeginning(list); // rewind the assigned partitions

        // poll loop
        try {
            while (true) {
                ConsumerRecords<String, GenericRecord> records = consumer.poll(100);
                for (ConsumerRecord<String, GenericRecord> record : records) {
                    System.out.println(record.value());
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            consumer.close();
            System.out.println("consumer test finished!");
        }
    }
}

Run it and you can see the consumer receive the messages.

Of course, we can also pull out just one specific field, such as name, by using:

 System.out.println(record.value().get("name"));

 

You can see the name printed out.
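
One caveat worth knowing: GenericRecord.get returns an Object, and Avro decodes string fields as org.apache.avro.util.Utf8 rather than java.lang.String, so convert explicitly when you need a real String. A minimal sketch (variable names are illustrative):

Object nameField = record.value().get("name");
// Avro strings arrive as org.apache.avro.util.Utf8; toString() converts safely
String name = nameField == null ? null : nameField.toString();
System.out.println(name);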

We can also list all registered schemas from Linux.
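
The registry exposes a REST API for this. A sketch using curl (the subject name CustomerTest-value assumes the default strategy of naming subjects <topic>-value):

# list all subjects
curl http://192.168.123.66:8081/subjects

# list the versions registered for our topic's value schema
curl http://192.168.123.66:8081/subjects/CustomerTest-value/versions

# fetch a specific schema version
curl http://192.168.123.66:8081/subjects/CustomerTest-value/versions/1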

PS: 1. Once we embed a schema in the client, the serializer registers it with Confluent automatically; no separate registration step is needed.

2. Confluent's Maven repository: http://packages.confluent.io/maven/io/

Using it requires adding the repository to your Maven configuration (as in the pom sketch earlier); here I simply used the downloaded jar packages directly.

