Logstash configuration file for consuming from Kafka and syncing the data to Elasticsearch
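The filter below assumes each Kafka message is a single line with three fields joined by ||, in the order username, cpm_strategy and time (time should be numeric, since it is later converted to an integer). A made-up sample message:

zhangsan||strategy_01||1553846400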

input {
  kafka {
    bootstrap_servers => "192.169.160.25:7091,192.169.160.26:7092,192.169.160.27:7093,192.169.160.28:7094,192.169.160.29:7095"   # Kafka broker addresses; for a cluster, separate them with commas
    client_id => "cpm_addUser"
    group_id => "cpm_addUser"
    auto_offset_reset => "latest"   # start consuming from the latest offset when no committed offset exists
    consumer_threads => 5
    decorate_events => true         # attaches the current topic, offset, group, partition, etc. to the event under [@metadata][kafka]
    topics => ["cpm_addUser"]
    type => "cpm_adduser"
  }
}
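# Note (not part of the original config): @metadata fields are dropped at output time, so the
# Kafka details added by decorate_events are not indexed by default. A hedged sketch of copying
# the topic name into the document from inside the filter block:
#   mutate { add_field => { "kafka_topic" => "%{[@metadata][kafka][topic]}" } }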
filter {
  if [message] != "" {
    if [type] == "cpm_adduser" {
      mutate {
        split => ["message", "||"]
        add_field => {
          "username" => "%{[message][0]}"
        }
        add_field => {
          "cpm_strategy" => "%{[message][1]}"
        }
        add_field => {
          "time" => "%{[message][2]}"
        }
        # remove the original message field
        remove_field => ["message"]
      }
    }
  } else {
    drop {}
  }
  # Shift the timestamp to UTC+8 (China Standard Time)
  ruby {
    code => "event.set('timestamp', event.get('@timestamp').time.localtime + 8*60*60)"
  }

  ruby {
    code => "event.set('day', (event.get('@timestamp').time.localtime + 8*60*60).strftime('%Y.%m.%d'))"
  }
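  # Aside (not used in this config): a 'day' value like this is typically meant for a daily
  # index name in the output, e.g. index => "cpm_adduser-%{day}"; treat that as a sketch of
  # a possible use rather than part of the original setup.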

  mutate {
    # convert the time field to an integer
    convert => ["time", "integer"]
  }

}
output {
  if [type] == "cpm_adduser" {
    elasticsearch {
      hosts => ["192.169.160.18:9200"]   # Elasticsearch address and port
      index => "cpm_adduser"             # name of the Elasticsearch index the documents are written to
    }
  }
}
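To try the pipeline out, one option is to temporarily add a stdout output alongside the elasticsearch one so every parsed event is printed to the console. This is only a debugging sketch; kafka_to_es.conf below is a placeholder name for wherever you save the config above:

output {
  # temporary debug output; remove once documents show up in Elasticsearch
  stdout { codec => rubydebug }
}

Start Logstash with bin/logstash -f kafka_to_es.conf, produce a few messages to the cpm_addUser topic, and check the document count with curl http://192.169.160.18:9200/cpm_adduser/_count.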

If consuming from Kafka fails with a warning like the one below, see:

WARN [Consumer clientId=consumer-1, groupId=console-consumer-950] Connection to node -1 could not be

https://blog.csdn.net/getyouwant/article/details/79000524

Kafka installation and quick start:

https://blog.csdn.net/yjt520557/article/details/88558065

More information: https://www.cnblogs.com/willpan-z/p/10307967.html?tdsourcetag=s_pctim_aiomsg

Likes and comments are my biggest motivation. If you run into problems, feel free to leave a comment or contact me on QQ: 1559810637


Reposted from blog.csdn.net/qq_41594146/article/details/88959919