elk7.0精细配置

1、filebeat配置:

# Filebeat inputs: two app log files, tagged via custom fields so logstash
# can route them. fields_under_root puts log_file/log_type at event top level.
filebeat.inputs:
- type: log
  paths:
    - /root/channelHandle-out-2.log
  fields:
    log_file: xsj_channelhandle_out_2
    log_type: a-out-log
  fields_under_root: true
  encoding: utf-8
- type: log
  paths:
    - /root/channelHandle-err-2.log
  fields:
    log_file: xsj_channelhandle_err_2
    log_type: a-err-log
  fields_under_root: true
  encoding: utf-8

# Drop any event whose message does not contain "收到" (the marker line).
processors:
- drop_event:
    when.not.contains:
      message: "收到"

# Ship events to redis as a list; key falls back to "xsj" if log_file is unset.
output.redis:
  hosts: ["10.0.1.223:6700"]
  db: 0
  password: "[email protected]"
  key: "%{[log_file]:xsj}"
  timeout: 5

./filebeat -e 
 
2、logstatsh
# Logstash pipeline: read two redis lists, parse the "收到 ... {json}" app
# messages, compute request duration, and index into elasticsearch.
input {
  redis {
    host => "127.0.0.1"
    port => "6700"
    password => "[email protected]"
    data_type => "list"
    key => "test_channelhandle_out_2"
  }
}
input {
  redis {
    host => "127.0.0.1"
    port => "6700"
    password => "[email protected]"
    data_type => "list"
    key => 'test_channelhandle_err_2'
  }
}
filter {
  if [log_type] == "a-out-log" {
    # Flatten beat metadata and drop fields we never query.
    mutate {
      rename => { "[host][name]" => "host_name" }
      remove_field => ["ecs", "input", "log", "agent", "host", "param"]
    }
    # Capture the optional leading timestamp (recTime) and the trailing JSON payload.
    grok {
      match => {
        "message" => [
          "(?<recTime>(\d+-){2}\d+\s+(\d+:){2}\d+).*?收到.*?(?<content>{.*})",
          "^收到.*?(?<content>{.*})"
        ]
      }
    }
    # Expand the JSON payload into top-level fields, then discard the raw blob.
    json { source => "content" }
    mutate { remove_field => ["content", "param"] }
    # Use the request's own time as the event timestamp.
    date {
      match => [ "reqTime", "yyyy-MM-dd HH:mm:ss", "UNIX" ]
      timezone => "Asia/Shanghai"
      target => ["@timestamp"]
    }
    if [recTime] {
      date {
        match => ["recTime", "yyyy-MM-dd HH:mm:ss", "UNIX"]
        target => ["recTime"]
      }
      # duration = receive time - request time, in seconds.
      ruby {
        init => "require 'time'"
        code => "duration = (event.get('recTime') - event.get('@timestamp')); event.set('duration', duration)"
      }
      mutate { remove_field => ["recTime"] }
    }
  }
}
output {
  elasticsearch {
    hosts => ["10.0.1.221:9200", "10.0.1.222:9200"]
    index => "%{[log_file]}_%{+YYYYMMdd}"
  }
  #stdout { codec => rubydebug }
}

./logstash -f config/logstash-redis.yml

supervisor进程管理配置文件:

; supervisor unit for logstash — one key=value per line (INI syntax).
[program:logstash]
command=/data/logstash/bin/logstash -f /data/logstash/config/logstash-redis.yml
autostart=true
autorestart=true
logfile_maxbytes=50MB
logfile_backups=5
environment=JAVA_HOME=/usr/local/jdk
stdout_logfile=/var/log/supervisor/logstash.out.log
stderr_logfile=/var/log/supervisor/logstash.err.log

3、 redis

supervisor 进程管理启动文件:

; supervisor unit for the redis replica — one key=value per line (INI syntax).
[program:redis]
command=/usr/local/bin/redis-server /data/redis/conf/redis-6700.conf
autostart=true
autorestart=true
logfile_maxbytes=50MB
logfile_backups=5
stdout_logfile=/var/log/supervisor/redis.out.log
stderr_logfile=/var/log/supervisor/redis.err.log

redis 中从配置(redis/config/redis-6700.conf)

# Replica settings — redis.conf takes one directive per line.
slaveof 10.0.1.223 6700
masterauth [email protected]
slave-serve-stale-data yes
slave-read-only yes

4、java环境配置(elastic\logstash)

[root@server01 src]$ tar -zvxf jdk-8u151-linux-x64.tar.gz -C /data/app/
[root@server01 src]$ ln -s /data/app/jdk1.8.0_151 /data/app/jdk
# Quote the heredoc delimiter so $JAVA_HOME etc. are written to /etc/profile
# literally instead of being expanded (empty) at write time.
[root@server01 src]$ cat <<'EOF' >> /etc/profile
export JAVA_HOME=/data/app/jdk
export PATH=$JAVA_HOME/bin:$JAVA_HOME/jre/bin:$PATH
export CLASSPATH=.:$CLASSPATH:$JAVA_HOME/lib:$JAVA_HOME/jre/lib:$JAVA_HOME/lib/tools.jar
EOF
[root@server01 src]$ source /etc/profile

5、elasticsearch

/etc/sysctl.conf

vm.max_map_count = 655350

/etc/security/limits.conf

*  -  nofile 102400
*  -  nproc 4096

修改/etc/supervisord.conf

minfds=102400 

配置子进程管理文件:/etc/supervisord.d/elastic.ini

; supervisor unit for elasticsearch (must not run as root, hence user=elkuser).
; NOTE(review): minfds/minprocs are [supervisord]-section options, not
; per-program ones — confirm they are also set in /etc/supervisord.conf.
[program:elasticsearch]
user=elkuser
command=/data/elasticsearch/bin/elasticsearch
environment=ES_HEAP_SIZE=2g
minfds=102400
minprocs=32768
autostart=true
autorestart=true
logfile_maxbytes=50MB
logfile_backups=5
stdout_logfile=/var/log/supervisor/elasticsearch.out.log
stderr_logfile=/var/log/supervisor/elasticsearch.err.log

kibana 汉化: 配置文件修改:i18n.locale: "zh-CN"

6、 logstash 解析nginx日志:

nginx日志格式:

XSJ_WSS_NGINX
log_format main '$remote_addr $http_X_Forwarded_For [$time_local] ' '$upstream_addr "$upstream_response_time" "$request_time" ' '$http_host $request ' '"$status" $body_bytes_sent "$http_referer" ' '"$http_accept_language" "$http_user_agent" '; 

解析编码 /data/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-patterns-core-4.1.2/patterns

XSJ_WSS_NGINX_GROK
vim nginx

NGINX_ACCESS %{IPORHOST:clientip} %{NOTSPACE:http_x_forwarded_for} \[%{HTTPDATE:timestamp}\] (?<upstream_addr>%{IPORHOST}:%{NUMBER}) \"%{NUMBER:upstream_response}\" \"%{NUMBER:request_time}\" (?<http_host>%{IPORHOST}:%{NUMBER}) %{NOTSPACE:request} /.*? \"%{NUMBER:status}\" (?:%{NUMBER:sent_bytes}|-) (\".*\"?){2} %{QS:agent}
LEYOU_API_NGINX_GROK
%{IPORHOST:clientip}.*?\[%{HTTPDATE:timestamp}\] \"(?<request>%{WORD}) (?<request_url>.*?) .*?\" %{NUMBER:status} (?:%{NUMBER:sent_bytes}|-) (\".*?)\" %{QS:agent} %{NOTSPACE:http_x_forwarded_for}

logstash规则解析:

# Logstash pipeline: three redis lists (two app logs + nginx access log),
# branched on log_type, indexed per log_file/day.
input {
  redis {
    host => "127.0.0.1"
    port => "6700"
    password => "[email protected]"
    data_type => "list"
    key => "xsj_channelhandle_out_2"
  }
}
input {
  redis {
    host => "127.0.0.1"
    port => "6700"
    password => "[email protected]"
    data_type => "list"
    key => 'xsj_channelhandle_err_2'
  }
}
input {
  redis {
    host => "127.0.0.1"
    port => "6700"
    password => "[email protected]"
    data_type => "list"
    key => 'xsj_wss_nginx_access'
  }
}
filter {
  # Common cleanup for every event, regardless of type.
  mutate {
    rename => { "[host][name]" => "host_name" }
    remove_field => ["ecs", "input", "log", "agent", "host"]
  }
  if [log_type] == "a-out-log" {
    grok {
      match => {
        #"message" => ".*?收到.*?(?<content>{.*})"
        "message" => [
          "(?<recTime>(\d+-){2}\d+\s+(\d+:){2}\d+).*?收到.*?(?<content>{.*})",
          "^收到.*?(?<content>{.*})"
        ]
      }
    }
    json {
      source => "content"
      remove_field => ["content"]
    }
    mutate { remove_field => ["content", "param"] }
    date {
      match => [ "reqTime", "yyyy-MM-dd HH:mm:ss", "UNIX" ]
      timezone => "Asia/Shanghai"
      target => ["@timestamp"]
    }
    if [recTime] {
      date {
        match => ["recTime", "yyyy-MM-dd HH:mm:ss", "UNIX"]
        target => ["recTime"]
      }
      # duration = receive time - request time, in seconds.
      ruby {
        init => "require 'time'"
        code => "duration = (event.get('recTime') - event.get('@timestamp')); event.set('duration', duration)"
      }
      mutate { remove_field => ["recTime"] }
    }
  }
  if [log_type] == "nginx-access" {
    # NGINX_ACCESS is the custom pattern installed in the patterns directory.
    grok {
      match => [ "message", "%{NGINX_ACCESS}" ]
      overwrite => [ "message" ]
      remove_tag => ["_grokparsefailure"]
    }
    mutate {
      convert => ["response", "integer"]
      convert => ["bytes", "integer"]
      convert => ["responsetime", "float"]
    }
    geoip {
      source => "http_x_forwarded_for"
      target => "geoip"
      add_tag => [ "nginx-geoip" ]
    }
    date {
      match => [ "timestamp", "dd/MMM/YYYY:HH:mm:ss Z" ]
      target => ["@timestamp"]
    }
  }
}
output {
  elasticsearch {
    hosts => ["10.0.1.221:9200", "10.0.1.222:9200"]
    index => "%{[log_file]}_%{+YYYYMMdd}"
  }
  #stdout { codec => rubydebug }
}

猜你喜欢

转载自www.cnblogs.com/capable/p/10990741.html