
filebeat log collection


Using the nginx error log as an example, this walks through two log processing pipelines:

filebeat--logstash--es

filebeat--kafka--logstash--es


# Manage filebeat with systemd
# /usr/lib/systemd/system/filebeat.service

[Unit]
Description=Filebeat
Documentation=http://www.elastic.co
Wants=network-online.target
After=network-online.target

[Service]
ExecStart=/usr/local/filebeat/filebeat -c /usr/local/filebeat/filebeat.yml
Restart=always

[Install]
WantedBy=multi-user.target


# Manage logstash with systemd
# If there are multiple logstash config files, -f can point at a directory instead
# /usr/lib/systemd/system/logstash.service

[Unit]
Description=logstash
Documentation=http://www.elastic.co
Wants=network-online.target
After=network-online.target

[Service]
Environment=JAVA_HOME=/usr/java/jdk1.8.0_211
ExecStart=/usr/local/logstash/bin/logstash -f /usr/local/logstash/config/logstash.conf -l /usr/local/logstash/logs
Restart=always

[Install]
WantedBy=multi-user.target
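With both unit files in place, the services can be enabled and started through systemd; a minimal sketch, assuming the paths above:

systemctl daemon-reload
systemctl enable --now filebeat
systemctl enable --now logstash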


# Start an nginx container with its log directory mapped to the host
docker run -d --name=nginx --net=host -v /tmp/nginx_log:/var/log/nginx nginx


# nginx error log sample:

2019/09/21 17:00:08 [error] 7#7: *9 open() "/usr/share/nginx/html/api" failed (2: No such file or directory), client: 192.168.3.102, server: localhost, request: "GET /api HTTP/1.1", host: "192.168.3.100"
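An entry like this can be reproduced by requesting a path nginx cannot serve and then reading the mapped log file; a quick sketch, assuming the container above is reachable at 192.168.3.100 on the host network:

curl http://192.168.3.100/api
tail -n 1 /tmp/nginx_log/error.log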



  • filebeat--logstash--es example

# Example: filebeat shipping to logstash
# /usr/local/filebeat/filebeat.yml

filebeat.inputs:
- type: log
  paths:
    - /tmp/nginx_log/error.log
  # Join multi-line entries: lines that do not start with the nginx timestamp
  # format (e.g. 2019/09/21 17:00:08) are appended to the previous event
  multiline.pattern: ^\d{4}/\d{2}/\d{2}\s\d{2}:\d{2}:\d{2}
  multiline.negate: true
  multiline.match: after
  exclude_files: [".gz$"]
  tail_files: true
  # Extra output fields: tags is a list, fields.id is a key-value pair
  tags: ["nginx-100"]
  fields:
    id: "nginx-100"

# Load-balance across two logstash instances
output.logstash:
  hosts: ["192.168.3.100:5044","192.168.3.101:5044"]
  loadbalance: true

# Or output to a single logstash:
#output.logstash:
#  hosts: ["127.0.0.1:5044"]
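Before starting the service, the configuration and the logstash connection can be checked with filebeat's built-in test subcommands; a quick sketch using the paths above:

/usr/local/filebeat/filebeat test config -c /usr/local/filebeat/filebeat.yml
/usr/local/filebeat/filebeat test output -c /usr/local/filebeat/filebeat.yml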


# Example: logstash writing to es; indices are split by fields.id
# /usr/local/logstash/config/logstash.conf

input {
  beats {
    port => 5044
  }
}

output {
  elasticsearch {
    hosts => ["http://192.168.3.100:9200","http://192.168.3.101:9200","http://192.168.3.102:9200"]
    # One index per fields.id value, rolled daily
    index => "%{[fields][id]}-%{+YYYY.MM.dd}"
    user => "elastic"
    password => "HkqZIHZsuXSv6B5OwqJ7"
  }
}
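Once events are flowing, the daily index derived from fields.id can be verified directly against elasticsearch; a rough sketch using the credentials from the config above:

curl -u elastic:HkqZIHZsuXSv6B5OwqJ7 'http://192.168.3.100:9200/_cat/indices/nginx-100-*?v'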



  • filebeat--kafka--logstash--es example

# Example: filebeat shipping to kafka
# /usr/local/filebeat/filebeat.yml

filebeat.inputs:
- type: log
  paths:
    - /tmp/nginx_log/error.log
  # Matches the nginx log timestamp format, e.g. 2019/09/21 17:00:08
  multiline.pattern: ^\d{4}/\d{2}/\d{2}\s\d{2}:\d{2}:\d{2}
  multiline.negate: true
  multiline.match: after
  exclude_files: [".gz$"]
  tail_files: true
  # Extra output fields: tags is a list, fields.id is a key-value pair
  tags: ["nginx-kafka-100"]
  fields:
    id: "nginx-kafka-100"

output.kafka:
  hosts: ["192.168.3.100:9092", "192.168.3.101:9092", "192.168.3.102:9092"]
  # Topic name is taken from fields.id, i.e. nginx-kafka-100
  topic: '%{[fields.id]}'
  partition.round_robin:
    reachable_only: false
  required_acks: 1
  compression: gzip
  max_message_bytes: 1000000
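To confirm filebeat is actually publishing to the topic, a console consumer can be attached to it; a rough sketch (the /usr/local/kafka path is an assumption, adjust it to wherever the Kafka CLI tools live):

/usr/local/kafka/bin/kafka-console-consumer.sh \
  --bootstrap-server 192.168.3.100:9092 \
  --topic nginx-kafka-100 \
  --from-beginning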


# Example: logstash consuming from kafka and writing to es
# /usr/local/logstash/config/logstash.conf

input {
  kafka {
    group_id => "logstash"
    topics => ["nginx-kafka-100"]
    bootstrap_servers => "192.168.3.100:9092,192.168.3.101:9092,192.168.3.102:9092"
    consumer_threads => "1"
    fetch_max_bytes => "26214400"
    codec => plain
  }
}

filter {
  # Filebeat writes each event to kafka as a JSON string; parse it back into
  # fields so %{[fields][id]} is available for the index name below
  json {
    source => "message"
  }
}

output {
  elasticsearch {
    hosts => ["http://192.168.3.100:9200","http://192.168.3.101:9200","http://192.168.3.102:9200"]
    index => "%{[fields][id]}-%{+YYYY.MM.dd}"
    user => "elastic"
    password => "HkqZIHZsuXSv6B5OwqJ7"
  }
}
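If documents stop showing up in elasticsearch, checking the lag of the "logstash" consumer group is a useful first step; a rough sketch, with the same path assumption as above for the Kafka CLI tools:

/usr/local/kafka/bin/kafka-consumer-groups.sh \
  --bootstrap-server 192.168.3.100:9092 \
  --describe --group logstash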


References:

https://www.elastic.co/guide/en/beats/filebeat/current/kafka-output.html

https://www.elastic.co/guide/en/beats/filebeat/current/logstash-output.html

https://www.elastic.co/guide/en/logstash/current/plugins-filters-json.html

