使用filebeat替代logstash收集日志

本文以采集 nginx 日志为例,贴出实现过程中各服务的具体配置。若初次接触 ELK,可查看 ELK 基本部署以及使用 进行初步了解。

Nginx

nginx.conf


# JSON access-log format. escape=json makes nginx escape embedded quotes and
# backslashes so each log line is a valid JSON object.
# NOTE: straight ASCII quotes are required — the pasted original used
# typographic quotes (‘ “ ”), which nginx rejects at config parse time.
# NOTE(review): "$Real" is not a built-in nginx variable — likely meant
# $http_x_real_ip (X-Real-IP header); confirm against the proxy layer.
log_format access_json escape=json '{"@timestamp":"$time_iso8601",'
    '"host":"$server_addr",'
    '"real-host":"$server_addr",'
    '"clientip":"$remote_addr",'
    '"real-ip":"$Real",'
    '"size":$body_bytes_sent,'
    '"responsetime":$request_time,'
    '"upstreamtime":"$upstream_response_time",'
    '"upstreamhost":"$upstream_addr",'
    '"http_host":"$host",'
    '"url":"$request_uri",'
    '"domain":"$host",'
    '"xff":"$http_x_forwarded_for",'
    '"referer":"$http_referer",'
    '"status":"$status"}';


Filebeat

Dockerfile

# Pin the Filebeat image to the ELK stack version. An ARG declared before
# FROM is only in scope for the FROM line itself (re-declare after FROM if
# later stages need it).
ARG ELK_VERSION=7.6.1
FROM elastic/filebeat:${ELK_VERSION}

filebeat.yml

# NOTE: the pasted original had all YAML indentation stripped, which makes the
# file invalid. Restored canonical 2-space nesting below.
filebeat.inputs:
  ##### nginx-access-log ###########
  - type: log
    enabled: true
    paths:
      - "/log/access_log.log"
    # Add a top-level "type" field so the Logstash pipeline can route events.
    fields:
      type: nginx-access-log
    fields_under_root: true
  ##### nginx-error-log ###########
  - type: log
    enabled: true
    paths:
      - "/log/log.log"
    fields:
      type: nginx-error-log
    fields_under_root: true

filebeat.config:
  modules:
    path: ${path.config}/modules.d/*.yml
    reload.enabled: false

processors:
  - add_cloud_metadata: ~
  - add_docker_metadata: ~

#----------------------------- Logstash output --------------------------------
output.logstash:
  # The Logstash hosts
  hosts: ["logstash:5044"]

#============================== X-Pack Monitoring ===============================
xpack.monitoring.enabled: true
xpack.monitoring.elasticsearch.hosts: ["elasticsearch:9200"]
xpack.monitoring.elasticsearch.username: "elastic"
xpack.monitoring.elasticsearch.password: "xxxxxx"

logstash

filebeat.conf

# Receives events from Filebeat, parses nginx JSON access logs, normalizes a
# few fields, and routes to per-type daily Elasticsearch indices.
input {
  beats {
    port => "5044"
    #codec => plain{charset => "ISO-8859-1"}
  }
}

filter {
  # Access logs arrive as one JSON object per line (nginx escape=json format).
  if [type] == "nginx-access-log" {
    json {
      source => "message"
    }
    mutate {
      # upstreamtime can be a comma-separated list when multiple upstreams
      # were tried; split it and keep the first entry as a float.
      split => [ "upstreamtime", "," ]
      convert => [ "upstreamtime", "float" ]
      rename => { "[real-host]" => "host" }
      rename => { "[upstreamtime][0]" => "upstreamtime" }
      remove_field => ["message"]
    }
  }
  if [type] == "nginx-error-log" {
    mutate {
      split => [ "upstreamtime", "," ]
      convert => [ "upstreamtime", "float" ]
      rename => { "[upstreamtime][0]" => "upstreamtime" }
      rename => { "[host][name]" => "host" }
    }
  }
  # Strip Beats bookkeeping fields that are never queried downstream.
  mutate {
    remove_field => ["log", "ecs", "agent", "tags", "input"]
  }
}

output {
  if [type] == "nginx-access-log" {
    elasticsearch {
      hosts => ["elasticsearch:9200"]
      index => "nginx-access-log-%{+YYYY.MM.dd}"
      user => "elastic"
      password => "xxxxxx"
    }
    # stdout { codec => rubydebug }
  }
  if [type] == "nginx-error-log" {
    elasticsearch {
      hosts => ["elasticsearch:9200"]
      index => "nginx-error-log-%{+YYYY.MM.dd}"
      user => "elastic"
      password => "xxxxxx"
    }
    # stdout { codec => rubydebug }
  }
}