1、原始数据源服务器,从日志拉取数据,并转化为utf8编码,增加ip地址,保存到Redis中,其配置如下:
[plain] view plain copy
input {
  # Tail the game-server log, parse each line as JSON, and transcode
  # it from GBK to UTF-8 on the way in.
  file {
    path => "/tmp/123server.log"
    codec => json {
      charset => "GBK"
    }
    # Read the file from the top and never persist the read offset
    # (sincedb discarded to /dev/null), so every restart re-reads it.
    start_position => "beginning"
    sincedb_path => "/dev/null"
    type => "activitysun"
  }
}
filter {
  # Use the event's own epoch-seconds "timestamp" field as @timestamp,
  # then drop the "time" field.
  # NOTE(review): match is on "timestamp" but remove_field drops "time" —
  # confirm both field names against the actual log JSON.
  date {
    match => ["timestamp", "UNIX"]
    remove_field => ["time"]
  }
  #ruby {
  #  code => "event.timestamp.time.localtime"
  #}
  # Some machines emit events without a usable host value, so pin it
  # to this server's address.
  mutate {
    replace => { "host" => "192.168.10.11" }
  }
}
output {
  #stdout {
  #  codec => plain {
  #    charset => "UTF-8"
  #  }
  #}
  # Local UTF-8 JSON copy of every event, for debugging.
  file {
    path => "/tmp/logstash.log"
    codec => json {
      charset => "UTF-8"
    }
  }
  # Push events onto a Redis list for the collector side to consume.
  # NOTE(review): this writes to port 16378, while the collector's redis
  # input reads port 26378 on the same host — verify both point at the
  # same Redis instance (one of the two ports looks like a typo).
  redis {
    host => ["192.168.10.18"]
    port => 16378
    data_type => "list"
    key => "123server"
    codec => json {
      charset => "UTF-8"
    }
  }
}
采用如下命令启动
[plain] view plain copy
/data/logstash-2.3.4/bin/logstash -f activitylog.conf
2、收集服务器,从redis拉取数据,放入elasticsearch,时间做一下处理,方便分析。配置如下:
[plain] view plain copy
input {
  # Pop JSON events off the Redis list that the shipper side fills.
  # NOTE(review): this reads port 26378, while the shipper's redis
  # output writes port 16378 on the same host — confirm both point at
  # the same Redis instance.
  redis {
    host => ["192.168.10.18"]
    port => 26378
    data_type => "list"
    key => "123server"
    codec => json {
      charset => "UTF-8"
    }
    #type => "activitysun"
  }
}
filter {
  # Stamp each event with a "daytag" field (local date, yyyy.MM.dd,
  # derived from @timestamp); the elasticsearch output uses it to
  # pick a per-day index name.
  ruby {
    code => "event['daytag']=event.timestamp.time.localtime.strftime('%Y.%m.%d')"
  }
}
output {
  # Index name combines the upstream "type" field (set by the shipper's
  # file input, e.g. "activitysun") with the daytag added in the filter.
  # Local time is used deliberately instead of the UTC-based
  # %{+yyyy.MM.dd} sprintf form kept commented out below.
  elasticsearch {
    hosts => ["127.0.0.1:19200"]
    index => "%{type}-%{daytag}"
    #index => "%{type}-%{+yyyy.MM.dd}"
  }
}
3、kibana展示
filebeat.yml
# Filebeat 5.x config. The pasted original had lost all indentation,
# which makes it invalid YAML; nesting reconstructed here. All values
# are unchanged.
filebeat.prospectors:
  - input_type: log
    paths:
      - /log/123.log
      #- /home/haoren/data/filebeat-5.0.2-linux-x86_64/test.log
    # Source log is GBK-encoded; Filebeat transcodes to UTF-8.
    encoding: gbk
    symlinks: true
    # Only ship lines containing [xx统计] or [xx结算] markers.
    include_lines: ['\[.*?统计\]', '\[.*?结算\]']
    document_type: pchannelserver
    # Put the custom "host" field at the event top level (not under "fields").
    fields_under_root: true
    fields:
      host: "192.168.10.15"

processors:
  - drop_fields:
      #fields: ["beat.hostname", "beat.name", "beat.version", "input_type", "beat"]
      fields: ["input_type", "beat", "offset", "source"]

# NOTE(review): running redis + file outputs simultaneously works on
# Beats 5.x but multiple-output support was removed in 6.0 — revisit
# before upgrading.
output.redis:
  enabled: true
  # NOTE(review): port given separately from hosts; confirm this Filebeat
  # version honors "port" — otherwise use hosts: ["192.168.10.18:15377"].
  hosts: ["192.168.10.18"]
  password: "8841c09BAD52E63067C4DA"
  port: 15377
  datatype: list
  key: "filebeat"
  db: 0

output.file:
  enabled: true
  path: "/tmp/filebeat"

output.console:
  enabled: false