Collecting Exchange Transport Logs and IIS Logs into ES

Download Filebeat

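As a minimal sketch, the Windows ZIP package can be fetched and unpacked with PowerShell; the version number and download location below are only examples, adjust them to the release actually in use:

# Download the Filebeat ZIP for Windows (example version number, adjust as needed)
Invoke-WebRequest -Uri "https://artifacts.elastic.co/downloads/beats/filebeat/filebeat-7.9.2-windows-x86_64.zip" -OutFile "$env:TEMP\filebeat.zip"
# Unpack it and rename the extracted folder to "filebeat"
Expand-Archive -Path "$env:TEMP\filebeat.zip" -DestinationPath "$env:TEMP"
Rename-Item -Path "$env:TEMP\filebeat-7.9.2-windows-x86_64" -NewName "filebeat"
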
Edit the filebeat.yml file

filebeat.inputs:
    - type: log
      enabled: true
      paths: # Collect the transport service (message tracking) logs
        - D:\Program Files\Microsoft\Exchange Server\V15\TransportRoles\Logs\MessageTracking\MSGTRK2*.LOG
      exclude_lines: ['^#'] # Skip the '#' header lines at the top of each log file
      tags: ["Transportlog"]
    - type: log
      enabled: true
      paths: # Collect the IIS logs
        - C:\inetpub\logs\LogFiles\*\*.log
      exclude_lines: ['^#'] # Skip the '#' header lines at the top of each log file
      tags: ["Iislog"]
output.kafka:
  hosts: ["192.168.110.64:9092", "192.168.110.65:9092", "192.168.110.66:9092"]
  topic: 'Exchange_MailFE01_Logs'
  partition.round_robin:
    reachable_only: false

  required_acks: 1
  compression: gzip
  max_message_bytes: 1000000

Copy the filebeat folder to C:\Program Files on the Exchange Server
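
For example, assuming the package was unpacked to the temporary directory as in the download sketch above:

# Copy the unpacked folder (the source path is an assumption from the sketch above)
Copy-Item -Path "$env:TEMP\filebeat" -Destination "C:\Program Files\filebeat" -Recurse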

Open a PowerShell window as Administrator

cd "C:\Program Files\filebeat"
Start-Service filebeat
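
If Start-Service reports that the filebeat service does not exist, it first has to be registered as a Windows service with the install script shipped in the Filebeat package; the configuration and the Kafka output can also be tested before starting:

# Register filebeat as a Windows service (the script is included in the Filebeat ZIP)
PowerShell.exe -ExecutionPolicy UnRestricted -File .\install-service-filebeat.ps1
# Optionally validate the configuration and the Kafka output connectivity
.\filebeat.exe test config -c .\filebeat.yml
.\filebeat.exe test output -c .\filebeat.yml
Start-Service filebeat
# Confirm that the service is running
Get-Service filebeat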

Check whether the topic has been created in Kafka

cd /usr/share/kafka/kafka_2.13-2.6.0
bin/kafka-topics.sh --zookeeper 192.168.110.64:2181 --list
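
Optionally, a few messages can be read back with the console consumer shipped with Kafka to confirm that events are flowing; the topic name is the one set in filebeat.yml above:

bin/kafka-console-consumer.sh --bootstrap-server 192.168.110.64:9092 --topic Exchange_MailFE01_Logs --from-beginning --max-messages 5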

Read the data from Kafka with Logstash and write it to ES

cd /etc/logstash/conf.d/
vim exchange_mailbe01_logs.conf
------------------------------------------------------------
input {
    kafka { # Read the topic from the Kafka cluster
        topics => ["Exchange_MailBE01_Logs"]
        bootstrap_servers => "192.168.110.64:9092,192.168.110.65:9092,192.168.110.66:9092"
        consumer_threads => 5
        decorate_events => true
        codec => "json"
    }
}
filter {
    if "Transportlog" in [tags] { # Parse the Exchange transport (message tracking) logs
        csv {
            columns => ["date-time","client-ip","client-hostname","server-ip","server-hostname","source-context","connector-id","source","event-id","internal-message-id","message-id","network-message-id","recipient-address","recipient-status","total-bytes","recipient-count","related-recipient-address","reference","message-subject","sender-address","return-path","message-info","directionality","tenant-id","original-client-ip","original-server-ip","custom-data","transport-traffic-type","log-id","schema-version"]
            remove_field => ["message", "tenant-id", "schema-version"]
        }
        ruby {
            code => "event.set('timestamp', event.get('@timestamp').time.localtime + 8*3600)"
        }
        ruby {
            code => "event.set('Transportlog_date',event.get('timestamp'))"
        }
        mutate {
            convert => [ "Transportlog_date", "string" ]
            gsub => [ "index", "@", " " ]
            convert => [ "total-bytes", "integer" ]
            convert => [ "recipient-count", "integer" ]
            split => [ "recipient-address", ";" ]
            remove_field => [ "agent", "ecs" ]
        }
    }
    if "Iislog" in [tags] { # Parse the Exchange IIS logs
        grok {
            match => { "message" => "%{TIMESTAMP_ISO8601:timestamp} %{IPORHOST:hostip} %{WORD:method} %{URIPATH:page} %{NOTSPACE:query} %{NUMBER:port} %{NOTSPACE:username} %{IPORHOST:clientip} %{NOTSPACE:useragent} %{NOTSPACE:referrer} %{NUMBER:response} %{NUMBER:subresponse} %{NUMBER:scstatus} %{NUMBER:timetaken}"}
        }
    }
}
output {
    if "Transportlog" in [tags] { # Write the transport logs to the ES cluster
        elasticsearch {
            hosts => ["192.168.110.61:9200","192.168.110.62:9200","192.168.110.63:9200"]
            index => "mailbe01_transport_logs"
        }
        stdout { codec => rubydebug }
    }
    if "Iislog" in [tags] { # Write the IIS logs to the ES cluster
        elasticsearch {
            hosts => ["192.168.110.61:9200","192.168.110.62:9200","192.168.110.63:9200"]
            index => "mailbe01_iis_logs"
        }
        stdout { codec => rubydebug }
    }
}
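------------------------------------------------------------

Finally, the pipeline can be syntax-checked and Logstash restarted; once the first events are processed, the two indices should be visible in ES. The paths below assume a standard package installation of Logstash:

/usr/share/logstash/bin/logstash --config.test_and_exit -f /etc/logstash/conf.d/exchange_mailbe01_logs.conf
systemctl restart logstash
# The two indices should appear after the first events have been processed
curl -XGET 'http://192.168.110.61:9200/_cat/indices?v' | grep mailbe01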