由於logstash會自動將@timestamp時間轉成UTC時間，網上搜了很多種方法無果後，只好從日誌中提取年月日來進行按北京時間分表。
日誌格式JSON:
{"@timestamp":"2016-08-22T09:13:06.024Z",......}
# Logstash pipeline: read filebeat events from a Redis list, extract a
# local-time (Beijing) date from @timestamp, and write each day's nginx
# access-log events into a per-day MongoDB collection.
input {
  redis {
    host      => "192.168.1.21"
    port      => "6379"
    key       => "filebeat"
    data_type => "list"
    password  => "nginx_pass"
    threads   => 50
  }
}

filter {
  # Escape stray "\x" sequences in the raw message so the json filter
  # below does not fail on invalid escapes.
  mutate {
    gsub => ["message", "\\x", "\\\x"]
  }

  if [type] == "nginxacclog" {
    json {
      source => "message"
    }

    # @timestamp is stored in UTC; convert it to the host's local time so
    # the per-day collection name in the output follows local time.
    ruby {
      code => "event['ctime'] = event['@timestamp'].time.localtime"
      # Alternative when the Logstash host is not on UTC+8:
      # code => "event['ctime'] = event['@timestamp'].time.localtime + 8*60*60"
    }

    # Pull year/month/day out of ctime; these fields name the target
    # MongoDB collection in the output section.
    grok {
      match => { "ctime" => ["%{INT:years_dik3k}-%{INT:months_dik3k}-%{INT:days_dik3k}T*"] }
    }

    grok {
      add_field  => ["type_xi09wnk", "nginxacclog"]
      match      => ["requesturi", "%{URIPATH:baseurl}(?:\?%{NOTSPACE:request})"]
      # NOTE(review): "requesturi" is a field, not a tag — remove_field was
      # probably intended here; left unchanged pending confirmation.
      remove_tag => "requesturi"
    }

    # Split the GET query string and the POST body into key-value fields.
    kv {
      source      => "request"
      field_split => "&?"
      value_split => "="
    }
    kv {
      source      => "post_requ"
      field_split => "&?"
      value_split => "="
    }
  }
}

output {
  if [type_xi09wnk] == "nginxacclog" {
    mongodb {
      collection => "nginx_log%{years_dik3k}%{months_dik3k}%{days_dik3k}"
      isodate    => true
      database   => "logdb"
      uri        => "mongodb://user:[email protected]:27017/logdb"
    }
  }
}
# FIX: the closing brace of the output section was missing in the original,
# leaving the config with unbalanced braces (Logstash would fail to load it).