input {
  file {
    path => "/var/log/nginx/access.log"
    type => "nginx-access"
    sincedb_path => "/var/log/.nginxaccesssincedb"
  }
}
input {
  file {
    path => "/var/log/nginx/error.log"
    type => "nginx-error"
    sincedb_path => "/var/log/.nginxerrorsincedb"
  }
}
input {
  redis {
    host => "127.0.0.1"
    data_type => "list"
    key => "logstash"
  }
}
input {
  tcp {
    type => "syslog"
    port => "514"
  }
}
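# Many network devices only ship syslog over UDP. Below is a minimal sketch of a
# matching UDP listener (an assumption, not part of the original setup: same port
# and type as the TCP input above; binding ports below 1024 requires Logstash to
# run with root privileges):
#input {
#  udp {
#    type => "syslog"
#    port => "514"
#  }
#}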
input {
  tcp {
    type => "VMware"
    port => "1514"
  }
}
input {
  tcp {
    type => "vCenter"
    port => "1515"
  }
}
input {
  tcp {
    type => "Netscaler"
    port => "1517"
  }
}
input {
  tcp {
    type => "eventlog"
    port => "3515"
    # NXLOG's om_tcp/to_json() emits newline-delimited JSON; the json_lines
    # codec replaces the deprecated format => "json" option
    codec => "json_lines"
  }
}
input {
  tcp {
    type => "iis"
    port => "3525"
    codec => "json_lines"
  }
}
filter {
  if [type] == "syslog" {
    dns {
      reverse => [ "host" ]
      action => "replace"
    }
    mutate {
      add_tag => [ "syslog" ]
    }
  }
  if [type] == "VMware" {
    mutate {
      add_tag => [ "VMware" ]
    }
  }
  if [type] == "vCenter" {
    mutate {
      add_tag => [ "vCenter" ]
    }
  }
  if [type] == "PFsense" {
    mutate {
      # tag casing normalized to "PFSense" so the parsing filters below match
      add_tag => [ "PFSense" ]
    }
  }
  if [type] == "Netscaler" {
    mutate {
      add_tag => [ "Netscaler" ]
    }
  }
  if [type] == "eventlog" {
    mutate {
      add_tag => [ "WindowsEventLog" ]
    }
  }
  if [type] == "apache" {
    mutate {
      add_tag => [ "apache" ]
    }
  }
  if [type] =~ "nginx" {
    mutate {
      add_tag => [ "nginx" ]
    }
  }
  if [type] == "iis" {
    mutate {
      add_tag => [ "IIS" ]
    }
  }
}
filter {
  if [type] == "syslog" {
    mutate {
      remove_tag => "Ready"
    }
  }
}
# First layer of normal syslog parsing
filter {
  if "syslog" in [tags] {
    grok {
      match => { "message" => "<%{POSINT:syslog_pri}>%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{DATA:syslog_program}(?:\[%{POSINT:syslog_pid}\])?: %{GREEDYDATA:syslog_message}" }
      add_field => [ "received_at", "%{@timestamp}" ]
      add_field => [ "received_from", "%{host}" ]
    }
    syslog_pri { }
    date {
      # two spaces before "d" so single-digit days ("Mar  7") parse correctly
      match => [ "syslog_timestamp", "MMM  d HH:mm:ss", "MMM dd HH:mm:ss" ]
    }
    if !("_grokparsefailure" in [tags]) {
      mutate {
        replace => [ "host", "%{syslog_hostname}" ]
        replace => [ "@source_host", "%{syslog_hostname}" ]
        replace => [ "@message", "%{syslog_message}" ]
      }
    }
    if [syslog_hostname] =~ /.*?(nsvpx).*?(everythingshouldbevirtual.local)?/ {
      mutate {
        add_tag => [ "Netscaler" ]
      }
    }
    if [syslog_hostname] =~ /.*?(pfsense).*?(everythingshouldbevirtual.local)?/ {
      mutate {
        add_tag => [ "PFSense" ]
      }
    }
  }
}
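# An illustrative RFC3164 line the grok above matches (hypothetical host, program and PID):
#   <13>Feb  5 17:32:18 web01 cron[915]: (root) CMD (run-parts /etc/cron.hourly)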
# Setting up IPTables firewall parsing
filter {
  if "syslog" in [tags] {
    if "IPTables" in [message] {
      grok {
        match => { "message" => "%{IPTABLES}" }
        patterns_dir => [ "/opt/logstash/patterns" ]
      }
      mutate {
        add_tag => [ "IPTABLES" ]
      }
      geoip {
        source => "src_ip"
        target => "geoip"
        add_field => [ "[geoip][coordinates]", "%{[geoip][longitude]}" ]
        add_field => [ "[geoip][coordinates]", "%{[geoip][latitude]}" ]
      }
      mutate {
        convert => [ "[geoip][coordinates]", "float" ]
      }
    }
  }
}
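# %{IPTABLES} is a custom pattern, not a stock grok pattern; it must exist in a
# file under /opt/logstash/patterns. A minimal sketch of such a pattern file
# (field names here are assumptions; adjust to your iptables --log-prefix output):
#   ETHTYPE (?:[A-Fa-f0-9]{2}):(?:[A-Fa-f0-9]{2})
#   NETFILTERMAC %{COMMONMAC:dst_mac}:%{COMMONMAC:src_mac}:%{ETHTYPE:ethtype}
#   IPTABLES (?:IN=%{WORD:in_device} OUT=(%{WORD:out_device})? MAC=%{NETFILTERMAC} SRC=%{IP:src_ip} DST=%{IP:dst_ip}.*PROTO=%{WORD:proto}.*SPT=%{INT:src_port}.*DPT=%{INT:dst_port}.*)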
# Setting up IPTables actions
filter {
  if "IPTABLES" in [tags] {
    grok {
      match => [
        "message", "IPTables-%{WORD:iptables_action}"
      ]
    }
    grok {
      match => [
        "message", "PROTO=%{WORD:iptables_proto}"
      ]
    }
    mutate {
      remove_field => [ "proto" ]
    }
    mutate {
      rename => [ "iptables_proto", "proto" ]
    }
  }
}
# Setting up HAProxy parsing
filter {
  if "syslog" in [tags] {
    if [syslog_program] == "haproxy" {
      grok {
        break_on_match => false
        match => [
          "message", "%{HAPROXYHTTP}",
          "message", "%{HAPROXYTCP}"
        ]
        add_tag => [ "HAProxy" ]
      }
      geoip {
        source => "client_ip"
        target => "geoip"
        add_field => [ "[geoip][coordinates]", "%{[geoip][longitude]}" ]
        add_field => [ "[geoip][coordinates]", "%{[geoip][latitude]}" ]
      }
      mutate {
        convert => [ "[geoip][coordinates]", "float" ]
        replace => [ "host", "%{@source_host}" ]
        rename => [ "http_status_code", "response" ]
        rename => [ "http_request", "request" ]
        rename => [ "client_ip", "src_ip" ]
      }
    }
  }
}
# Setting up KeepAliveD parsing
filter {
  if "syslog" in [tags] {
    if [syslog_program] =~ /Keepalived/ {
      mutate {
        add_tag => [ "KeepAliveD" ]
      }
    }
  }
}
# Filtering for SSH logins either failed or successful
filter {
  if "syslog" in [tags] {
    if [syslog_program] == "sshd" {
      if "Failed password" in [message] {
        grok {
          break_on_match => false
          match => [
            "message", "invalid user %{DATA:UserName} from %{IP:src_ip}",
            "message", "for %{DATA:UserName} from %{IP:src_ip}"
          ]
        }
        mutate {
          add_tag => [ "SSH_Failed_Login" ]
        }
      }
      if "Accepted password" in [message] {
        grok {
          match => [
            "message", "for %{DATA:UserName} from %{IP:src_ip}"
          ]
        }
        mutate {
          add_tag => [ "SSH_Successful_Login" ]
        }
      }
      geoip {
        source => "src_ip"
        target => "geoip"
        add_field => [ "[geoip][coordinates]", "%{[geoip][longitude]}" ]
        add_field => [ "[geoip][coordinates]", "%{[geoip][latitude]}" ]
      }
      mutate {
        convert => [ "[geoip][coordinates]", "float" ]
      }
    }
  }
}
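# Illustrative OpenSSH messages the groks above match (hypothetical user and IP):
#   Failed password for invalid user admin from 203.0.113.5 port 4321 ssh2
#   Accepted password for alice from 203.0.113.5 port 4321 ssh2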
# Setting up VMware ESX(i) log parsing
filter {
  if "VMware" in [tags] {
    multiline {
      pattern => "-->"
      what => "previous"
    }
    grok {
      break_on_match => true
      match => [
        "message", "<%{POSINT:syslog_pri}>%{TIMESTAMP_ISO8601:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{SYSLOGPROG:syslog_program}: (?<message-body>(?<message_system_info>(?:\[%{DATA:message_thread_id} %{DATA:syslog_level} \'%{DATA:message_service}\'\ ?%{DATA:message_opID}])) \[%{DATA:message_service_info}]\ (?<syslog_message>(%{GREEDYDATA})))",
        "message", "<%{POSINT:syslog_pri}>%{TIMESTAMP_ISO8601:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{SYSLOGPROG:syslog_program}: (?<message-body>(?<message_system_info>(?:\[%{DATA:message_thread_id} %{DATA:syslog_level} \'%{DATA:message_service}\'\ ?%{DATA:message_opID}])) (?<syslog_message>(%{GREEDYDATA})))",
        "message", "<%{POSINT:syslog_pri}>%{TIMESTAMP_ISO8601:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{SYSLOGPROG:syslog_program}: %{GREEDYDATA:syslog_message}"
      ]
    }
    syslog_pri { }
    date {
      # syslog_timestamp is ISO8601 (e.g. 2014-03-12T16:40:14.848Z); the literal
      # pattern "YYYY-MM-ddHH:mm:ss,SSS" could never match it
      match => [ "syslog_timestamp", "ISO8601" ]
      timezone => "UTC"
    }
    mutate {
      replace => [ "@source_host", "%{syslog_hostname}" ]
    }
    mutate {
      replace => [ "@message", "%{syslog_message}" ]
    }
    if "Device naa" in [message] {
      grok {
        break_on_match => false
        match => [
          "message", "Device naa.%{WORD:device_naa} performance has %{WORD:device_status}%{GREEDYDATA} of %{INT:datastore_latency_from}%{GREEDYDATA} to %{INT:datastore_latency_to}",
          "message", "Device naa.%{WORD:device_naa} performance has %{WORD:device_status}%{GREEDYDATA} from %{INT:datastore_latency_from}%{GREEDYDATA} to %{INT:datastore_latency_to}"
        ]
      }
    }
    if "connectivity issues" in [message] {
      grok {
        match => [
          "message", "Hostd: %{GREEDYDATA} : %{DATA:device_access} to volume %{DATA:device_id} %{DATA:datastore} (following|due to)"
        ]
      }
    }
    if "WARNING" in [message] {
      grok {
        match => [
          "message", "WARNING: %{GREEDYDATA:vmware_warning_msg}"
        ]
      }
    }
  }
}
# Setting up VMware vCenter parsing
filter {
  if "vCenter" in [tags] {
    grok {
      break_on_match => true
      match => [
        "message", "<%{INT:syslog_pri}>%{SYSLOGTIMESTAMP} %{IPORHOST:syslog_hostname} %{TIMESTAMP_ISO8601:syslog_timestamp} (?<message-body>(?<message_system_info>(?:\[%{DATA:message_thread_id} %{DATA:syslog_level} \'%{DATA:message_service}\'\ ?%{DATA:message_opID}])) \[%{DATA:message_service_info}]\ (?<syslog_message>(%{GREEDYDATA})))",
        "message", "<%{INT:syslog_pri}>%{SYSLOGTIMESTAMP} %{IPORHOST:syslog_hostname} %{TIMESTAMP_ISO8601:syslog_timestamp} (?<message-body>(?<message_system_info>(?:\[%{DATA:message_thread_id} %{DATA:syslog_level} \'%{DATA:message_service}\'\ ?%{DATA:message_opID}])) (?<syslog_message>(%{GREEDYDATA})))",
        "message", "<%{INT:syslog_pri}>%{SYSLOGTIMESTAMP} %{IPORHOST:syslog_hostname} %{TIMESTAMP_ISO8601:syslog_timestamp} %{GREEDYDATA:syslog_message}"
      ]
    }
    syslog_pri { }
    date {
      # syslog_timestamp is ISO8601; see the VMware section above
      match => [ "syslog_timestamp", "ISO8601" ]
      timezone => "UTC"
    }
    mutate {
      replace => [ "@source_host", "%{syslog_hostname}" ]
      replace => [ "@message", "%{syslog_message}" ]
    }
  }
}
# Setting up PFsense Firewall parsing
filter {
  if "PFSense" in [tags] {
    grok {
      add_tag => [ "firewall" ]
      match => [ "message", "<(?<evtid>.*)>(?<datetime>(?:Jan(?:uary)?|Feb(?:ruary)?|Mar(?:ch)?|Apr(?:il)?|May|Jun(?:e)?|Jul(?:y)?|Aug(?:ust)?|Sep(?:tember)?|Oct(?:ober)?|Nov(?:ember)?|Dec(?:ember)?)\s+(?:(?:0[1-9])|(?:[12][0-9])|(?:3[01])|[1-9]) (?:2[0123]|[01]?[0-9]):(?:[0-5][0-9]):(?:[0-5][0-9])) (?<prog>.*?): (?<msg>.*)" ]
    }
    mutate {
      # collapse the double space that precedes single-digit days
      gsub => ["datetime","  "," "]
    }
    date {
      match => [ "datetime", "MMM dd HH:mm:ss" ]
    }
    mutate {
      replace => [ "message", "%{msg}" ]
    }
    mutate {
      remove_field => [ "msg", "datetime", "prog" ]
    }
  }
  if [syslog_program] =~ /^pf$/ {
    mutate {
      add_tag => [ "packetfilter" ]
    }
    multiline {
      pattern => "^\s+|^\t\s+"
      what => "previous"
    }
    mutate {
      remove_field => [ "msg", "datetime" ]
      remove_tag => [ "multiline" ]
    }
    grok {
      match => [ "message", "rule (?<rule>.*)\(.*\): (?<action>pass|block) .* on (?<iface>.*): .* proto (?<proto>TCP|UDP|IGMP|ICMP) .*\n\s*(?<src_ip>(\d+\.\d+\.\d+\.\d+))\.?(?<src_port>(\d*)) [<|>] (?<dst_ip>(\d+\.\d+\.\d+\.\d+))\.?(?<dst_port>(\d*)):" ]
    }
  }
  if [syslog_program] =~ /^dhcpd$/ {
    if [message] =~ /^DHCPACK|^DHCPREQUEST|^DHCPOFFER/ {
      grok {
        match => [ "message", "(?<action>.*) (on|for|to) (?<src_ip>[0-2]?[0-9]?[0-9]\.[0-2]?[0-9]?[0-9]\.[0-2]?[0-9]?[0-9]\.[0-2]?[0-9]?[0-9]) .*(?<mac_address>[0-9a-fA-F][0-9a-fA-F]:[0-9a-fA-F][0-9a-fA-F]:[0-9a-fA-F][0-9a-fA-F]:[0-9a-fA-F][0-9a-fA-F]:[0-9a-fA-F][0-9a-fA-F]:[0-9a-fA-F][0-9a-fA-F]).* via (?<iface>.*)" ]
      }
    }
    if [message] =~ /^DHCPDISCOVER/ {
      grok {
        match => [ "message", "(?<action>.*) from (?<mac_address>[0-9a-fA-F][0-9a-fA-F]:[0-9a-fA-F][0-9a-fA-F]:[0-9a-fA-F][0-9a-fA-F]:[0-9a-fA-F][0-9a-fA-F]:[0-9a-fA-F][0-9a-fA-F]:[0-9a-fA-F][0-9a-fA-F]).* via (?<iface>.*)" ]
      }
    }
    if [message] =~ /^DHCPINFORM/ {
      grok {
        match => [ "message", "(?<action>.*) from (?<src_ip>.*).* via (?<iface>.*)" ]
      }
    }
  }
  # scoped to PFSense events so grok failures from other device types are not silently dropped
  if "PFSense" in [tags] and "_grokparsefailure" in [tags] {
    drop { }
  }
}
filter {
  if "PFSense" in [tags] {
    mutate {
      replace => [ "@source_host", "%{syslog_hostname}" ]
      replace => [ "@message", "%{syslog_message}" ]
    }
  }
}
# Setting up Citrix Netscaler parsing
filter {
  if "Netscaler" in [tags] {
    grok {
      break_on_match => true
      match => [
        "message", "<%{POSINT:syslog_pri}> %{DATE_US}:%{TIME} GMT %{SYSLOGHOST:syslog_hostname} %{GREEDYDATA:netscaler_message} : %{DATA} %{INT:netscaler_spcbid} - %{DATA} %{IP:clientip} - %{DATA} %{INT:netscaler_client_port} - %{DATA} %{IP:netscaler_vserver_ip} - %{DATA} %{INT:netscaler_vserver_port} %{GREEDYDATA:netscaler_message} - %{DATA} %{WORD:netscaler_session_type}",
        "message", "<%{POSINT:syslog_pri}> %{DATE_US}:%{TIME} GMT %{SYSLOGHOST:syslog_hostname} %{GREEDYDATA:netscaler_message}"
      ]
    }
    syslog_pri { }
    geoip {
      source => "clientip"
      target => "geoip"
      add_field => [ "[geoip][coordinates]", "%{[geoip][longitude]}" ]
      add_field => [ "[geoip][coordinates]", "%{[geoip][latitude]}" ]
    }
    mutate {
      add_field => [ "src_ip", "%{clientip}" ]
      convert => [ "[geoip][coordinates]", "float" ]
      replace => [ "@source_host", "%{host}" ]
      replace => [ "@message", "%{netscaler_message}" ]
    }
  }
}
# Setting up Apache web server parsing
filter {
  if [type] == "apache" {
    grok {
      # match replaces the deprecated pattern => option
      match => { "message" => "%{COMBINEDAPACHELOG}" }
    }
    geoip {
      source => "clientip"
      target => "geoip"
      add_field => [ "[geoip][coordinates]", "%{[geoip][longitude]}" ]
      add_field => [ "[geoip][coordinates]", "%{[geoip][latitude]}" ]
    }
    mutate {
      add_field => [ "src_ip", "%{clientip}" ]
      convert => [ "[geoip][coordinates]", "float" ]
      replace => [ "@source_host", "%{host}" ]
      replace => [ "@message", "%{message}" ]
      rename => [ "verb" , "method" ]
    }
    grok {
      match => [
        "message", "%{DATA:apache_vhost} "
      ]
    }
  }
}
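# The second grok above only yields a vhost when the log line is prefixed with
# one. A hedged example of an Apache LogFormat producing such lines (the leading
# %v is the served vhost; the rest is the standard combined format):
#   LogFormat "%v %h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-Agent}i\"" vhost_combined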
# Setting up Nginx web server parsing
filter {
  if [type] =~ "nginx" {
    grok {
      # match replaces the deprecated pattern => option
      match => { "message" => "%{COMBINEDAPACHELOG}" }
    }
    geoip {
      source => "clientip"
      target => "geoip"
      add_field => [ "[geoip][coordinates]", "%{[geoip][longitude]}" ]
      add_field => [ "[geoip][coordinates]", "%{[geoip][latitude]}" ]
    }
    mutate {
      add_field => [ "src_ip", "%{clientip}" ]
      convert => [ "[geoip][coordinates]", "float" ]
      replace => [ "@source_host", "%{host}" ]
      replace => [ "@message", "%{message}" ]
      rename => [ "verb" , "method" ]
    }
    grok {
      match => [
        "message", "%{DATA:apache_vhost} "
      ]
    }
  }
}
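# As with Apache, nginx must emit combined-format lines for COMBINEDAPACHELOG to
# match. A hedged sketch of a matching nginx log_format (an assumption, goes in
# the http block of nginx.conf; the leading $host feeds the apache_vhost grok):
#   log_format logstash '$host $remote_addr - $remote_user [$time_local] '
#                       '"$request" $status $body_bytes_sent '
#                       '"$http_referer" "$http_user_agent"';
#   access_log /var/log/nginx/access.log logstash;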
# Setting up Nginx errors parsing
filter {
  if [type] == "nginx-error" {
    grok {
      match => [
        "message", "(?<timestamp>%{YEAR}[./-]%{MONTHNUM}[./-]%{MONTHDAY}[- ]%{TIME}) \[%{LOGLEVEL:severity}\] %{POSINT:pid}#%{NUMBER}: %{GREEDYDATA:errormessage}(?:, client: (?<clientip>%{IP}|%{HOSTNAME}))(?:, server: %{IPORHOST:server})(?:, request: %{QS:request})?(?:, host: %{QS:host})?",
        "message", "(?<timestamp>%{YEAR}[./-]%{MONTHNUM}[./-]%{MONTHDAY}[- ]%{TIME}) \[%{LOGLEVEL:severity}\] %{POSINT:pid}#%{NUMBER}: %{GREEDYDATA:errormessage}(?:, client: (?<clientip>%{IP}|%{HOSTNAME}))(?:, server: %{IPORHOST:server})(?:, request: %{QS:request})(?:, upstream: %{QS:upstream})(?:, host: %{QS:host})(?:, referrer: \"%{URI:referrer}\")?"
      ]
    }
  }
}
# Windows Eventlogs....Use NXLOG for client side logging
filter {
  if [type] == "eventlog" {
    grep {
      match => { "EventReceivedTime" => "\d+" }
    }
    mutate {
      lowercase => [ "EventType", "FileName", "Hostname", "Severity" ]
    }
    mutate {
      rename => [ "Hostname", "@source_host" ]
    }
    date {
      match => [ "EventReceivedTime", "UNIX" ]
    }
    mutate {
      rename => [ "Message", "@message" ]
      rename => [ "Severity", "eventlog_severity" ]
      rename => [ "SeverityValue", "eventlog_severity_code" ]
      rename => [ "Channel", "eventlog_channel" ]
      rename => [ "SourceName", "eventlog_program" ]
      rename => [ "SourceModuleName", "nxlog_input" ]
      rename => [ "Category", "eventlog_category" ]
      rename => [ "EventID", "eventlog_id" ]
      rename => [ "RecordNumber", "eventlog_record_number" ]
      rename => [ "ProcessID", "eventlog_pid" ]
    }
  }
}
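# A minimal client-side nxlog.conf sketch for shipping Windows event logs to the
# tcp/3515 input above (the host is a placeholder and the module choices are
# assumptions; im_msvistalog covers Vista/2008 and later):
#   <Extension json>
#     Module xm_json
#   </Extension>
#   <Input eventlog>
#     Module im_msvistalog
#   </Input>
#   <Output logstash>
#     Module om_tcp
#     Host 10.0.0.1
#     Port 3515
#     Exec to_json();
#   </Output>
#   <Route eventlog_to_logstash>
#     Path eventlog => logstash
#   </Route>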
# Microsoft IIS logging....Use NXLOG for client side logging
filter {
  if [type] == "iis" {
    if [message] =~ "^#" {
      drop {}
    }
    grok {
      match => [
        "message", "%{TIMESTAMP_ISO8601:logtime} %{IPORHOST:hostname} %{URIPROTO:cs_method} %{URIPATH:cs_stem} (?:%{NOTSPACE:cs_query}|-) %{NUMBER:src_port} %{NOTSPACE:cs_username} %{IP:clientip} %{NOTSPACE:cs_useragent} %{NUMBER:sc_status} %{NUMBER:sc_subresponse} %{NUMBER:sc_win32_status} %{NUMBER:timetaken}"
      ]
    }
    date {
      match => [ "logtime", "YYYY-MM-dd HH:mm:ss" ]
      timezone => "UTC"
    }
    geoip {
      source => "clientip"
      target => "geoip"
      add_field => [ "[geoip][coordinates]", "%{[geoip][longitude]}" ]
      add_field => [ "[geoip][coordinates]", "%{[geoip][latitude]}" ]
    }
    dns {
      reverse => [ "hostname" ]
      action => "replace"
    }
    mutate {
      add_field => [ "src_ip", "%{clientip}" ]
      convert => [ "[geoip][coordinates]", "float" ]
      replace => [ "@source_host", "%{hostname}" ]
      replace => [ "@message", "%{message}" ]
      rename => [ "cs_method", "method" ]
      rename => [ "cs_stem", "request" ]
      rename => [ "cs_useragent", "agent" ]
      rename => [ "cs_username", "username" ]
      rename => [ "sc_status", "response" ]
      rename => [ "timetaken", "time_request" ]
    }
  }
}
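# The IIS grok above assumes the default W3C field order below (an assumption;
# verify against the #Fields: header of your IIS logs):
#   #Fields: date time s-ip cs-method cs-uri-stem cs-uri-query s-port cs-username c-ip cs(User-Agent) sc-status sc-substatus sc-win32-status time-taken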
filter {
  if [type] == "mysql-slowquery" {
    multiline {
      what => previous
      pattern => "^\s"
    }
    grok { match => [ "message", "^%{NUMBER:date} *%{NOTSPACE:time}" ] }
    mutate { replace => [ "time", "%{date} %{time}" ] }
    date {
      # the field to parse must be named first
      match => [ "time", "YYMMdd H:mm:ss", "YYMMdd HH:mm:ss" ]
    }
    mutate { remove_field => [ "time", "date" ] }
    split { }
  }
}
# Create @source_host_ip field for all devices for IP Tracking used along with src_ip and dst_ip fields
filter {
  if ![source_host_ip] {
    mutate {
      add_field => [ "source_host_ip", "%{@source_host}" ]
    }
    dns {
      resolve => [ "source_host_ip" ]
      action => "replace"
    }
    mutate {
      rename => [ "source_host_ip", "@source_host_ip" ]
    }
  }
}
# The below filter section will be used to remove unnecessary fields to keep ES memory cache from filling up with useless data
# The below filter section will be where you would want to comment certain types or tags out if trying to isolate a logging issue
filter {
  if [type] == "apache" {
    mutate {
      remove_field => [ "clientip", "host", "timestamp" ]
    }
  }
  if [type] == "eventlog" {
    mutate {
      remove_field => [ "SourceModuleType", "EventTimeWritten", "EventTime", "EventReceivedTime", "EventType" ]
    }
  }
  if [type] == "iis" {
    mutate {
      remove_field => [ "clientip", "host", "hostname", "logtime" ]
    }
  }
  if [type] =~ "nginx" {
    mutate {
      remove_field => [ "clientip", "host", "timestamp" ]
    }
  }
  if "Netscaler" in [tags] {
    mutate {
      remove_field => [ "clientip" ]
    }
  }
  if [type] == "syslog" {
    mutate {
      remove_field => [ "host", "received_at", "received_from", "syslog_hostname", "syslog_message", "syslog_timestamp" ]
    }
  }
  if [type] == "VMware" {
    mutate {
      remove_field => [ "host", "program", "syslog_hostname", "syslog_message", "syslog_timestamp" ]
    }
  }
  if [type] == "vCenter" {
    mutate {
      remove_field => [ "host", "message-body", "program", "syslog_hostname", "syslog_message", "syslog_timestamp" ]
    }
  }
}
#### Multicast discovery mode ####
# Send output to the ES cluster logstash-cluster using a predefined template
# The section below applies during initial setup, while the ES nodes discover each other via multicast
# When changing to unicast discovery mode, comment out this section and configure unicast discovery in the next one
output {
  elasticsearch {
    cluster => "logstash-cluster"
    flush_size => 1
    manage_template => true
    template => "/opt/logstash/lib/logstash/outputs/elasticsearch/elasticsearch-template.json"
  }
}
#### Unicast discovery mode ####
# Send output to the ES cluster logstash-cluster using a predefined template
# The settings below will be used when you change to unicast discovery mode for all ES nodes
# Make sure to comment out the above multicast discovery mode section
#output {
#  elasticsearch {
#    cluster => "logstash-cluster"
#    host => "logstash"
#    port => "9300"
#    protocol => "node"
#    flush_size => "1"
#    manage_template => true
#    template => "/opt/logstash/lib/logstash/outputs/elasticsearch/elasticsearch-template.json"
#  }
#}