Skip to content

Instantly share code, notes, and snippets.

@surlypants
Created June 2, 2015 18:52
Show Gist options
  • Star 1 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save surlypants/2e1ef99cbc4e2136aff1 to your computer and use it in GitHub Desktop.
Logstash silently stops processing events and does not respond to SIGTERM
input {
# Console input for ad-hoc testing of the pipeline.
stdin { }
}
input {
file {
type => "syslog-ng"
# Tail every per-host daily log under the syslog-ng tree:
# /var/log/syslog-ng/<host>/<year>/<month>/<yyyymmdd>.log
path => ["/var/log/syslog-ng/*/*/*/*.log"]
#start_position => "beginning"
}
}
filter {
# Classify syslog-ng events: recover the true originating host from the
# on-disk path, then tag each event by device family based on hostname/IP.
# The else-if chain is ordered: the first matching branch wins.
if [type] == "syslog-ng" {
grok {
patterns_dir => "/opt/logstash/patterns"
# Extract <remote_host> from the per-host directory layout written by syslog-ng.
match => [ "path", "/var/log/syslog-ng/%{IPORHOST:remote_host}/%{YEAR}/%{MONTHNUM}/%{YEAR}%{MONTHNUM}%{MONTHDAY}.log" ]
}
# Keep the shipper's host before [host] is overwritten with the real origin.
mutate {
add_field => { "syslog_host" => "%{host}" }
}
mutate {
replace => [ "host", "%{remote_host}" ]
remove_field => [ "remote_host" ]
}
if [message] =~ /elasticsearch:/ {
mutate { add_tag => [ "elasticsearch" ] }
}
else if [message] =~ /\[NXLOG@/ {
mutate { add_tag => [ "nxlog", "noalert" ] }
}
# NOTE(review): /172\.20\.71\.10[1-4]/ lacks the trailing $ its siblings have,
# so it also matches longer addresses (e.g. 172.20.71.101x) — confirm intent.
else if [host] =~ /10\.5\.1\.1[1-4]$/ or [host] =~ /10\.7\.1\.1[1-9]$/ or [host] =~ /10\.7\.1\.2[01]$/ or [host] =~ /172\.2[02]\.8\.1[34]$/ or [host] =~ /172\.20\.71\.10[1-4]/ or [host] =~ /172\.22\.71\.1[1-4]$/ {
mutate { add_tag => [ "solaris" ] }
}
else if [host] == "10.5.1.10" {
mutate { add_tag => [ "coraid" ] }
}
else if [host] =~ /10\.90\.9\.1[01]$/ or [host] =~ /(?i)^N6K/ {
mutate { add_tag => [ "nexus" ] }
}
# NOTE(review): /10\.5\.1\.24[468]/ is unanchored, unlike neighbors — confirm.
else if [host] =~ /(?i)^3PAR/ or [host] =~ /172\.20\.8\.2[02]$/ or [host] == "10.5.1.250" or [host] =~ /10\.5\.1\.24[468]/ or [host] == "172.22.8.23" {
mutate { add_tag => [ "3par" ] }
}
else if [host] =~ /(?i)sha/ or [host] == "10.90.9.100" {
mutate { add_tag => [ "riverbed", "noalert" ] }
}
else if [host] =~ /10\.0\.1\.1[0-2]$/ {
mutate { add_tag => [ "ucs", "ucsmanager" ] }
}
else if [host] =~ /10\.5\.1\.3[23]$/ or [host] =~ /172\.20\.8\.4[01]$/ {
mutate { add_tag => [ "brocade" ] }
}
else if [host] =~ /(?i)ucs/ or [host] =~ /10\.5\.1\.10[0-2]/ or [host] =~ /10\.5\.1\.25[2-4]/ or [host] =~ /10\.5\.3\.9[89]$/ or [host] == "10.5.3.100" or [host] =~ /10\.5\.3\.25[2-4]/ or [host] =~ /172\.22\.5\.25[2-4]/ {
mutate { add_tag => [ "ucs" ] }
}
else if [host] =~ /(?i)esx/ {
mutate { add_tag => [ "esx", "noalert" ] }
}
# Discard vpxa agent chatter from these subnets entirely.
else if [host] =~ /10\.4\.1\./ or [host] =~ /172\.20\.5\./ or [host] =~ /172\.22\.4\./ {
#mutate { add_tag => [ "vpxa", "noalert" ] }
drop {}
}
else if [host] == "vp-qamgmt02" {
mutate { add_tag => [ "vcsa", "noalert" ] }
}
# Anything not recognized above is tagged for the generic grok pass below.
else {
mutate { add_tag => [ "unclassified" ] }
}
}
}
filter {
# Generic syslog parsing for hosts without a device-specific pipeline:
# strip timestamp/host/program into fields, leaving the payload in
# [message_syslog]. First (more specific) pattern wins.
if "unclassified" in [tags] or "solaris" in [tags] or "coraid" in [tags] {
grok {
patterns_dir => "/opt/logstash/patterns"
break_on_match => true
match => [
#2015-04-21T13:12:02-03:00 10.5.1.11 sshd[23829]: [ID 800047 auth.info] Keyboard-interactive (PAM) userauth failed[13] while authenticating: No account present for user
#2015-04-21T13:03:54-03:00 vp-logstash02 sshd[11155]: pam_unix(sshd:session): session closed for user robin
#2015-04-21T13:11:40-03:00 10.5.1.10 dropbear[15312]: Exit before auth: Exited normally
# NOTE(review): the "." between syslog_facility and syslog_level is an
# unescaped any-char; also the msgid "[" is optional while its "]" is
# required — works for the samples above, but confirm for other inputs.
"message", "%{TIMESTAMP_ISO8601:remote_timestamp} %{SYSLOGHOST} %{MYSYSLOGPROG}: ((\[%{WORD:syslog_msgid_id} %{NUMBER:syslog_msgid_msg})? %{WORD:syslog_facility}.%{WORD:syslog_level}\] ?)?%{GREEDYDATA:message_syslog}",
"message", "%{TIMESTAMP_ISO8601:remote_timestamp} %{SYSLOGHOST} %{GREEDYDATA:message_syslog}"
]
}
}
}
filter {
# Parse elasticsearch server log lines forwarded via syslog, then nest the
# captured fields under the [elasticsearch] object.
if "elasticsearch" in [tags] {
grok {
patterns_dir => "/opt/logstash/patterns"
break_on_match => true
match => [
"message", "%{TIMESTAMP_ISO8601:remote_timestamp} %{SYSLOGHOST} %{PROG:message_program}: \[%{DATA:syslog_level}\]\[%{DATA:syslog_facility}\] \[%{DATA:elasticsearch_name}\] \[%{DATA:elasticsearch_index}\]\[%{NUMBER:elasticsearch_shard}\] %{GREEDYDATA:message_syslog}",
"message", "%{TIMESTAMP_ISO8601:remote_timestamp} %{SYSLOGHOST} %{PROG:message_program}: \[%{DATA:syslog_level}\]\[%{DATA:syslog_facility}\] \[%{DATA:elasticsearch_name}\] %{GREEDYDATA:message_syslog}"
]
}
# BUGFIX: the rename sources previously had a stray "]" appended
# ("elasticsearch_index]" etc.). The grok above captures plain
# "elasticsearch_index"/"elasticsearch_name"/"elasticsearch_shard",
# so the renames silently never matched any field.
mutate {
rename => [ "elasticsearch_index", "[elasticsearch][index]" ]
rename => [ "elasticsearch_name", "[elasticsearch][name]" ]
rename => [ "elasticsearch_shard", "[elasticsearch][shard]" ]
}
}
}
filter {
if "esx" in [tags] {
grok {
patterns_dir => "/opt/logstash/patterns"
# Ordered most-specific first; stop at the first pattern that matches.
break_on_match => true
match => [
# custom
# Hostd/Vpxa-style lines: [<thread> <level> '<service>' opID=... user=...] [svc info] body
"message", "%{TIMESTAMP_ISO8601:remote_timestamp} %{SYSLOGHOST} %{MYSYSLOGPROG}: \[%{BASE16NUM:message_thread_id} %{WORD:syslog_level} \'%{DATA:message_service}(?:[;'])( opID=%{DATA:message_opID})?( user=%{USERNAME:message_user})?\]( \[%{DATA:message_service_info}\])?(%{SPACE}?%{GREEDYDATA:local_message})?",
# vmkernel-style lines: cpuN:<id>) optionally with opID and World id
"message", "%{TIMESTAMP_ISO8601:remote_timestamp} %{SYSLOGHOST} %{MYSYSLOGPROG}:(( (?<esx_cpu>cpu%{NUMBER}):%{NUMBER:esx_cpu_id})( opID=%{DATA:message_opID})?\)(World: %{NUMBER:esx_world_id}:)?)?(%{SPACE}?%{GREEDYDATA:local_message})?",
###
### BEGIN SEXILOG
###
# SexiLog body_type_* fallbacks, progressively less specific.
"message", "%{TIMESTAMP_ISO8601:remote_timestamp} %{SYSLOGHOST} %{MYSYSLOGPROG}: (?<body_type_1>(?<message_body>(?<message_system_info>(?:\[%{DATA:message_thread_id} %{DATA:syslog_level} \'%{DATA:message_service}\'\ ?%{DATA:message_opID}])) \[%{DATA:message_service_info}]\ (?<message_syslog>(%{GREEDYDATA}))))",
"message", "%{TIMESTAMP_ISO8601:remote_timestamp} %{SYSLOGHOST} %{MYSYSLOGPROG}: (?<body_type_2>(?<message_body>(?<message_system_info>(?:\[%{DATA:message_thread_id} %{DATA:syslog_level} \'%{DATA:message_service}\'\ ?%{DATA:message_opID}])) (?<message_syslog>(%{GREEDYDATA}))))",
"message", "%{TIMESTAMP_ISO8601:remote_timestamp} %{SYSLOGHOST} %{MYSYSLOGPROG}: (?<body_type_3>(?<message_body>%{GREEDYDATA:message_syslog}))",
"message", ".*?[\s\r\t](?<hostname>[a-zA-Z0-9\-_]+[.][a-zA-Z0-9\-_\.]+)[\s].*?(?<message_program>[a-zA-Z0-9\-\[\]_]{3,})[:][\s](?<body_type_6>(?<message_body>(?<message_syslog>.*)))",
# Last resort: capture the whole line so nothing is dropped unparsed.
"message", "(?<body_type_7>(?<message_body>(?<message_debug>.*)))"
]
}
###
### more custom
###
# Secondary extraction over the already-isolated [local_message] payload.
if [local_message] {
grok {
patterns_dir => "/opt/logstash/patterns"
break_on_match => false
tag_on_failure => []
match => [
# BUGFIX: both patterns previously ended with "(%{GREEDYDATA)?" — the ")"
# closed the group before the %{...} reference was terminated, so grok
# left "%{GREEDYDATA" as literal text and the optional tail could never
# match. Corrected to a well-formed optional group "(%{GREEDYDATA})?".
"local_message", "VC opID %{DATA:esx_vc_opID} maps to vmkernel opID %{DATA:esx_vmkernel_opID}(%{GREEDYDATA})?",
"local_message", "<%{DATA:esx_datastore_name}(?:(, %{NUMBER}))> Number of hosts has changed to %{NUMBER:esx_datastore_mount_count}(%{GREEDYDATA})?"
##-- FINISH task-internal-3403980 -- -- vpxapi.VpxaService.querySummaryStatistics --
#"local_message", "(?:--) %{WORD:task_status}(?: (?<task>(?:task)-(?<task_type>(?:[A-Za-z]+))-(?<task_id>%{POSINT})))(?: --)(?: --) %{GREEDYDATA:task_subsystem}(?: --)",
##-- BEGIN session[e95a8fe6-c152-9c52-e74d-7f593d3d3894]52373d67-9290-a767-f339-027d7b2d24f6 -- -- vpxapi.VpxaService.retrieveChanges -- e95a8fe6-c152-9c52-e74d-7f593d3d3894
]
}
}
# ESX alert classification. Outer gate: only messages containing one of the
# interesting keywords (and not known noise) enter the else-if chain below.
# The chain is ordered and first-match-wins; each branch stamps an [alert]
# field and/or an "alert"/"achtung" tag (achtung = higher severity).
if [message] =~ /(?i)warning|error|fault|ALERT|busy|Failed|[\s]dead|[\s]space|esx\.|vob\.|com\.vmware|nmp|volume|consolidate|FS3|question|ha-eventmgr|VisorFS|Fil3|DLX|MCE|HBX|MPN|mpn|p2m|Reset|msg\./ and [message] !~ /(?i)crossdup|hostprofiletrace/{
# vmkwarning tagging is independent of the else-if chain below.
if [message] =~ /(?i)vmkwarning:/ and [message] !~ /(?i)crossdup|performance|LinuxCharWrite/{
# <181>2014-12-18T18:30:36.400Z esx.vmware.com vmkwarning: cpu29:4317)WARNING: vmw_psp_rr: psp_rrSelectPathToActivate:972:Could not select path for device "naa.60002ac000000000000004b00000d155".
mutate {
add_tag => "vmkwarning"
}
}
if [message] =~ /(?i)ALERT:/{
# <181>2014-12-17T07:50:52.629Z esx.vmware.com vmkernel: cpu9:8942)ALERT: URB timed out - USB device may not respond
mutate {
add_tag => "achtung"
add_field => { "alert" => "ALERT" }
}
} else if [message] =~ /(?i)[\s]dead/ {
# <166>2014-09-15T14:52:23.782Z esx.vmware.com Hostd: [77381B90 error 'Default'] Unable to build Durable Name dependent properties: Unable to query VPD pages from device, all paths are dead, no I/O possible.
mutate {
add_tag => "achtung"
add_field => { "alert" => "dead" }
}
} else if [message] =~ /(?i)[\s]space/ and [message] !~ /(?i)Need/{
# <181>2014-12-10T15:19:40.411Z esx.vmware.com vmkernel: cpu6:16764680)WARNING: Swap: vm 17473281: 5501: Failed to extend swap file type=regular from 0 KB to 4194304 KB. status=No space left on device
# <166>2014-12-10T15:54:45.807Z esx.vmware.com Hostd: [55124B90 info 'ha-eventmgr'] Event 3593 : Message on vm_name on esx.vmware.com in ha-datacenter: There is no more space for virtual disk /vmfs/volumes/512f4eb6-27ff1a2e-f082-d4ae5264813c/vm_name/vm_name.vmdk. You might be able to continue this session by freeing disk space on the relevant volume, and clicking Retry. Click Cancel to terminate this session.
# <13>2014-12-22T01:01:01Z esx.vmware.com root: Unable to extract system configuration. Are you out of disk space?
mutate {
add_tag => "achtung"
add_field => { "alert" => "space" }
}
} else if [message] =~ /(?i)esx\.problem/ {
# <14>2014-12-10T18:01:03.496Z esx.vmware.com vobd: [scsiCorrelator] 14807183227307us: [esx.problem.scsi.device.io.latency.high] Device naa.60a9800041764b6c463f437868556b7a performance has deteriorated. I/O latency increased from average value of 1343 microseconds to 28022 microseconds.
grok {
tag_on_failure => []
patterns_dir => "/opt/logstash/patterns"
match => [
"message", "(?<esx_problem>esx\.problem\.[a-zA-Z\.]+)"
]
add_tag => "alert"
add_field => { "alert" => "%{esx_problem}" }
}
# Escalate a subset of esx.problem events to achtung.
if [message] =~ /(?i)lost|degraded|ntpd|pageretire|apd|permanentloss|atquota|pathstatechanges|visorfs|heartbeat|corrupt|disconnect|scsipath\.por|dump|duplicate/ {
mutate {
add_tag => "achtung"
}
}
} else if [message] =~ /(?i)esx\.clear/ {
# <14>2014-12-10T16:31:21.405Z esx.vmware.com vobd: [UserLevelCorrelator] 84941123us: [esx.clear.coredump.configured] A vmkcore disk partition is available and/or a network coredump server has been configured. Host core dumps will be saved.
grok {
tag_on_failure => []
patterns_dir => "/opt/logstash/patterns"
match => [
"message", "(?<esx_clear>esx\.clear\.[a-zA-Z\.]+)"
]
add_tag => "alert"
add_field => { "alert" => "%{esx_clear}" }
}
} else if [message] =~ /(?i)esx\.audit/ {
# <14>2014-12-10T16:31:21.337Z esx.vmware.com vobd: [UserLevelCorrelator] 84872345us: [esx.audit.host.boot] Host has booted.
grok {
tag_on_failure => []
patterns_dir => "/opt/logstash/patterns"
match => [
"message", "(?<esx_audit>esx\.audit\.[a-zA-Z\.]+)"
]
add_tag => "alert"
add_field => { "alert" => "%{esx_audit}" }
}
} else if [message] =~ /(?i)vob\./ {
# <14>2014-12-10T17:28:18.087Z esx.vmware.com vobd: [GenericCorrelator] 3639187707810us: [vob.user.iorm.nonviworkload] Detected a non vi workload on datastore datastore_name
grok {
tag_on_failure => []
patterns_dir => "/opt/logstash/patterns"
match => [
"message", "(?<vob>vob\.[a-zA-Z\.]+)"
]
add_tag => "alert"
add_field => { "alert" => "%{vob}" }
}
# NOTE(review): this inner mutate has its only operation commented out,
# so the block is currently a no-op — confirm whether the achtung
# escalation for dead/corruptondisk/mce vob events was disabled on purpose.
if [message] =~ /(?i)dead|corruptondisk|mce|dump|heartbeat/ {
mutate {
# add_tag => "achtung"
}
}
} else if [message] =~ /(?i)com\.vmware/ {
# <181>2014-12-10T17:14:26.203Z esx.vmware.com vmkernel: cpu10:674239)DVSDev: DVSDevDataSet:968: setting data com.vmware.common.port.volatile.persist on port 2675
grok {
tag_on_failure => []
patterns_dir => "/opt/logstash/patterns"
match => [
"message", "(?<com_vmware>com\.vmware\.[a-zA-Z\.]+)"
]
add_field => { "alert" => "%{com_vmware}" }
}
if [message] =~ /(?i)com\.vmware\.vc\.ha\./ {
mutate {
add_tag => "achtung"
}
}
} else if [message] =~ /(?i)nmp/ {
# <181>2014-12-10T18:21:53.018Z esx.vmware.com vmkernel: cpu13:1264143)WARNING: NMP: nmp_DeviceRequestFastDeviceProbe:237:NMP device "naa.60a9800041764b6c463f437868556c47" state in doubt; requested fast path state update...
grok {
tag_on_failure => []
patterns_dir => "/opt/logstash/patterns"
match => [
"message", "(?<nmp>nmp_[a-zA-Z0-9\-_]+)[:]"
]
add_tag => "alert"
add_field => { "alert" => "%{nmp}" }
}
} else if [message] =~ /(?i)Lost access to volume/ {
# <166>2014-12-16T22:07:35.612Z esx.vmware.com Hostd: [3647FB90 info 'ha-eventmgr'] Event 89814 : Lost access to volume 50867547-26003ed2-06b9-0024e8565b3d (datastore_name) due to connectivity issues. Recovery attempt is in progress and outcome will be reported shortly.
grok {
tag_on_failure => []
patterns_dir => "/opt/logstash/patterns"
match => [
# NOTE(review): the greedy ".*" before %{GREEDYDATA:lost_datastore}
# consumes the rest of the line, so [lost_datastore] is captured
# empty — the pattern likely needs anchoring on the volume name.
"message", "(?i)Lost access to volume.*(%{GREEDYDATA:lost_datastore})"
]
add_tag => "achtung"
add_field => { "alert" => "Lost access to volume" }
}
} else if [message] =~ /(?i)Long VMFS3 rsv time/{
# <181>2014-12-10T18:59:00.309Z esx.vmware.com vmkernel: cpu45:9821)FS3Misc: 1440: Long VMFS3 rsv time on 'datastore_name' (held for 261 msecs). # R: 1, # W: 1 bytesXfer: 5 sectors
grok {
tag_on_failure => []
patterns_dir => "/opt/logstash/patterns"
match => [
"message", "Long VMFS3 rsv time on '%{GREEDYDATA:datastore_name}'"
]
add_field => { "alert" => "Long_VMFS3_rsv_time" }
}
} else if [message] =~ /(?i)question/ and [message] !~ /(?i)Answered/{
# <166>2014-12-17T13:37:14.310Z esx.vmware.com Hostd: [34DD1B90 verbose 'vm:/vmfs/volumes/5335bc2e-70db5326-9cc7-f0921c0b59bd/vm_name/vmx_name.vmx'] Clearing VM question
mutate {
add_tag => "achtung"
add_field => { "alert" => "question_not_answered" }
}
} else if [message] =~ /(?i)ha-datacenter/ and [message] =~ /ha-eventmgr/{
grok {
tag_on_failure => []
patterns_dir => "/opt/logstash/patterns"
match => [
"message", ".*ha-eventmgr.*(M|m)essage.*ha-datacenter:[\s]{0,1}%{DATA:ha_datacenter_msg}"
]
}
} else if [message] =~ /(?i)visorfs/ and [message] =~ /(?i)inode/ and [message] !~ /(?i)hostprofiletrace/ {
#<181>2014-12-10T16:09:58.394Z esx.vmware.com vmkernel: cpu29:5640)VisorFS: 688: Failed to get object 4 inode 7921234 :Transient file system condition, suggest retry
mutate {
add_tag => "alert"
add_field => { "alert" => "visorfs" }
}
} else if [message] =~ /(?i)Fil3:/{
# <181>2014-12-08T21:10:25.383Z esx.vmware.com vmkernel: cpu9:5007)Fil3: 13496: Max retries (10) exceeded for caller Fil3_FileIO (status 'IO was aborted by VMFS via a virt-reset on the device')
mutate {
add_tag => "achtung"
add_field => { "alert" => "Fil3" }
}
# A second [alert] value may be appended, making [alert] an array.
if [message] =~ /(?i)virt\-reset/{
mutate {
add_field => { "alert" => "virt-reset" }
}
}
} else if [message] =~ /(?i)busy/ and [message] !~ /(?i)crossdup|hostprofiletrace/{
# <14>2014-12-18T17:15:55.901Z esx.vmware.com storageRM: <datastore_name> retry : 1, Device or resource busy Error -1 opening file /vmfs/volumes//datastore_name/.iorm.sf/slotsfile
mutate {
add_tag => "alert"
add_field => { "alert" => "busy" }
}
} else if [message] =~ /(?i)DLX:/{
# <181>2014-12-19T07:28:11.537Z esx.vmware.com vmkernel: cpu22:4167)DLX: 3394: vol 'datastore_name': [Req mode 1] Checking liveness of [type 10c00002 offset 9684992 v 204, hb offset 3223552
mutate {
add_tag => "alert"
add_field => { "alert" => "DLX" }
}
} else if [message] =~ /(?i)FS3:/ and [message] !~ /(?i)Long VMFS3 rsv time/{
# <181>2014-12-17T03:14:41.217Z esx.vmware.com vmkernel: cpu13:13040893)FS3: 1341: vol 'datastore_name': [Req mode: 1] Not free; Lock [type 10c00002 offset 11296768 v 274, hb offset 3719168
mutate {
add_tag => "alert"
add_field => { "alert" => "FS3" }
}
} else if [message] =~ /(?i)Failed to lock/{
# <166>2014-12-10T17:36:09.120Z esx.vmware.com Vpxa: [FFF08B90 info 'DiskLib' opID=36139059-8e] DISKLIB-DSCPTR: DescriptorDetermineType: failed to open '/vmfs/volumes/5335bc2e-70db5326-9cc7-f0921c0b59bd/vm_name/vm_name-flat.vmdk': Failed to lock the file (400000003)
mutate {
add_tag => "alert"
add_field => { "alert" => "failed_to_lock" }
}
} else if [message] =~ /(?i)FileIO error/{
# <166>2014-12-10T13:05:07.765Z esx.vmware.com Hostd: [FF9EFAD0 error 'Statssvc'] Unable to load stats depot hostAgentStats in directory /var/lib/vmware/hostd/stats/ : FileIO error: Could not find file : /var/lib/vmware/hostd/stats/hostAgentStats.idMap
mutate {
add_tag => "alert"
add_field => { "alert" => "FileIO_error" }
}
} else if [message] =~ /(?i)Device or resource busy/{
# <14>2014-12-19T09:50:55.992Z esx.vmware.com storageRM: open /vmfs/volumes//datastore_name/.iorm.sf/slotsfile(0x10000042, 0x0) failed: Device or resource busy
mutate {
add_tag => "alert"
add_field => { "alert" => "Device_or_resource_busy" }
}
} else if [message] =~ /(?i)MCE:/{
# <181>2014-12-19T13:43:04.147Z esx.vmware.com vmkernel: cpu0:9892203)MCE: 1282: Status bits: "Memory Controller Error."
mutate {
# add_tag => "alert"
add_field => { "alert" => "Machine_Check_Exception" }
}
} else if [message] =~ /(?i)failed to open/ and [message] !~ /(?i)Failed to lock/{
# <166>2014-12-10T13:27:10.622Z esx.vmware.com Vpxa: [4D343B90 warning 'Libs' opID=feab74be-b8] [NFC ERROR] NfcFile_GetInfo: Failed to open file
mutate {
add_tag => "alert"
add_field => { "alert" => "failed_to_open" }
}
} else if [message] =~ /(?i)The file specified is not a virtual disk/{
# <166>2014-12-10T14:13:35.458Z esx.vmware.com Vpxa: [FFEE7B90 info 'DiskLib' opID=bb64bb70-90] DISKLIB-LINK : "/vmfs/volumes/5417e43a-a20f2412-f4b1-f0921c0b59bd/vm_name/vm_name-flat.vmdk" : failed to open (The file specified is not a virtual disk).
mutate {
add_tag => "alert"
add_field => { "alert" => "not_a_virtual_disk" }
}
} else if [message] =~ /(?i)Failed to load virtual machine/{
# <166>2014-10-01T08:35:11.001Z esx.vmware.com Hostd: [FF8A5AD0 info 'vm:/vmfs/volumes/53a2afe2-799847f0-5f86-d89d675b0341/PARG1DFXOFXC03/PARG1DFXOFXC03.vmx'] Failed to load virtual machine. Marking as unavailable: vim.fault.InvalidVmConfig
mutate {
add_tag => "alert"
add_field => { "alert" => "InvalidVmConfig" }
}
} else if [message] =~ /(?i)HBX:/{
# <181>2014-12-08T21:10:11.803Z esx.vmware.com vmkernel: cpu12:4131)HBX: 231: Reclaimed heartbeat for volume 50813de9-d9039ea0-d6c1-0024e8565b3d (datastore_name): [Timeout] [HB state abcdef02 offset 3829760 gen 55 stampUS 20086651705675 uuid 5353935d-484cdab2-a7e9-0017a477$
mutate {
add_tag => "alert"
add_field => { "alert" => "HBX" }
}
} else if [message] =~ /(?i)[\s](mpn)[s]{0,1}[\s]/{
# <181>2014-12-10T11:13:09.089Z esx.vmware.com vmkernel: cpu32:115278)PageRetire: 1237: Invalid MPN range [0x0, 0x1)
mutate {
add_tag => "alert"
add_field => { "alert" => "Machine_Page_Number" }
}
} else if [message] =~ /(?i)Input\/output error/{
# <166>2014-12-19T11:03:46.984Z esx.vmware.com Hostd: --> Error : Error: Error opening /dev/disks/naa.50060160bb20559c50060160bb20559c: Input/output error
mutate {
add_tag => "alert"
add_field => { "alert" => "Input_output_error" }
}
} else if [message] =~ /(?i)Could not open device/{
# <182>2014-08-12T15:54:49.908Z esx.vmware.com vmkernel: cpu26:13028837)Vol3: 2174: Could not open device 'naa.600605b0070ec9c01b0613ea1266322e:5' for probing: I/O error
mutate {
add_tag => "achtung"
add_field => { "alert" => "Could_not_open_device" }
}
} else if [message] =~ /(?i)cannot reserve/{
# <181>2014-12-10T19:17:17.845Z esx.vmware.com vmkernel: cpu3:9223998)VmMemCow: 1599: p2m update: cannot reserve - cur 2432 2432 rsvd 0 req 1 avail 2432
mutate {
add_tag => "alert"
add_field => { "alert" => "cannot_reserve" }
}
} else if [message] =~ /(?i)Failed to delete/{
# <166>2014-12-16T07:51:54.614Z esx.vmware.com Hostd: [213E0B90 warning 'vm:/vmfs/volumes/4cd43053-eed404ad-27c4-78e7d163b271/vm_name/vmx_name.vmx' opID=662EDE05-00008331-5c-62] DeleteVmDirectory: Failed to delete vm dir '/vmfs/volumes/4cd43053-eed404ad-27c4-78e7d163b271/vm_name'
mutate {
add_tag => "alert"
add_field => { "alert" => "Failed_to_delete" }
}
} else if [message] =~ /(?i)A fault has occurred causing a virtual CPU to enter the shutdown state/{
mutate {
add_tag => "alert"
add_field => { "alert" => "vcpu_shutdown_state" }
}
} else if [message] =~ /(?i)The system has rolled back to a previous image/{
mutate {
add_tag => "alert"
add_field => { "alert" => "The_system_has_rolled_back" }
}
} else if [message] =~ /(?i)more than 100 redo logs/{
mutate {
add_tag => "achtung"
add_field => { "alert" => "more_than_100_redo_logs" }
}
} else if [message] =~ /(?i)Failed to consolidate/{
# <166>2014-12-29T09:47:08.482Z esx.vmware.com Hostd: [32FFFB90 info 'vm:/vmfs/volumes/54929abf-956ed001-31e1-d89d675ba101/vm_name/vmx_name.vmx'] Failed to consolidate disks in Foundry: Error: (15) The file is already in use
mutate {
add_tag => "alert"
add_field => { "alert" => "Failed_to_consolidate" }
}
} else if [message] =~ /(?i)needConsolidate is true/{
# 2012-05-22T12:36:59.101Z [5ED40B90 verbose 'vm:/vmfs/volumes/4f66bf0c-9c44e66e-d073-984be10fb0db/vm_name/vmx_name.vmx'] Time to gather Snapshot information ( read from disk, build tree): 3 msecs. needConsolidate is true.
mutate {
add_tag => "alert"
add_field => { "alert" => "needConsolidate" }
}
} else if [message] =~ /(?i)ErrDev/{
# <181>2014-12-10T10:58:18.738Z esx.vmware.com vmkwarning: cpu12:12414801)WARNING: ErrDev: 94: The err device was accessed. This should not happen frequently.
mutate {
add_tag => "alert"
add_field => { "alert" => "The_err_device_was_accessed" }
}
} else if [message] =~ /(?i)vmodl.fault.ManagedObjectNotFound/{
# 2015-01-15T17:48:31.426Z [21B61B90 verbose 'Placement' opID=SWI-7a6e2ab8] [PlacementManagerImpl::HandleNotPlacedVms] Reset Vm /vmfs/volumes/4ea98080-426bf6f3-0aaf-78e7d163b271/vm_name/vmx_name.vmx, vmodl.fault.ManagedObjectNotFound
mutate {
add_tag => "achtung"
add_field => { "alert" => "vmodl.fault.ManagedObjectNotFound" }
}
} else if [message] =~ /(?i)msg\.[a-zA-Z0-9\-_\.]+/ {
# <166>2015-01-22T22:31:58.616Z esx.vmware.com vmkwarning Hostd: --> key = "msg.checkpoint.precopyfailure",
grok {
tag_on_failure => []
patterns_dir => "/opt/logstash/patterns"
match => [
"message", "(?<msg>msg\.[a-zA-Z0-9\-_\.]+)"
]
}
# msg.dictionary.load.openFailed is routine noise; everything else escalates.
if [msg] != "msg.dictionary.load.openFailed" {
mutate {
add_field => { "alert" => "%{msg}" }
add_tag => "achtung"
}
}
}
#################
# if [message] =~ /Can't convert IP Address of type 0/{
# mutate {
# add_tag => "alert"
# add_field => { "alert" => "IP_Address_type_0" }
# }
# }
# if [message] =~ /vim.fault.InvalidState/{
# <166>2014-12-10T15:32:50.535Z esx.vmware.com Hostd: [2C3ECB90 info 'Default' opID=b380000c-8a] AdapterServer caught exception: vim.fault.InvalidState
# mutate {
# add_tag => "alert"
# add_field => { "alert" => "vim.fault.InvalidState" }
# }
# }
# if [message] =~ /Failed to diff/{
# <166>2014-12-10T13:05:58.790Z esx.vmware.com Vpxa: [FFFBAB90 error 'PropertyCache' opID=WFU-9c45a380] Failed to diff ha-host:network, had ManagedObjectReference[], got ManagedObjectReference[]
# mutate {
# add_tag => "alert"
# add_field => { "alert" => "Failed_to_diff" }
# }
# }
# if [message] =~ /Failed write command to write-quiesced partition/{
# ALERT: ScsiDeviceIO: 3075: Failed write command to write-quiesced partition naa.6006000000000000aa0aaa0000a0a000:1
# mutate {
# add_tag => "achtung"
# add_field => { "alert" => "Failed_write_command" }
# }
# }
# if [message] =~ /No such file or directory/ and [message] !~ /load.openFailed/{
# # <166>2014-12-10T16:49:03.570Z esx.vmware.com Hostd: [379D2B90 verbose 'vm:/vmfs/volumes/548076ca-1a5e9feb-b886-fc15b415a120/vm_name/vmx_name.vmx'] UpdateFileInfo: Failed to find file size for : No such file or directory
# mutate {
# # add_tag => "alert"
# add_field => { "alert" => "No_such_file_or_directory" }
# }
# }
# if [message] =~ /poweron failed with Input\/output error /{
# mutate {
# add_tag => "alert"
# add_field => { "alert" => "poweron_failed" }
# }
# }
# if [message] =~ /L2Sec_EnforcePortCompliance/{
# <181>2014-12-10T19:23:45.733Z esx.vmware.com vmkernel: cpu14:1559738)etherswitch: L2Sec_EnforcePortCompliance:226: client vm_name.eth0 requested promiscuous mode on port 0x200002b, disallowed by vswitch policy
# mutate {
# add_tag => "alert"
# add_field => { "alert" => "L2Sec_EnforcePortCompliance" }
# }
# }
}
#################
# Enrichment pass: independent best-effort extractions (tag_on_failure => [])
# that pull structured fields (datastore, device, nic, timings, fault names)
# out of any ESX message, plus final field renames into the [esx] object.
if [message] =~ /(?i)vmfs/ {
# <166>2014-12-10T18:33:54.239Z esx.vmware.com Hostd: [3451AB90 verbose 'vm:/vmfs/volumes/f8431d5d-1d30ae43/vm_name/vmx_name.vmx'] Actual VM overhead: 170840064 bytes
grok {
tag_on_failure => []
patterns_dir => "/opt/logstash/patterns"
match => [
"message", "/vmfs/volumes/%{USERNAME:vmfs_uuid}/%{USERNAME:vm_name}/%{USERNAME:vmx_name}[.](vmx|vswp|vmdk|log|vmxf|ctk|vmss|nvram|xml)",
"message", "/vmfs/volumes/%{USERNAME:vmfs_uuid}"
]
}
}
if [message] =~ /(?i)naa\./ {
# <181>2014-12-10T18:25:25.426Z esx.vmware.com vmkernel: cpu9:7045276)ScsiDeviceIO: 1198: Device naa.60a9800041764b6c463f437868556b7a performance has improved. I/O latency reduced from 5587 microseconds to 2650 microseconds.
grok {
tag_on_failure => []
patterns_dir => "/opt/logstash/patterns"
match => [
"message", "(?<canonical_name>naa\.[a-f0-9]+)"
]
}
}
if [message] =~ /(?i)MB/ {
# <181>2014-12-10T18:50:27.101Z esx.vmware.com vmkernel: cpu11:1674338)VMotionSend: 3508: 1418237421145920 S: Sent all modified pages to destination (network bandwidth ~460.523 MB/s)
grok {
tag_on_failure => []
patterns_dir => "/opt/logstash/patterns"
match => [
"message", "[~\s]{0,1}%{NUMBER:mbps:float}[\s]{0,1}MB\/s"
]
}
}
if [message] =~ /(?i)ms/ {
# <181>2014-12-10T18:43:57.374Z esx.vmware.com vmkernel: cpu3:9751)FS3Misc: 1440: Long VMFS3 rsv time on 'datastore_name' (held for 272 msecs). # R: 1, # W: 1 bytesXfer: 5 sectors
grok {
tag_on_failure => []
patterns_dir => "/opt/logstash/patterns"
match => [
# NOTE(review): captures the FIRST whitespace-delimited number on the
# line as long as "ms"/"msecs" appears anywhere later — loose; confirm.
"message", "[\s]%{NUMBER:milliseconds:int}[\s].*(ms|msecs)"
]
}
} else if [message] =~ /(?i)micro/ {
grok {
tag_on_failure => []
patterns_dir => "/opt/logstash/patterns"
match => [
"message", ".*[\s]%{NUMBER:microseconds:int}[\s]micro[sS]ec"
]
}
}
if [message] =~ /(?i)vmhba/ {
# <181>2014-12-10T16:30:49.589Z esx.vmware.com vmkernel: cpu14:4739)ScsiPath: 4552: Plugin 'NMP' claimed path 'vmhba0:C0:T0:L3'
grok {
tag_on_failure => []
patterns_dir => "/opt/logstash/patterns"
match => [
"message", "(?<runtime_name>vmhba[0-9]+:C[0-9]+:T[0-9]+:L[0-9]+)"
]
}
}
if [message] =~ /(?i)sense data/ {
# <181>2014-12-10T18:21:53.018Z esx.vmware.com vmkernel: cpu13:1264143)ScsiDeviceIO: 2311: Cmd(0x4124403de140) 0x28, CmdSN 0x8000004b from world 1171425 to dev "naa.60a9800041764b6c463f437868556c47" failed H:0x2 D:0x0 P:0x0 Possible sense data: 0x0 0x0 0x0.
grok {
tag_on_failure => []
patterns_dir => "/opt/logstash/patterns"
match => [
"message", "(?<scsi_code>H:[a-f0-9]+x[a-f0-9]+ D:[a-f0-9]+x[a-f0-9]+ P:[a-f0-9]+x[a-f0-9]+).*(?<sense_data>[a-f0-9]+x[a-f0-9]+ [a-f0-9]+x[a-f0-9]+ [a-f0-9]+x[a-f0-9]+)"
]
}
# Non-zero sense data indicates a real SCSI error worth alerting on.
if [sense_data] != "0x0 0x0 0x0" {
mutate {
add_tag => "alert"
add_field => { "alert" => "%{sense_data}" }
}
}
}
if [message] =~ /(?i)vmnic/ {
# <181>2014-12-10T18:44:52.380Z esx.vmware.com vmkernel: cpu18:8243)<3>ixgbe: vmnic6: ixgbe_alloc_rx_queue: allocated rx queue 1
grok {
tag_on_failure => []
patterns_dir => "/opt/logstash/patterns"
match => [
"message", "(?<vmnic>vmnic[0-9]+)"
]
}
}
if [message] =~ /(?i)[\s]vol[\s]/{
# <181>2014-12-18T00:48:42.879Z esx.vmware.com vmkernel: cpu5:14780898)FS3: 1227: vol 'datastore_name': [Req mode: 1] Checking liveness of [type 10c00002 offset 9834496 v 1112, hb offset 3719168
grok {
tag_on_failure => []
patterns_dir => "/opt/logstash/patterns"
match => [
"message", " vol '%{GREEDYDATA:datastore_name}'"
]
}
}
if [message] =~ /(?i)vim\./ {
# <166>2014-12-10T18:39:25.632Z esx.vmware.com Vpxa: [FFBAAB90 verbose 'Default' opID=SWI-88e6d2f0] [TaskInfoChannel::GetTaskInfo] task: haTask--vim.host.VMotionManager.initiateDestination-134036228 setup for async notification
grok {
tag_on_failure => []
patterns_dir => "/opt/logstash/patterns"
match => [
"message", "(?<vim>vim\.[a-zA-Z\.]+)"
]
}
}
if [message] =~ /(?i)vmodl\./ {
# <166>2014-12-10T18:42:14.963Z esx.vmware.com Hostd: [25D95B90 info 'Vmomi'] Throw vmodl.fault.RequestCanceled
grok {
tag_on_failure => []
patterns_dir => "/opt/logstash/patterns"
match => [
"message", "(?<vmodl>vmodl\.[a-zA-Z\.]+)"
]
}
}
if [message] =~ /(?i)Correlator/ {
# <14>2014-12-10T16:31:09.619Z esx.vmware.com vobd: [netCorrelator] 73154456us: [esx.audit.net.firewall.config.changed] Firewall configuration has changed. Operation 'add' for rule set webAccess succeeded.
grok {
tag_on_failure => []
patterns_dir => "/opt/logstash/patterns"
match => [
"message", "\[(?<Correlator>.*Correlator)\]"
]
}
}
if [message] =~ /(?i)vmotion/ {
# <166>2014-12-10T18:28:11.769Z esx.vmware.com Vpxa: [FFF69B90 verbose 'Default' opID=task-internal-1-19c63550-66-6-e2-56-d2-77-90-e5] [MIGRATE] (1418236087721814) vmotion result has downtime value 284157
grok {
tag_on_failure => []
patterns_dir => "/opt/logstash/patterns"
match => [
"message", "vmotion result has downtime value %{NUMBER:vmotion_downtime:int}"
]
}
}
if [message] =~ /(?i)precopyStunTime/ {
# <166>2014-12-10T18:33:19.874Z esx.vmware.com Hostd: [6A623B90 verbose 'VMotionSrc (1418236393436303)'] Set source task result precopyStunTime to 68427
grok {
tag_on_failure => []
patterns_dir => "/opt/logstash/patterns"
match => [
"message", "precopyStunTime to %{NUMBER:precopyStunTime:int}"
]
}
}
if [message] =~ /(?i)failed[\s]to[\s]/ {
# <166>2015-01-05T16:34:54.973Z esx.vmware.com Hostd: [692A5B90 warning 'vm:/vmfs/volumes/c12f0795-0b6af776/vm_name/vmx_name.vmx' opID=HB-host-9145@3421082-b1b1241a-f9] Failed to unregister: vim.fault.TaskInProgress
grok {
tag_on_failure => []
patterns_dir => "/opt/logstash/patterns"
match => [
"message", "(?i).*(?<failed_to>failed to [a-zA-Z ]+)"
]
add_tag => "failed_to"
}
}
#################
if [message_program] == "storageRM" {
# <14>2014-12-10T18:33:04.910Z esx.vmware.com storageRM: <datastore_name> 2707630 avglatency= 0.83 iops= 84 threshold= 30 Win = 30.00 ws= 30 devqdepth= 30 iocount= 4 noio= 0.00 coio= 0.05
grok {
tag_on_failure => []
patterns_dir => "/opt/logstash/patterns"
match => [
"message", "storageRM:[\s]+<%{GREEDYDATA:iorm_datastore}>.*avglatency=[\s]+%{NUMBER:iorm_avglatency:float}[\s]+iops=[\s]+%{NUMBER:iorm_iops:int}.*devqdepth=[\s]+%{NUMBER:iorm_devqdepth:int}"
]
add_tag => "iorm"
}
# Strip the ", 0" suffix storageRM appends to the datastore name.
mutate {
gsub => [
"iorm_datastore", ", 0", ""
]
}
}
###
### END SEXILOG
###
# Known low-value chatter gets the "spam" tag (presumably filtered downstream).
if [message] =~ /Section for VMware ESX/ or [message] =~ /Vpxa: $/ or [message] =~ /Did not get any entity metrics from the host/ or [message] =~ /Failed to read header on stream/ or [message] =~ /OnEndElement: no _props to update value, skipping/ or [message] =~ /-->/ {
mutate { add_tag => "spam" }
}
# Derive a syslog_level from payload keywords when one was parsed earlier.
if [local_message] {
if [local_message] =~ /(?i)failed/ or [local_message] =~ /(?i)error/ {
mutate {
replace => [ "syslog_level", "error" ]
#add_field => { "syslog_level" => "error" }
}
}
else if [local_message] =~ /(?i)could not/ or [local_message] =~ /(?i)not found/ {
mutate {
replace => [ "syslog_level", "warning" ]
#add_field => { "syslog_level" => "warning" }
}
}
}
# Nest the flat extraction fields under the [esx] object.
mutate {
rename => [ "esx_cpu", "[esx][cpu]" ]
rename => [ "esx_datastore_name", "[esx][datastore][name]" ]
rename => [ "esx_datastore_mount_count", "[esx][datastore][mount_count]" ]
rename => [ "esx_cpu_id", "[esx][cpu_id]" ]
rename => [ "esx_vc_opID", "[esx][vc_opID]" ]
rename => [ "esx_vmkernel_opID", "[esx][vmkernel_opID]" ]
rename => [ "esx_world_id", "[esx][world_id]" ]
rename => [ "message_opID", "[esx][message][opID]" ]
rename => [ "message_service", "[esx][message][service]" ]
rename => [ "message_service_info", "[esx][message][service_info]" ]
rename => [ "message_thread_id", "[esx][message][thread_id]" ]
rename => [ "message_user", "[esx][message][user]" ]
}
}
}
filter {
if "vcsa" in [tags] {
grok {
break_on_match => true
match => [
## vpxd.log
# 2015-05-12T07:49:20.851Z [7F7679ADC700 info 'commonvpxLro' opID=276e72df] [VpxLRO] -- BEGIN task-internal-15272072 -- -- vim.PerformanceManager.queryProviderSummary -- ed47b481-688a-6be2-3170-9d36a0f1cb16(529bcc19-28a2-654f-9548-f38aa983d1df)
## vpxd-alert.log
# 2015-05-12T11:42:29.811Z [7F766B9C4700 error 'Alert' opID=task-internal-2-43cec339-70] Alert:false@ /build/mts/release/bora-2442329/bora/vpx/drs/algo/drmStorageIO.cpp:486
# 2015-05-12T13:39:17.681Z [7F7679A5B700 info 'vpxdvpxdVmomi'] [ClientAdapterBase::InvokeOnSoap] Invoke done (10.4.1.81, vmodl.query.PropertyCollector.waitForUpdates)
# 2015-05-12T14:54:09.100Z [7F766B73F700 error 'SoapAdapter']
# 2015-05-12T15:22:27.349Z [7F767A2EC700 info 'licenselicenseUtil9;] Unable to decode license 9502N-48192-18052-01P00-15PHN with error 5 using path /etc/vmware-vpx//licenses/site//VMware ESX Server/5.0/5.0.0.19
"message", "%{TIMESTAMP_ISO8601} %{SYSLOGHOST} %{TIMESTAMP_ISO8601:remote_timestamp} \[%{BASE16NUM:message_thread_id} %{WORD:syslog_level} \'%{DATA:message_service}(?:[;'])( opID=%{DATA:message_opID})?\]( (\[%{DATA:message_program}\] )?%{GREEDYDATA:message_syslog})?",
## vws.log
# [2015-05-08 00:31:50,726 Timer-0 INFO com.vmware.vim.common.vdb.VdbConfig] Overrides=
# [2015-05-08 00:33:42,161 pool-3-thread-1 INFO com.vmware.vim.cimmonitor.service.impl.HostRefresherTask] Polling health state on 10.4.1.84/host-122
## ds.log
# [2015-05-12 13:18:40,471 pool-18-thread-1 INFO com.vmware.vim.dataservices.federation.FederationReconfigurator] No peers reachable - skipping reconfiguration.
# [2015-05-12 13:18:41,658 tomcat-exec-21 INFO com.vmware.vim.query.server.servlets.QueryServlet] Received new query request from 10.0.0.153
# [2015-05-12 15:28:43,283 pool-3-thread-5 ERROR com.vmware.vim.cimmonitor.service.impl.CimMonitorImpl] Error grabbing CIM ticket on 10.4.1.84/host-122:Failed to acquire CIM ticket for host host-122
"message", "%{TIMESTAMP_ISO8601} %{SYSLOGHOST} \[(?<vcsa_timestamp>(%{YEAR}-%{MONTHNUM}-%{MONTHDAY} %{TIME})) %{DATA:message_service} %{SPACE}?%{WORD:syslog_level}%{SPACE}? %{DATA:message_system_info}\] %{GREEDYDATA:message_syslog}",
## vmware-vpxd.log
# 2015-02-02 18:51:41 +0000 BEGIN 8163
# /etc/init.d/vmware-vpxd start
# Starting vmware-vpxd: (vmware-vpxd is already running)
# @@OP_START|0@@ Starting tomcat
# Starting tomcat: (tomcat is already running) success
# @@OP_END|0|SUCCESS@@ Starting tomcat
# @@OP_START|1@@ Registering vpxd services
# Executing startup scripts...
# Autodeploy service is disabled, skipping registration.
# @@OP_END|1|SUCCESS@@ Registering vpxd services
# 2015-02-02 18:51:42 +0000 END 8163
"message", "%{TIMESTAMP_ISO8601} %{SYSLOGHOST} %{GREEDYDATA:message_syslog}"
]
}
#if [message] =~ /\[Drm\]/ {
# grok {
# break_on_match => false
# match => [
# "message", ".*\[Drm\].*TOTAL[\s]cpu[\s]%{NUMBER:vpxd_drm_total_cpu:float}[\s]mem[\s]%{NUMBER:vpxd_drm_total_mem:float}"
# ]
# }
#}
#if [message] =~ /ScoreboardStats/ {
# grok {
# break_on_match => false
# match => [
# "message", "ScoreboardStats.*total[\s]%{NUMBER:ScoreboardStats:float}"
# ]
# }
#}
#if [message] =~ /VcServiceStats/ {
# grok {
# break_on_match => false
# match => [
# "message", "VcServiceStats.*/total[\s]%{NUMBER:VcServiceStats:float}",
# "message", "VcServiceStats.*/mean[\s]%{NUMBER:VcServiceStats:float}",
# "message", "VcServiceStats.*/max[\s]%{NUMBER:VcServiceStats:float}",
# "message", "VcServiceStats.*/min[\s]%{NUMBER:VcServiceStats:float}",
# "message", "VcServiceStats.*/numSamples[\s]%{NUMBER:VcServiceStats:float}"
# ]
# }
#}
#if [message] =~ /OpMoLockStats/ {
# grok {
# break_on_match => false
# match => [
# #"message", "OpMoLockStats.*/total[\s]%{NUMBER:OpMoLockStats:float}",
# "message", "OpMoLockStats.*/mean[\s]%{NUMBER:OpMoLockStats:float}",
# #"message", "OpMoLockStats.*/max[\s]%{NUMBER:OpMoLockStats:float}",
# "message", "OpMoLockStats.*/min[\s]%{NUMBER:OpMoLockStats:float}"
# #"message", "OpMoLockStats.*/numSamples[\s]%{NUMBER:OpMoLockStats:float}"
# ]
# }
#}
#if [message] =~ /HostStatus/ {
# grok {
# break_on_match => false
# match => [
# "message", "HostStatus.*(SpecGenNo|ChangeGenNo)[\s]%{NUMBER:HostStatus:float}",
# "message", "HostStatus.*IP[\s]%{GREEDYDATA}",
# "message", "HostStatus.*Name[\s]%{GREEDYDATA}"
# ]
# }
#}
#if [message] =~ /ResPoolStats/ {
# grok {
# break_on_match => false
# match => [
# "message", "ResPoolStats.*(total|min|mean|max)[\s]%{NUMBER:ResPoolStats:float}"
# ]
# }
#}
#if [message] =~ /OpMoLockStats/ {
# grok {
# break_on_match => false
# match => [
# "message", "OpMoLockStats.*total[\s]%{NUMBER:OpMoLockStats:float}"
# ]
# }
#}
#if [message] =~ /peer/ {
# grok {
# break_on_match => false
# match => [
# "message", "peer=%{DATA:peer}"
# ]
# }
#}
if [message] =~ /-->/ {
mutate { add_tag => [ "spam" ] }
}
if [vcsa_timestamp] {
date {
# 2015-05-12 13:18:41,321
timezone => "UTC"
match => [ "vcsa_timestamp" , "YYYY-MM-dd HH:mm:ss,SSS" ]
remove_field => [ "vcsa_timestamp", "local_timestamp" ]
}
}
}
}
filter {
if "nxlog" in [tags] {
grok {
patterns_dir => "/opt/logstash/patterns"
break_on_match => true
match => [
# nxlog syslog_rfc5424
#2015-04-30T15:25:15-03:00 10.0.5.200 1 2015-04-30T14:25:02.000000-04:00 jenkins Microsoft-Windows-Security-SPP 0 - [NXLOG@14506 Keywords="36028797018963968" EventType="ERROR" EventID="8198" ProviderGuid="{E23B33B0-C8C9-472C-A5F9-F2BDFEA0F156}" Version="0" Task="0" OpcodeValue="0" RecordNumber="6235" ThreadID="0" Channel="Application" EventReceivedTime="2015-04-30 14:25:02" SourceModuleName="eventlog" SourceModuleType="im_msvistalog"] License Activation (slui.exe) failed with the following error code: hr=0xC004F074 Command-line arguments: RuleId=eeba1977-569e-4571-b639-7623d8bfecc0;Action=AutoActivate;AppId=55c92734-d682-4d71-983e-d6ec3f16059f;SkuId=00091344-1ea4-4f37-b789-01750ba6988c;NotificationInterval=1440;Trigger=NetworkAvailable
#2015-05-01T20:10:48-03:00 10.0.10.49 1 2015-05-01T18:10:46.551236-05:00 VP-RBLANCHARD01 Microsoft-Windows-WMI-Activity 12728 - [NXLOG@14506 Keywords="4611686018427387904" EventType="INFO" EventID="5857" ProviderGuid="{1418EF04-B0B4-4623-BF7E-D74AB47BBDAA}" Version="0" Task="0" OpcodeValue="0" RecordNumber="331409" ThreadID="13284" Channel="Microsoft-Windows-WMI-Activity/Operational" Domain="NT AUTHORITY" AccountName="NETWORK SERVICE" UserID="NETWORK SERVICE" AccountType="Well Known Group" Opcode="Info" EventReceivedTime="2015-05-01 18:10:48" SourceModuleName="eventlog" SourceModuleType="im_msvistalog"] MSVDS__PROVIDER provider started with result code 0x0. HostProcess = wmiprvse.exe; ProcessID = 12728; ProviderPath = %systemroot%\system32\wbem\vdswmi.dll
"message", "%{TIMESTAMP_ISO8601} %{SYSLOGHOST} %{IPORHOST} %{TIMESTAMP_ISO8601:remote_timestamp} %{IPORHOST:hostname} %{PROG:message_program} %{NUMBER:message_program_pid} - (\[(?<nxlog_id>NXLOG@%{NUMBER}) %{DATA:ms_eventlog_details}\])( %{DATA:local_message}(?:\. ))?((?: )?%{GREEDYDATA:ms_eventlog_verbose})?",
"message", "%{TIMESTAMP_ISO8601:remote_timestamp} %{SYSLOGHOST} %{GREEDYDATA:message_syslog}"
]
}
# Ensure local_message is always populated: keep the short message the grok
# captured when present, otherwise fall back to the full verbose eventlog
# text. (The original expressed this with an empty if-branch; collapsed
# into a single negated conditional — same behavior.)
if ![local_message] {
if [ms_eventlog_verbose] {
mutate {
add_field => { "local_message" => "%{ms_eventlog_verbose}" }
}
}
}
if [ms_eventlog_details] {
kv {
source => "ms_eventlog_details"
target => "[eventlog]"
trimkey => "\"\{\}"
trim => "\"\{\}"
remove_field => [ "ms_eventlog_details" ]
}
}
if [ms_eventlog_verbose] {
if [ms_eventlog_verbose] =~ /This event is generated/ or [ms_eventlog_verbose] =~ /You should expect this event/ or [ms_eventlog_verbose] =~ /Guidance:/ or [ms_eventlog_verbose] =~ /provider started with result code/ or [ms_eventlog_verbose] =~ /Command-line arguments: / or [ms_eventlog_verbose] =~ /The Software Protection service has completed licensing status check/ or [ms_eventlog_verbose] =~ /The client has sent an activation request to the key management service machine/ or [ms_eventlog_verbose] =~ /The following error occurred:/ {
grok {
patterns_dir => "/opt/logstash/patterns"
break_on_match => true
match => [
"ms_eventlog_verbose", "%{DATA:ms_eventlog_msg}(?<ms_eventlog_msg_guidance>Guidance:%{GREEDYDATA})",
"ms_eventlog_verbose", "%{DATA:ms_eventlog_msg}(?<ms_eventlog_msg_desc>This event is generated%{GREEDYDATA})",
"ms_eventlog_verbose", "%{DATA:ms_eventlog_msg}(?<ms_eventlog_msg_desc>You should expect this event%{GREEDYDATA})",
"ms_eventlog_verbose", "(?<ms_eventlog_msg_summary>%{DATA} provider %{WORD} with result code %{DATA}\.) %{GREEDYDATA:ms_eventlog_msg}",
"ms_eventlog_verbose", "(?<ms_eventlog_msg_summary>%{DATA})%{SPACE}Command-line arguments:%{SPACE}%{GREEDYDATA:ms_eventlog_msg}",
"ms_eventlog_verbose", "(?<ms_eventlog_msg_summary>The Software Protection service has completed licensing status check\.) %{GREEDYDATA:ms_eventlog_msg}",
"ms_eventlog_verbose", "(?<ms_eventlog_msg_summary>The client has sent an activation request to the key management service machine\.) %{GREEDYDATA:ms_eventlog_msg}",
"ms_eventlog_verbose", "(?<ms_eventlog_msg_summary>The following error occurred: %{DATA}\.) %{GREEDYDATA:ms_eventlog_msg}"
]
}
if [ms_eventlog_msg_summary] {
mutate {
replace => [ "local_message", "%{ms_eventlog_msg_summary}" ]
}
}
if [ms_eventlog_msg] =~ /C:[\\]{1}/ {
mutate {
gsub => [ "ms_eventlog_msg", "C:[\\]{1}", "" ]
}
}
}
else if [ms_eventlog_verbose] =~ /^Id = / or [ms_eventlog_verbose] =~ /^Subject:[ ]{2}/ {
mutate {
rename => [ "ms_eventlog_verbose", "ms_eventlog_msg" ]
}
}
}
if [ms_eventlog_msg] {
if [ms_eventlog_msg] =~ /^HostProcess/ or [ms_eventlog_msg] =~ /^Id = / or [ms_eventlog_msg] =~ /provider started with result code/ or [ms_eventlog_msg] =~ /^RuleID/ {
kv {
source => "ms_eventlog_msg"
field_split => ";"
value_split => "="
trimkey => " "
trim => " "
target => "[eventlog][Message]"
}
}
else if [ms_eventlog_msg] =~ /^Subject:[ ]{2}/ {
mutate {
gsub => [ "ms_eventlog_msg", "^Subject:[ ]{2}", "" ]
gsub => [ "ms_eventlog_msg", "New Logon:[ ]{2}", "" ]
gsub => [ "ms_eventlog_msg", "Process Information:[ ]{2}", "" ]
gsub => [ "ms_eventlog_msg", "Network Information:[ ]{2}", "" ]
gsub => [ "ms_eventlog_msg", "Workstation Name:[ ]{3}", "" ]
gsub => [ "ms_eventlog_msg", "Detailed Authentication Information:[ ]{2}", "" ]
gsub => [ "ms_eventlog_msg", "Privilege[ ]{4}", "Privelege," ]
gsub => [ "ms_eventlog_msg", "[\(\)\{\}]", "" ]
gsub => [ "ms_eventlog_msg", ":[ ]{2}", ":" ]
# seems like LS has a bug wrt greediness, hence the following hack
gsub => [ "ms_eventlog_msg", "[ ]{2}", ";" ]
gsub => [ "ms_eventlog_msg", "[ ]{1}", "" ]
gsub => [ "ms_eventlog_msg", ";$", "" ]
}
kv {
source => "ms_eventlog_msg"
value_split => ":"
field_split => ";"
target => "[eventlog][Message]"
}
}
}
if [eventlog][EventReceivedTime] {
date {
# 2015-05-14 19:59:55
match => [ "[eventlog][EventReceivedTime]" , "YYYY-MM-dd HH:mm:ss" ]
remove_field => [ "[eventlog][EventReceivedTime]", "remote_timestamp" ]
}
}
if [eventlog][EventType] {
mutate {
add_field => { "syslog_level" => "%{[eventlog][EventType]}" }
}
}
mutate {
#remove_field => [ "ms_eventlog_msg", "ms_eventlog_verbose", "ms_eventlog_msg_summary" ]
#rename => [ "[eventlog][EventReceivedTime]", "[nxlog][received]" ]
rename => [ "[eventlog][SourceModuleName]", "[nxlog][module]" ]
rename => [ "[eventlog][SourceModuleType]", "[nxlog][module_type]" ]
rename => [ "hostname", "[nxlog][hostname]" ]
rename => [ "ms_eventlog_msg", "[eventlog][Message][Message]" ]
rename => [ "ms_eventlog_msg_desc", "[eventlog][Message][Description]" ]
rename => [ "ms_eventlog_msg_details", "[eventlog][Message][Details]" ]
rename => [ "ms_eventlog_msg_summary", "[eventlog][Message][Summary]" ]
rename => [ "ms_eventlog_verbose", "[eventlog][Verbose]" ]
rename => [ "nxlog_id", "[nxlog][id]" ]
}
}
}
filter {
if "3par" in [tags] {
# 3par_class -- as extracted from raw syslog data
# alert_change
# auth_helper
# bad_cmd_req
# ccrtpg
# chstcr
# chstcr_tocsvr
# cli_auth_fail
# cli_cmd_err
# cli_command
# cli_comm_err
# cli_int_err
# cli_ssl_err
# comp_state_change
# disk_present
# disk_state_change
# disk_states
# dskabrt
# dskerr
# dskfail
# fpga_states
# fw_coredump
# host_path_change
# ide_fileintegrity
# ldmir_status
# ld_started
# lesb_error
# license
# mcu_bat_failedtest
# mcu_bat_states
# mcu_link_states
# mcu_ps_acfail
# mcu_ps_alert
# mcu_ps_dcfail
# mcu_ps_fanfail
# mcu_ps_removed
# mcu_ps_states
# mcu_ps_undervoltage
# mederr
# memory_used
# modified_scheduled_task
# new_obj
# notify_comp
# pel_error
# phys_alloc_50
# phys_alloc_ssd_50
# process_dead
# removed_obj
# rmm_link
# rmm_vvsync
# rwztte
# sched_sys_fail
# scsi
# sensor_states
# ses_event
# smag_info
# smag_resume
# smag_resume_done
# smag_start
# smag_start_done
# task_complete
# task_fail
# too_many_events
# transfer_request
# vv_copy
# vv_task
# vv_task_update
# 3par_components -- as extracted from raw syslog data -- become fields (prefixed with "3par_")
# es_replication
# hw_cage
# hw_cage_sled
# hw_disk
# hw_node
# hw_nodefan
# sw_alert
# sw_auth_helper
# sw_cage_sled
# sw_cli
# sw_host
# sw_ld
# sw_objset
# sw_os
# sw_pd
# sw_port
# sw_rmm_group
# sw_rmm
# sw_rmm_link
# sw_rmm_volume
# sw_scheduler
# sw_scsi
# sw_snap
# sw_sysmgr
# sw_task
# sw_user_conn
# sw_vlun
# sw_vv
# undefined
grok {
patterns_dir => "/opt/logstash/patterns"
break_on_match => true
match => [
# determine %{3par_class} and %{3par_components}
"message", "%{TIMESTAMP_ISO8601:remote_timestamp} %{SYSLOGHOST} %{WORD:3par_class} %{DATA:3par_components}(?: )%{GREEDYDATA:local_message}"
]
# apply %{3par_class} as a tag
add_tag => [ "%{3par_class}" ]
}
grok {
patterns_dir => "/opt/logstash/patterns"
break_on_match => true
match => [
# determine primary component
"3par_components", "%{WORD:3par_primary_component}(?:[:;])?%{GREEDYDATA}"
]
}
mutate {
rename => [ "3par_primary_component", "[3par][primary_component]" ]
rename => [ "3par_class", "[3par][class]" ]
}
# split affected components on ","
csv {
source => "3par_components"
separator => ","
columns => [ "3par_component_0", "3par_component_1", "3par_component_2", "3par_component_3" ]
}
# breakdown components as kv with ";" and ":"
kv {
source => "3par_component_0"
field_split => ";"
value_split => ":"
#prefix => "3par_"
target => "[3par]"
}
kv {
source => "3par_component_1"
field_split => ";"
value_split => ":"
#prefix => "3par_"
target => "[3par]"
}
kv {
source => "3par_component_2"
field_split => ";"
value_split => ":"
#prefix => "3par_"
target => "[3par]"
}
kv {
source => "3par_component_3"
field_split => ";"
value_split => ":"
#prefix => "3par_"
target => "[3par]"
}
### BEGIN conditionals per 3par_components ###
#if [3par][primary_component] == "es_replication" {
# grok {
# patterns_dir => "/opt/logstash/patterns"
# break_on_match => false
# tag_on_failure => []
# match => [
# "[3par][es_replication]", ""
# ]
# }
#}
#if [3par][primary_component] == "hw_cage" {
# grok {
# patterns_dir => "/opt/logstash/patterns"
# break_on_match => false
# tag_on_failure => []
# match => [
# "[3par][hw_cage]", ""
# ]
# }
#}
#if [3par][primary_component] == "hw_cage_sled" {
# grok {
# patterns_dir => "/opt/logstash/patterns"
# break_on_match => false
# tag_on_failure => []
# match => [
# # 7:11:0
# "[3par][hw_cage_sled]", "%{CTD:3par_magazine}"
# ]
# }
#}
#if [3par][primary_component] == "hw_disk" {
# grok {
# patterns_dir => "/opt/logstash/patterns"
# break_on_match => false
# tag_on_failure => []
# match => [
# "[3par][hw_disk]", ""
# ]
# }
#}
#if [3par][primary_component] == "hw_node" {
# grok {
# patterns_dir => "/opt/logstash/patterns"
# break_on_match => false
# tag_on_failure => []
# match => [
# "[3par][hw_node]", ""
# ]
# }
#}
#if [3par][primary_component] == "sw_alert" {
# grok {
# patterns_dir => "/opt/logstash/patterns"
# break_on_match => false
# tag_on_failure => []
# match => [
# "[3par][sw_alert]", "",
# ]
# }
#}
#if [3par][primary_component] == "sw_auth_helper" {
# grok {
# patterns_dir => "/opt/logstash/patterns"
# break_on_match => false
# tag_on_failure => []
# match => [
# "[3par][sw_auth_helper]", ""
# ]
# }
#}
#if [3par][primary_component] == "sw_cage_sled" {
# grok {
# patterns_dir => "/opt/logstash/patterns"
# break_on_match => false
# tag_on_failure => []
# match => [
# "[3par][sw_cage_sled]", ""
# ]
# }
#}
if [3par][primary_component] == "sw_cli" {
grok {
patterns_dir => "/opt/logstash/patterns"
break_on_match => false
tag_on_failure => []
match => [
"[3par][sw_cli]", "",
# SQLite DB error executing BEGIN TRANSACTION;~INSERT INTO current.statrcvv VALUES (1429313100,'VMFS_L060_R_CYPS01','-','CG_VP_CYPS01.r45201','0','VP-74005201','Per',8,'IP',2,0,,,2605676912,1283792248287,2178243498222,0,0,0,0,0,0,0,134564,120452186112,0,0,439634218,1283792248287,2178243498222,0,0,0,0,0,0,0,0,0,0,0,0);~INSERT INTO current.statrcvv VALUES (1429313100,'VMFS_L060_R_CYPS01','-','CG_VP_CYPS01.r45201','0','VP-74005201','Per',8,'IP',2,1,,,2605763879,1284024684209,2178243498223,0,0,0,0,0,0,0,133491,121679806464,0,0,437659017,1284024684209,2178243498223,0,0,0,0,0,0,0,0,0,0,0,0);~INSERT INTO current.statrcvv VALUES (1429313100,'VMFS_L050_R_DEV01','-','CG_VP_DEV01
"local_message", "%{DATA:3par_message}(?:;)%{GREEDYDATA:3par_message_verbose}",
"3par_message", "SQLite DB %{WORD:3par_status} %{GREEDYDATA:3par_sql}",
#2015-04-17T19:48:27-03:00 3PAR_1648421 cli_cmd_err sw_cli {3parsvc super all {{0 8}} -1 10.5.1.243 20503} {Command: getstatrcopy Error: Remote copy is not started.-} {}
#2015-04-21T11:58:45-03:00 3PAR_1404738 cli_cmd_err sw_cli {3parsvc super all {{0 8}} -1 172.20.8.19 23009} {Command: getsralertcrit all Error: no criteria listed} {}
#2015-04-21T10:41:50-03:00 3PAR_1302049 cli_cmd_err sw_cli {3parsvc super all {{0 8}} -1 10.5.1.249 15571} {Command: getportdev ns -1 1:1:2 Error: Error: Unable to get device list on port 1:1:2} {}
#2015-04-17T20:28:09-03:00 3PAR_1648421 cli_command sw_cli {3paradm super all {{0 8}} -1 10.0.8.26 22632} {setcurrentdomain -} {}
#2015-04-20T13:12:00-03:00 3PAR_1302049 cli_command sw_cli {3paradm super all {{0 8}} -1 10.0.8.26 32281} {geteventlog -sec 10 -tok 0 -fnum 0 -prstate 0 -prnode 0} {}
"local_message", "{%{USERNAME:3par_user} %{WORD:3par_user_role} %{WORD:3par_user_priviliges} \{\{%{NUMBER} %{NUMBER}\}\} %{NUMBER} %{IPORHOST:3par_src_ip} %{NUMBER:3par_sw_cli_id}\} %{GREEDYDATA:3par_message_summary}"
]
}
}
#if [3par][primary_component] == "sw_host" {
# grok {
# patterns_dir => "/opt/logstash/patterns"
# break_on_match => false
# tag_on_failure => []
# match => [
# "[3par][sw_host]", ""
# #"[3par][sw_host]", "%{NUMBER:3par_sw_host_id}(?::)?(%{IPORHOST:3par_sw_host_id_verbose})?"
# ]
# }
#}
#if [3par][primary_component] == "sw_ld" {
# grok {
# patterns_dir => "/opt/logstash/patterns"
# break_on_match => false
# tag_on_failure => []
# match => [
# "[3par][sw_ld]", ""
# ]
# }
#}
#if [3par][primary_component] == "sw_objset" {
# grok {
# patterns_dir => "/opt/logstash/patterns"
# break_on_match => false
# tag_on_failure => []
# match => [
# "[3par][sw_objset]", ""
# ]
# }
#}
#if [3par][primary_component] == "sw_os" {
# grok {
# patterns_dir => "/opt/logstash/patterns"
# break_on_match => false
# tag_on_failure => []
# match => [
# "[3par][sw_os]", ""
# ]
# }
#}
#if [3par][primary_component] == "sw_pd" {
# grok {
# patterns_dir => "/opt/logstash/patterns"
# break_on_match => false
# tag_on_failure => []
# match => [
# "[3par][sw_pd]", ""
# ]
# }
#}
#if [3par][primary_component] == "sw_port" {
# grok {
# patterns_dir => "/opt/logstash/patterns"
# break_on_match => false
# tag_on_failure => []
# match => [
# "[3par][sw_port]", ""
# ]
# }
#}
if [3par][primary_component] == "sw_rmm" or [3par][primary_component] == "sw_rmm_link" {
grok {
patterns_dir => "/opt/logstash/patterns"
break_on_match => false
tag_on_failure => []
match => [
# 13:HX-V400_0_3_1
"[3par][sw_rmm_link]", "%{NUMBER:sw_rmm_link_id}:%{GREEDYDATA:sw_rmm_link_id_verbose}",
# HX-V400_1_3_1
"sw_rmm_link_id_verbose", "%{IPORHOST:rc_host}(?:_)(?<rc_nsp>%{NUMBER}_%{NUMBER}_%{NUMBER})"
]
}
if [rc_host] {
mutate {
rename => [ "rc_host", "[3par][rmm_link_tgt_host]" ]
}
}
if [rc_nsp] {
mutate {
gsub => [ "rc_nsp", "_", ":" ]
}
mutate {
rename => [ "rc_nsp", "[3par][3par_path]" ]
}
}
}
if [3par][primary_component] == "sw_rmm_group" {
grok {
patterns_dir => "/opt/logstash/patterns"
break_on_match => false
tag_on_failure => []
match => [
# 1:VP7400-PROD01
"[3par][sw_rmm_group]", "%{NUMBER:3par_sw_rmm_group_id}:%{GREEDYDATA:3par_sw_rmm_group_id_verbose}"
]
}
}
#if [3par][primary_component] == "sw_rmm_link" {
# grok {
# patterns_dir => "/opt/logstash/patterns"
# break_on_match => false
# tag_on_failure => []
# match => [
# "[3par][sw_rmm_link]", ""
# ]
# }
#}
if [3par][primary_component] == "sw_rmm_volume" {
grok {
patterns_dir => "/opt/logstash/patterns"
break_on_match => false
tag_on_failure => []
match => [
# 135:VMFS_L041_REPL_7400
"[3par][sw_rmm_volume]", "%{NUMBER:3par_sw_rmm_volume_id}:%{GREEDYDATA:3par_sw_rmm_volume_id_verbose}"
]
}
}
#if [3par][primary_component] == "sw_scheduler" {
# grok {
# patterns_dir => "/opt/logstash/patterns"
# break_on_match => false
# tag_on_failure => []
# match => [
# "[3par][sw_scheduler]", ""
# ]
# }
#}
#if [3par][primary_component] == "sw_scsi" {
# grok {
# patterns_dir => "/opt/logstash/patterns"
# break_on_match => false
# tag_on_failure => []
# match => [
# "[3par][sw_scsi], ""
# ]
# }
#}
#if [3par][primary_component] == "sw_snap" {
# grok {
# patterns_dir => "/opt/logstash/patterns"
# break_on_match => false
# tag_on_failure => []
# match => [
# "[3par][sw_snap]", ""
# ]
# }
#}
#if [3par][primary_component] == "sw_sysmgr" {
# grok {
# patterns_dir => "/opt/logstash/patterns"
# break_on_match => false
# tag_on_failure => []
# match => [
# "[3par][sw_sysmgr]", ""
# ]
# }
#}
#if [3par][primary_component] == "sw_task" {
# grok {
# patterns_dir => "/opt/logstash/patterns"
# break_on_match => false
# tag_on_failure => []
# match => [
# "[3par][sw_task]", ""
# ]
# }
#}
if [3par][primary_component] == "sw_user_conn" {
grok {
patterns_dir => "/opt/logstash/patterns"
break_on_match => false
tag_on_failure => []
match => [
"[3par][sw_user_conn]", "%{NUMBER:3par_sw_user_conn_id}:%{IPORHOST:3par_src_ip}:%{USERNAME:3par_user}(%{GREEDYDATA})?"
]
}
}
#if [3par][primary_component] == "sw_vlun" {
# grok {
# patterns_dir => "/opt/logstash/patterns"
# break_on_match => false
# tag_on_failure => []
# match => [
# "[3par][sw_vlun]", ""
# ]
# }
#}
#if [3par][primary_component] == "sw_vv" {
# grok {
# patterns_dir => "/opt/logstash/patterns"
# break_on_match => false
# tag_on_failure => []
# match => [
# "[3par][sw_vv]", ""
# ]
# }
#}
### END conditionals per 3par_components ###
# BEGIN post-processing actions ###
# apply "achtung" tag
if [3par][class] =~ /(?i)abrt/ or [3par][class] =~ /(?i)alert/ or [3par][class] =~ /(?i)change/ or [3par][class] =~ /(?i)coredump/ or [3par][class] =~ /(?i)dead/ or [3par][class] =~ /(?i)err/ or [3par][class] =~ /(?i)fail/ or [3par][class] =~ /(?i)_removed/ or [local_message] =~ /(?i)degraded/ {
mutate {
add_tag => [ "achtung" ]
}
# apply syslog_level
if [3par][class] =~ /(?i)abrt/ or [3par][class] =~ /(?i)coredump/ or [3par][class] =~ /(?i)dead/ or [3par][class] =~ /(?i)err/ or [3par][class] =~ /(?i)fail/ {
mutate {
add_field => { "syslog_level" => "error" }
}
}
else {
mutate {
add_field => { "syslog_level" => "warning" }
}
}
}
if "dskerr" in [tags] {
grok {
patterns_dir => "/opt/logstash/patterns"
break_on_match => false
tag_on_failure => []
match => [
#pd 254 port a0 on 3:0:2: cmdstat:0x08 (TE_INVALID -- Invalid input parameter), scsistat:0x02 (Check condition), snskey:0x05 (Illegal request), asc/ascq:0x24/0x0 (Invalid field in cdb), info:0x0, cmd_spec:0x0, sns_spec:0x100cd, host:0x0, abort:0, CDB:3C3C0300000000000000 (Read data buffer), blk:0x0, blkcnt 0x0, fru_cd:0x0, LUN:0, LUN_WWN:0000000000000000 after 0.000s, toterr:698, deverr:7
#target SAS 50050CC1020582FA on 1:0:1: cmdstat:0x08 (TE_INVALID -- Invalid input parameter), scsistat:0x02 (Check condition), snskey:0x05 (Illegal request), asc/ascq:0x26/0x0 (Invalid field in parameter list), info:0x0, cmd_spec:0x0, sns_spec:0x0, host:0x0, abort:0, CDB:1C010201B000 (Receive diagnostic), blk:0x0, blkcnt 0x0, fru_cd:0x0, LUN:0, LUN_WWN:0000000000000000 after 0.021s, toterr:3474, deverr:3254
"local_message", "%{DATA:3par_message}(?:: )%{GREEDYDATA:3par_message_verbose}",
"local_message", "%{DATA:3par_message_summary}, %{GREEDYDATA}",
"3par_message", "%{DATA}%{CTD:3par_path}%{GREEDYDATA}"
]
}
if [3par_message_verbose] {
kv {
source => "3par_message_verbose"
field_split => ", "
value_split => ":"
#prefix => "3par_dskerr_"
target => "[3par][dskerr]"
}
}
}
# try to understand (make more fields from) some more interesting events
else if "host_path_change" in [tags] {
grok {
patterns_dir => "/opt/logstash/patterns"
break_on_match => true
tag_on_failure => []
match => [
# Host path status change for ESX0401084: Path to 0:2:3 Lost
"local_message", "Host path status change for %{IPORHOST:3par_sw_host_id_verbose}: Path to %{CTD:3par_path} %{GREEDYDATA:3par_status}"
]
}
if [3par_status] {
if [3par_status] !~ /(?i)Normal/ {
mutate { replace => [ "syslog_level", "error" ] }
}
}
}
else if "comp_state_change" in [tags] {
grok {
patterns_dir => "/opt/logstash/patterns"
break_on_match => true
tag_on_failure => []
match => [
# Port 1:1:1, Host 57(ESX0401084) Normal (Online)
# Magazine 7:11:0, Physical Disk 92 Failed (Invalid, Inquiry Failed)
# Remote Copy Volume 135(VMFS_L041_REPL_7400) Normal
# Remote Copy Group 1(VP7400-PROD01) Normal
# Cage 0, Cage Battery 0 Normal
"local_message", "Port %{CTD:3par_path}(, Host %{NUMBER}\(%{IPORHOST:3par_sw_host_id_verbose}\))? %{WORD:3par_status}( %{GREEDYDATA:3par_status_verbose})?",
"local_message", "Magazine %{CTD}, Physical Disk %{NONNEGINT} %{WORD:3par_status}( %{GREEDYDATA:3par_status_verbose})?",
"local_message", "Cage %{NUMBER}, Cage Battery %{NONNEGINT} %{WORD:3par_status}( %{GREEDYDATA:3par_status_verbose})?",
"local_message", "Remote Copy%{DATA}(?:\)) %{WORD:3par_status}"
]
}
if [3par_status] {
if [3par_status] !~ /(?i)Normal/ {
mutate { replace => [ "syslog_level", "error" ] }
}
}
else if [local_message] {
if [local_message] !~ /(?i)Normal/ {
mutate { replace => [ "syslog_level", "error" ] }
}
}
}
else if "disk_state_change" in [tags] {
grok {
patterns_dir => "/opt/logstash/patterns"
break_on_match => true
tag_on_failure => []
match => [
# pd wwn 2210000A330196E9 changed state from notready to new because disk did not have an assigned PD id during disk validation.
# pd 92 wwn 2210000A330057AA changed state from failed to missing because disk gone event was received for this disk.
"local_message", "%{DATA} wwn %{BASE16NUM:3par_hw_disk} changed state from %{WORD:3par_prior_status} to %{WORD:3par_status} because %{GREEDYDATA:3par_status_verbose}"
]
}
if [3par_status] {
if [3par_status] !~ /(?i)valid/ {
mutate { replace => [ "syslog_level", "error" ] }
}
}
}
else if "alert_change" in [tags] {
grok {
patterns_dir => "/opt/logstash/patterns"
break_on_match => true
tag_on_failure => []
match => [
# Alert 424 changed from state New to Resolved by System
# Alert 1688 changed from state Resolved by System to New
"local_message", "%{DATA} changed from state %{DATA:3par_prior_status} to %{GREEDYDATA:3par_status}"
]
}
if [3par_status] {
if [3par_status] !~ /(?i)Resolved/ {
mutate { replace => [ "syslog_level", "error" ] }
}
}
}
else if "rmm_link" in [tags] {
grok {
patterns_dir => "/opt/logstash/patterns"
break_on_match => true
tag_on_failure => []
match => [
"local_message", "%{DATA} has changed its status to %{WORD:3par_status}"
]
}
if [3par_status] {
if [3par_status] !~ /(?i)UP/ {
mutate { replace => [ "syslog_level", "error" ] }
}
}
}
if [3par_message_summary] {
mutate { replace => [ "local_message", "%{3par_message_summary}" ] }
}
else if [3par_message] {
mutate { replace => [ "local_message", "%{3par_message}" ] }
}
# Promote the flat 3par_* grok captures into the nested [3par] object.
# Fix: four source field names carried a stray trailing "]" (e.g.
# "3par_sw_rmm_group_id]"), so those renames could never match the fields
# actually captured by the sw_rmm_group / sw_rmm_volume groks above.
mutate {
rename => [ "3par_hw_disk", "[3par][hw_disk]" ]
rename => [ "3par_message", "[3par][message]" ]
rename => [ "3par_message_summary", "[3par][message_summary]" ]
rename => [ "3par_message_verbose", "[3par][message_verbose]" ]
rename => [ "3par_path", "[3par][path]" ]
# NOTE(review): the sw_cli grok captures "3par_sql", not "3par_sql_action";
# confirm which field name is intended — as written this rename never fires.
rename => [ "3par_sql_action", "[3par][sql_action]" ]
rename => [ "3par_src_ip", "[3par][src_ip]" ]
rename => [ "3par_status", "[3par][status]" ]
rename => [ "3par_status_verbose", "[3par][status_verbose]" ]
rename => [ "3par_sw_cli_id", "[3par][sw_cli_id]" ]
rename => [ "3par_sw_host_id_verbose", "[3par][sw_host_id_verbose]" ]
rename => [ "3par_sw_rmm_group_id", "[3par][sw_rmm_group_id]" ]
rename => [ "3par_sw_rmm_group_id_verbose", "[3par][sw_rmm_group_id_verbose]" ]
rename => [ "3par_sw_rmm_volume_id", "[3par][sw_rmm_volume_id]" ]
rename => [ "3par_sw_rmm_volume_id_verbose", "[3par][sw_rmm_volume_id_verbose]" ]
rename => [ "3par_sw_user_conn_id", "[3par][sw_user_conn_id]" ]
rename => [ "3par_user", "[3par][user]" ]
rename => [ "3par_user_priviliges", "[3par][user_priviliges]" ]
rename => [ "3par_user_role", "[3par][user_role]" ]
rename => [ "sw_rmm_link_id", "[3par][rmm_link_id]" ]
rename => [ "sw_rmm_link_id_verbose", "[3par][rmm_link_id_verbose]" ]
}
if [local_message] {
# noalert
if [local_message] =~ /SQLite DB error executing BEGIN TRANSACTION/ or [local_message] =~ /removevv -expired/ or [local_message] =~ /remove_expired_vvs/ or [local_message] =~ /Error: Remote copy is not started/ or [local_message] =~ /Process vasa_https_gw has exited on node/ or [local_message] =~ /\/tmp\/excavate/ or [local_message] =~ /\/root\/cnDeleteme/ or [local_message] =~ /\/var\/core\/proc\/saved/ or [local_message] =~ /Unable to get device list on port/ or [local_message] =~ /(?i)Permission denied/ {
mutate { add_tag => [ "noalert" ] }
}
# spam
if [local_message] =~ /getqos : Permission denied/ or [local_message] =~ /Error: no criteria listed/ or [local_message] =~ /Error: Flash Cache is not present/ or [local_message] == "{setcurrentdomain -} {}" or [local_message] =~ /: connection reset by peer/ {
mutate { add_tag => [ "spam" ] }
}
}
# clean-up
mutate {
remove_field => [ "3par_components", "3par_component_0", "3par_component_1", "3par_component_2", "3par_component_3", "3par_sql_message", "3par_dskerr_message", "3par_dskerr_message_ext", "3par_sw_cli_command" ]
remove_tag => [ "%{[3par][class]}" ]
}
# END post-processing actions ###
}
}
filter {
if "brocade" in [tags] {
#codec => plain { charset => "ISO-8859-1" }
# Split a raw brocade raslogd syslog line into timestamp, event id, and the
# remainder (parsed further by the grok that follows).
grok {
patterns_dir => "/opt/logstash/patterns"
break_on_match => true
match => [
# http://www.brocade.com/downloads/documents/product_manuals/B_SAN/FOS_MessageRef_v710.pdf
# RAS
## <timestamp>, [<Event ID>], <Sequence Number>, <Flags>, <Severity>, <Switch name>, <Event-specific information>
# AUDIT
## <Sequence Number> AUDIT, <timestamp>, [<Event ID>], <Severity>, <Event Class>, <User ID>/<Role>/<IP address>/<Interface>/<Application Name>, <Admin Domain>/<Switch name>, <Reserved field for future expansion>, <Event-specific information>
#2015-04-24T14:27:44-03:00 10.5.1.32 raslogd: 2015/04/24-20:09:24, [SEC-1203], 7769, WWN 10:00:50:eb:1a:0f:2d:ef | FID 128, INFO, fabA-VPid32, Login information: Login successful via TELNET/SSH/RSH. IP Addr: 172.22.33.71
#2015-04-24T14:38:18-03:00 10.5.1.32 raslogd: AUDIT, 2015/04/24-20:19:58 (GMT), [RAS-2001], INFO, SYSTEM, administrator/admin/172.22.33.71/ssh/CLI, ad_0/fabA-VPid32/FID 128, 7.0.2c, , , , , , , Audit message log is enabled.
#2015-04-24T13:41:51-03:00 172.20.8.40 raslogd: 2015/04/24-15:51:08, [CONF-1001], 31159, WWN 10:00:00:05:33:7e:2f:5a | FID 128, INFO, hx-fabAid40, configUpload completed successfully for all.
#2015-04-24T14:34:00-03:00 172.20.8.40 raslogd: AUDIT, 2015/04/24-16:43:17 (GMT), [RAS-2001], INFO, NONE, admin/admin/172.22.33.71/ssh/CLI, ad_0/hx-fabAid40/FID 128, , Audit message log is enabled.
#2015-04-30T11:26:55-03:00 10.5.1.33 raslogd: 2015/04/30-14:07:37, [TRCK-1001], 84672130, WWN 10:00:50:eb:1a:0e:eb:b6 | FID 128, INFO, fabB-VPid33, Successful login by user administrator.
#2015-04-30T11:26:56-03:00 10.5.1.33 raslogd: 2015/04/30-14:07:39, [SEC-1203], 84672131, WWN 10:00:50:eb:1a:0e:eb:b6 | FID 128, INFO, fabB-VPid33, Login information: Login successful via TELNET/SSH/RSH. IP Addr: 172.22.33.71
#2015-04-30T11:26:56-03:00 10.5.1.33 raslogd: AUDIT, 2015/04/30-14:07:39 (GMT), [SEC-3020], INFO, SECURITY, administrator/admin/172.22.33.71/ssh/CLI, ad_0/fabB-VPid33/FID 128, 7.0.2c, , , , , , , Event: login, Status: success, Info: Successful login attempt via REMOTE, IP Addr: 172.22.33.71.
# Fixes vs. the original pattern:
#  * capture was named "brocade_timesamp" (typo) while the date filter
#    below reads [brocade_timestamp] — the timestamp was never parsed
#  * "%{DATA:brocade_event_id }" had a trailing space inside the field
#    name, producing a field literally named "brocade_event_id " that the
#    later rename to [brocade][event_id] could never find
#  * the optional " (GMT)" suffix is now matched OUTSIDE the timestamp
#    capture, so the AUDIT variant's value matches "YYYY/MM/dd-HH:mm:ss"
"message", "%{TIMESTAMP_ISO8601:remote_timestamp} %{SYSLOGHOST} %{MYSYSLOGPROG}: ((?:%{WORD:syslog_facility},%{SPACE}))?(?:(?<brocade_timestamp>%{YEAR}/%{MONTHNUM}/%{MONTHDAY}-%{TIME})(?:%{SPACE}\(%{TZ}\))?)?, \[%{DATA:brocade_event_id}\], %{GREEDYDATA:brocade_message}"
]
}
grok {
patterns_dir => "/opt/logstash/patterns"
break_on_match => true
match => [
# 31165, WWN 10:00:00:05:33:7e:2f:5a | FID 128, INFO, hx-fabAid40, Audit message class configuration has been changed to 1,2,3,4,5.
"brocade_message", "%{NUMBER:brocade_seq}, WWN %{WWN:brocade_wwn} \| FID %{NUMBER:brocade_fabric_id}, %{WORD:syslog_level}, %{IPORHOST:brocade_hostname}, %{GREEDYDATA:local_message}",
# INFO, NONE, admin/admin/172.22.33.71/ssh/CLI, ad_0/hx-fabAid40/FID 128, , Audit message log is enabled.
# INFO, SYSTEM, administrator/admin/172.22.33.71/ssh/CLI, ad_0/fabA-VPid32/FID 128, 7.0.2c, , , , , , , Audit message class configuration has been changed to 1,2,3,4,5,6,7.
# INFO, SECURITY, \\\\236\\xE9e/admin/10.0.10.47/http/WebTools, ad_0/fabA-VPid32/FID 128, 7.0.2c, , , , , , , Event: login, Status: failed, Info: Failed login attempt via HTTP, IP Addr: 10.0.10.47.
"brocade_message", "%{WORD:syslog_level}, %{WORD:syslog_facility}, %{DATA:brocade_user}/%{WORD:brocade_user_role}/%{IPORHOST:brocade_user_connection_from}/%{WORD:brocade_user_connection_interface}/%{WORD:brocade_user_application}, %{WORD:brocade_admin_domain}/%{IPORHOST:brocade_hostname}/FID %{NUMBER:brocade_fabric_id}, %{DATA:brocade_fos_version}, %{GREEDYDATA:local_message}"
]
}
if [local_message] {
if [local_message] =~ /^, , , , , , Event: / {
mutate {
# , , , , , , Event: login, Status: success, Info: Successful login attempt via REMOTE, IP Addr: 172.22.33.71.
gsub => [ "local_message", "^, , , , , , ", "" ]
}
kv {
# Event: login, Status: success, Info: Successful login attempt via REMOTE, IP Addr: 172.22.33.71.
source => "local_message"
field_split => ","
value_split => ":"
target => "[brocade][event]"
}
}
if [local_message] =~ /Security violation/ {
mutate { replace => [ "syslog_level", "error" ] }
}
}
# Parse the switch-local timestamp extracted by the grok above.
if [brocade_timestamp] {
date {
timezone => "UTC"
# 2015/06/01-17:03:59
# 2015/04/24-16:43:17 (GMT)
# NOTE(review): as the file stands this block never fires — the grok above
# captures the field as "brocade_timesamp" (typo), so [brocade_timestamp]
# does not exist. Even with the name fixed, the AUDIT variant's captured
# value ends in " (GMT)", which "YYYY/MM/dd-HH:mm:ss" does not match; the
# " (GMT)" suffix must be excluded from the grok capture for this to work.
match => [ "brocade_timestamp" , "YYYY/MM/dd-HH:mm:ss" ]
remove_field => [ "brocade_timestamp", "remote_timestamp" ]
}
}
mutate {
remove_field => [ "brocade_message" ]
rename => [ "brocade_admin_domain", "[brocade][admin_domain]" ]
rename => [ "brocade_event_id", "[brocade][event_id]" ]
rename => [ "brocade_fabric_id", "[brocade][fabric_id]" ]
rename => [ "brocade_flags", "[brocade][flags]" ]
rename => [ "brocade_fos_version", "[brocade][fos_version]" ]
rename => [ "brocade_hostname", "[brocade][hostname]" ]
#rename => [ "brocade_message", "[brocade][message]" ]
rename => [ "brocade_seq", "[brocade][seq]" ]
rename => [ "brocade_user", "[brocade][user][user]" ]
rename => [ "brocade_user_application", "[brocade][user][application]" ]
rename => [ "brocade_user_connection_from", "[brocade][user][connection_from]" ]
rename => [ "brocade_user_connection_interface", "[brocade][user][connection_interface]" ]
rename => [ "brocade_user_role", "[brocade][user][role]" ]
rename => [ "brocade_wwn", "[brocade][wwn]" ]
}
}
}
filter {
  # Cisco UCS syslog parsing. Events are tagged "ucs" (and optionally
  # "ucsmanager") by the host-based classification earlier in the pipeline.
  if "ucs" in [tags] {
    if "ucsmanager" in [tags] {
      grok {
        patterns_dir => "/opt/logstash/patterns"
        break_on_match => true
        match => [
          # UCS Manager fault format:
          # http://www.cisco.com/c/en/us/td/docs/unified_computing/ucs/sw/ucsm_syslog/b_Monitoring_Cisco_UCSM_Using_Syslog/b_Monitoring_Cisco_UCSM_Using_Syslog_chapter_01.html
          # 2015-04-21T08:15:15-03:00 10.0.1.10 svc_cor_dme[7688]: [F10000336][critical][speed-reduced][sys/switch-A/stor-part-bootflash] Disk read speed for partition bootflash is less than 75 MB/s
          "message", "%{TIMESTAMP_ISO8601:remote_timestamp} %{SYSLOGHOST} %{PROG:message_program}(?:\[%{POSINT:message_program_pid}\])?: \[%{WORD:ucs_fault_id}\]\[%{WORD:ucsm_severity}\]\[%{DATA:ucs_code}\]\[%{DATA:ucs_device}\] %{GREEDYDATA:message_syslog}"
        ]
      }
    }
    else {
      grok {
        patterns_dir => "/opt/logstash/patterns"
        break_on_match => true
        match => [
          #2015-04-20T14:08:57-03:00 10.5.3.252 2015 Apr 20 17:08:56 UTC: %AUTHPRIV-5-SYSTEM_MSG: root : TTY=console ; PWD=/var/sysmgr/sysmgr-subproc ; USER=root ; COMMAND=/sbin/sysctl -q -w vm.drop_caches=3 - sudo
          #2015-04-20T14:22:24-03:00 10.5.3.252 2015 Apr 20 17:22:23 UTC: %DAEMON-6-SYSTEM_MSG: DHCPREQUEST for 127.6.224.15 from a8:9d:21:92:96:0e via vlan4044 - dhcpd
          #2015-04-20T14:22:30-03:00 10.5.3.252 2015 Apr 20 17:22:29 UTC: %LOCAL7-5-SYSTEM_MSG: Starting reconfiguration - xinetd[4906]
          #2015-04-20T14:22:30-03:00 10.5.3.252 2015 Apr 20 17:22:29 UTC: %LOCAL7-3-SYSTEM_MSG: Server /usr/sbin/in.rexecd is not executable [line=12] - xinetd[4906]
          #2015-04-20T14:22:30-03:00 10.5.3.252 2015 Apr 20 17:22:29 UTC: %LOCAL7-4-SYSTEM_MSG: Bad service flag: IPv4 [line=12] - xinetd[4906]
          #2015-04-20T14:22:30-03:00 10.5.3.252 2015 Apr 20 17:22:29 UTC: %DAEMON-3-SYSTEM_MSG: exec disabled, removing - xinetd[4906]
          "message", "%{TIMESTAMP_ISO8601:remote_timestamp} %{SYSLOGHOST} %{YEAR} %{MONTH} %{MONTHDAY} %{TIME} %{TZ}: \%%{WORD:syslog_facility}-%{NONNEGINT:ucsm_severity}-%{WORD:ucs_code}: %{DATA:message_syslog}?( - %{WORD:message_program}(\[%{NUMBER:message_program_pid}\])?)"
        ]
      }
    }
    mutate {
      rename => [ "ucs_fault_id", "[ucs][fault_id]" ]
      rename => [ "ucsm_severity", "[ucs][ucsm_severity]" ]
      rename => [ "ucs_code", "[ucs][code]" ]
      rename => [ "ucs_device", "[ucs][device]" ]
    }
    #0 Emergency emerg (panic) Emergency messages indicate that the system is unusable.
    #1 Alert alert Alert messages indicate that action must be taken immediately.
    #2 Critical crit Critical messages indicate conditions that should be corrected immediately.
    #3 Error err Error messages indicate non-urgent failures.
    #4 Warning warning (warn) Warning messages indicate that an error will occur if action is not taken.
    #5 Notice notice Notice messages indicate events that are unusual but not error conditions.
    #6 Informational info Informational messages are associated with normal operational behavior.
    #7 Debug debug Debug messages are useful to developers for debugging the application.
    if [ucs][ucsm_severity] {
      # BUG FIX: grok captures are strings, so the original integer
      # comparisons ([ucs][ucsm_severity] == 0) could never be true and
      # syslog_level was never set for UCS events. Compare as strings.
      # NOTE(review): on the "ucsmanager" path the severity is captured as a
      # word (e.g. "critical"), which this numeric mapping does not cover —
      # TODO confirm and extend if textual severities should also map.
      if [ucs][ucsm_severity] == "0" {
        mutate {
          add_field => { "syslog_level" => "emerg" }
          add_tag => "achtung"
        }
      }
      else if [ucs][ucsm_severity] == "1" {
        mutate {
          add_field => { "syslog_level" => "alert" }
          add_tag => "achtung"
        }
      }
      else if [ucs][ucsm_severity] == "2" {
        mutate {
          add_field => { "syslog_level" => "crit" }
          add_tag => "achtung"
        }
      }
      else if [ucs][ucsm_severity] == "3" {
        mutate {
          add_field => { "syslog_level" => "err" }
          add_tag => "achtung"
        }
      }
      else if [ucs][ucsm_severity] == "4" {
        mutate {
          add_field => { "syslog_level" => "warn" }
          add_tag => "achtung"
        }
      }
      else if [ucs][ucsm_severity] == "5" {
        mutate {
          add_field => { "syslog_level" => "notice" }
        }
      }
      else if [ucs][ucsm_severity] == "6" {
        mutate {
          add_field => { "syslog_level" => "info" }
        }
      }
      else if [ucs][ucsm_severity] == "7" {
        mutate {
          add_field => { "syslog_level" => "debug" }
        }
      }
    }
  }
}
filter {
  # Cisco Nexus (NX-OS) syslog parsing. Events are tagged "nexus" upstream
  # by the host-based classification rules.
  if "nexus" in [tags] {
    grok {
      patterns_dir => "/opt/logstash/patterns"
      break_on_match => true
      match => [
        # The syslog format on a remote-logging server is:
        # month dd hh:mm:ss IP-addr-switch : year month day hh:mm:ss Timezone: facility-severity-MNEMONIC description
        # or
        # month dd hh:mm:ss IP-addr-switch : year month day hh:mm:ss Timezone: facility-SLOTnumber-severity-MNEMONIC description
        # or
        # month dd hh:mm:ss IP-addr-switch : year month day hh:mm:ss Timezone: facility-STANDBY-severity-MNEMONIC description
        #2015-05-07T09:12:39-03:00 10.90.9.11 2015 May 7 08:12:39 AST: %ETHPORT-3-IF_ERROR_VLANS_ERROR: VLANs 41 on Interface port-channel19 are in error state. (Reason: Vlan is not allowed on Peer-link)
        #2015-05-07T09:27:34-03:00 10.90.9.11 2015 May 7 08:27:34 AST: %VSHD-5-VSHD_SYSLOG_CONFIG_I: Configured from vty by admin on 10.30.1.40@pts/0
        #2015-05-06T12:51:01-03:00 10.90.9.11 2015 May 6 11:51:01 AST: %LLDP-FEX108-5-SERVER_REMOVED: Server with Chassis ID 0022.681e.e3cc Port ID 0022.681e.e3cc on local port Eth108/1/1 has been removed
        #2015-05-06T13:09:29-03:00 10.90.9.11 2015 May 6 12:09:29 AST: last message repeated 1 time
        #2015-05-06T13:17:26-03:00 10.90.9.11 2015 May 6 12:17:26 AST: %ARP-3-DUP_VADDR_SRC_IP: arp [3952] Source address of packet received from 0000.0c07.ac01 on Vlan54(Ethernet3/20) is duplicate of local virtual ip, 10.5.4.1
        #2015-05-07T16:10:47-03:00 10.90.9.11 2015 May 7 15:10:47 AST: %ETH_PORT_CHANNEL-5-CREATED: port-channel10 created
        # BUG FIX: severity is NONNEGINT (not POSINT) so that severity 0
        # (emergency) messages match this structured pattern instead of
        # falling through to the catch-all below and losing facility,
        # severity, and the "achtung" alert tag.
        "message", "%{TIMESTAMP_ISO8601:remote_timestamp} %{SYSLOGHOST} %{YEAR} %{MONTH} (%{SPACE})?%{MONTHDAY} %{TIME} %{TZ}: \%%{WORD:syslog_facility}-(%{DATA:nexus_slot}-)?%{NONNEGINT:syslog_severity}-%{DATA:nexus_mnemonic}: %{GREEDYDATA:local_message}",
        "message", "%{TIMESTAMP_ISO8601:remote_timestamp} %{SYSLOGHOST} %{YEAR} %{MONTH} (%{SPACE})?%{MONTHDAY} %{TIME} %{TZ}: %{GREEDYDATA:local_message}"
      ]
    }
    # Best-effort extraction of interface names anywhere in the message
    # (both patterns are tried; failures are not tagged).
    grok {
      patterns_dir => "/opt/logstash/patterns"
      break_on_match => false
      tag_on_failure => []
      match => [
        "local_message", "(%{DATA})?(?<nexus_port>Ethernet%{NUMBER}/%{NUMBER}/%{NUMBER})%{GREEDYDATA}",
        "local_message", "(%{DATA})?(?<nexus_port_channel>port-channel%{NUMBER})%{GREEDYDATA}"
      ]
    }
    if [local_message] {
      # AAA authentication messages carry user and source IP.
      if [local_message] =~ /pam_aaa:Authentication / {
        mutate {
          gsub => [ "local_message", "^pam_aaa:", "" ]
        }
        grok {
          patterns_dir => "/opt/logstash/patterns"
          break_on_match => true
          tag_on_failure => []
          match => [
            "local_message", "%{DATA} user %{USERNAME:nexus_user} from %{IPORHOST:nexus_src_ip} - %{MYSYSLOGPROG}"
          ]
        }
      }
      # Configuration-change messages carry user, source IP, and console.
      if [local_message] =~ /^Configured from/ {
        grok {
          patterns_dir => "/opt/logstash/patterns"
          break_on_match => true
          tag_on_failure => []
          match => [
            "local_message", "Configured from %{WORD:nexus_console_type} by %{USERNAME:nexus_user} on %{IPORHOST:nexus_src_ip}@%{GREEDYDATA:nexus_console}"
          ]
        }
      }
      mutate {
        rename => [ "nexus_slot", "[nexus][slot]" ]
        rename => [ "nexus_mnemonic", "[nexus][mnemonic]" ]
        rename => [ "nexus_console", "[nexus][console]" ]
        rename => [ "nexus_console_type", "[nexus][console_type]" ]
        rename => [ "nexus_user", "[nexus][user]" ]
        rename => [ "nexus_src_ip", "[nexus][src_ip]" ]
        rename => [ "nexus_port", "[nexus][port]" ]
        rename => [ "nexus_port_channel", "[nexus][port_channel]" ]
      }
    }
    # Canonical interface: prefer the physical port over the port-channel.
    # (Replaces two redundant nested-if blocks with identical behavior.)
    if [nexus][port] {
      mutate {
        add_field => { "[nexus][interface]" => "%{[nexus][port]}" }
      }
    }
    else if [nexus][port_channel] {
      mutate {
        add_field => { "[nexus][interface]" => "%{[nexus][port_channel]}" }
      }
    }
    # 0 - emergency System unusable
    # 1 - alert Immediate action needed
    # 2 - critical Critical condition
    # 3 - error Error condition
    # 4 - warning Warning condition
    # 5 - notification Normal but significant condition
    # 6 - informational Informational message only
    # 7 - debugging Appears during debugging only
    if [syslog_severity] {
      if [syslog_severity] == "0" {
        mutate {
          add_field => { "syslog_level" => "emerg" }
          add_tag => "achtung"
        }
      }
      else if [syslog_severity] == "1" {
        mutate {
          add_field => { "syslog_level" => "alert" }
          add_tag => "achtung"
        }
      }
      else if [syslog_severity] == "2" {
        mutate {
          add_field => { "syslog_level" => "crit" }
          add_tag => "achtung"
        }
      }
      else if [syslog_severity] == "3" {
        mutate {
          add_field => { "syslog_level" => "err" }
          add_tag => "achtung"
        }
      }
      else if [syslog_severity] == "4" {
        mutate {
          add_field => { "syslog_level" => "warn" }
          add_tag => "achtung"
        }
      }
      else if [syslog_severity] == "5" {
        mutate {
          add_field => { "syslog_level" => "notice" }
        }
      }
      else if [syslog_severity] == "6" {
        mutate {
          add_field => { "syslog_level" => "info" }
        }
      }
      else if [syslog_severity] == "7" {
        mutate {
          add_field => { "syslog_level" => "debug" }
        }
      }
      mutate {
        remove_field => [ "syslog_severity" ]
      }
    }
  }
}
filter {
  # Riverbed Steelhead syslog parsing. Events are tagged "riverbed" upstream.
  if "riverbed" in [tags] {
    grok {
      patterns_dir => "/opt/logstash/patterns"
      break_on_match => true
      match => [
        #2015-04-21T15:44:01-03:00 vp-sha505001 /USR/SBIN/CROND[18876]: (root) CMD (/sbin/webasd_watchdog.sh > /dev/null 2>&1)
        #2015-04-21T15:50:01-03:00 vp-sha505001 periodic_raidcheck: Initial state: Idle
        #2015-04-21T15:58:08-03:00 vp-sha505001 sport[17247]: [smbsign_sfe.WARN] 178929737 {172.18.1.167:56948 10.0.0.203:139} Could not establish secure inner channel.
        #2015-04-21T15:32:47-03:00 vp-sha505001 sport[17247]: [splice/probe.INFO] 0 {- -} (locl: 10.254.254.11:0 clnt: 172.22.2.213:49488 serv: 10.0.0.20:4560) init
        #2015-04-21T14:44:47-03:00 vp-sha505001 sport[17247]: [splice/server.INFO] 178910314 {172.20.0.203:63615 10.0.0.203:389} fini client 172.20.0.203:63615 server 10.0.0.203:389 cfe 10.254.254.27:40588 sfe 10.254.254.11:7800 app TCP
        #2015-04-21T14:51:13-03:00 vp-sha505001 qosd[27273]: [qosd.NOTICE]: navl_conn_init failed on: 1002225466 172.16.100.157:20927->172.20.16.11:389 sport:0 tcp:1 err:17
        #2015-04-21T14:48:24-03:00 vp-sha505001 kernel: [intercept.INFO] Default gateway on the lan side while trying to send pure syn 172.20.1.117:65430 -> 10.7.1.12:3260
        #2015-04-21T15:55:01-03:00 vp-sha505001 kernel: [intercept.INFO] probe response pxy 10.254.254.27:7800 172.20.16.11:88 -> 172.16.100.114:3612 has a <nonexistent> entry
        # NOTE(review): the "." between syslog_facility and syslog_level is an
        # unescaped regex dot (matches any char) — appears to work because
        # DATA is lazy, but consider "\." — confirm before tightening.
        "message", "%{TIMESTAMP_ISO8601:remote_timestamp} %{SYSLOGHOST} %{MYSYSLOGPROG}: ?(\[%{DATA:syslog_facility}.%{WORD:syslog_level}\](?::)?)?(%{SPACE})?(?:%{NUMBER})?(%{SPACE})?(\{%{DATA:riverbed_details_A}\})?(%{SPACE})?(\(%{DATA:riverbed_details_B}\))?(%{SPACE})?(%{GREEDYDATA:local_message})?",
        "message", "%{TIMESTAMP_ISO8601:remote_timestamp} %{SYSLOGHOST} %{GREEDYDATA:message_syslog}"
      ]
    }
    # Discard placeholder connection blocks ("- -", "[::]:0 [::]:0").
    if [riverbed_details_A] {
      if [riverbed_details_A] == "- -" or [riverbed_details_A] == "[::]:0 [::]:0" {
        mutate {
          remove_field => [ "riverbed_details_A" ]
        }
      }
    }
    if [riverbed_details_B] {
      if [riverbed_details_B] == "root" {
        mutate {
          remove_field => [ "riverbed_details_B" ]
        }
      }
    }
    # Extract src/tgt/pxy endpoints from whichever detail block survived,
    # falling back to scanning the free-text message.
    if [riverbed_details_A] {
      grok {
        patterns_dir => "/opt/logstash/patterns"
        break_on_match => true
        tag_on_failure => []
        match => [
          #172.20.0.203:63615 10.0.0.203:389
          "riverbed_details_A", "%{IP:riverbed_src_ip}:%{NUMBER:riverbed_src_port} %{IP:riverbed_tgt_ip}:%{NUMBER:riverbed_tgt_port}"
        ]
      }
    }
    else if [riverbed_details_B] {
      grok {
        patterns_dir => "/opt/logstash/patterns"
        break_on_match => true
        tag_on_failure => []
        match => [
          #locl: 10.254.254.11:0 clnt: 172.22.2.213:49488 serv: 10.0.0.20:4560
          "riverbed_details_B", "locl: %{IP:riverbed_pxy_ip}:%{NUMBER:riverbed_pxy_port} clnt: %{IP:riverbed_src_ip}:%{NUMBER:riverbed_src_port} serv: %{IP:riverbed_tgt_ip}:%{NUMBER:riverbed_tgt_port}"
        ]
      }
    }
    else if [local_message] {
      grok {
        patterns_dir => "/opt/logstash/patterns"
        break_on_match => true
        tag_on_failure => []
        match => [
          #probe response pxy 10.254.254.27:7800 172.20.16.11:88 -> 172.16.100.114:3612 has a <nonexistent> entry
          "local_message", "(%{DATA})?%{IP:riverbed_pxy_ip}:%{NUMBER:riverbed_pxy_port}(%{DATA})?%{IP:riverbed_src_ip}:%{NUMBER:riverbed_src_port}(%{DATA})?%{IP:riverbed_tgt_ip}:%{NUMBER:riverbed_tgt_port}(%{GREEDYDATA})",
          #Default gateway on the lan side while trying to send pure syn 172.20.1.117:65430 -> 10.7.1.12:3260
          #unknown VLAN ID for connection (client: 172.20.0.10:6577 server: 10.0.0.191:80). defaulting to 0.
          #navl_conn_init failed on: 1002225466 172.16.100.157:20927->172.20.16.11:389 sport:0 tcp:1 err:17
          "local_message", "(%{DATA})?%{IP:riverbed_src_ip}:%{NUMBER:riverbed_src_port}(%{DATA})?%{IP:riverbed_tgt_ip}:%{NUMBER:riverbed_tgt_port}(%{GREEDYDATA})"
        ]
      }
    }
    # Bare splice lifecycle messages carry no information worth keeping.
    if [local_message] {
      if [local_message] == "init" or [local_message] == "fini" {
        mutate { add_tag => [ "spam" ] }
      }
    }
    mutate {
      gsub => [ "local_message", "^- {- -}", "" ]
      remove_field => [ "riverbed_details_A", "riverbed_details_B" ]
      # BUG FIX: each source field name had a stray trailing "]"
      # (e.g. "riverbed_src_ip]"), so none of these renames ever fired and
      # the fields stayed flat instead of nesting under [riverbed].
      rename => [ "riverbed_src_ip", "[riverbed][src_ip]" ]
      rename => [ "riverbed_tgt_ip", "[riverbed][tgt_ip]" ]
      rename => [ "riverbed_pxy_ip", "[riverbed][pxy_ip]" ]
      rename => [ "riverbed_src_port", "[riverbed][src_port]" ]
      rename => [ "riverbed_tgt_port", "[riverbed][tgt_port]" ]
      rename => [ "riverbed_pxy_port", "[riverbed][pxy_port]" ]
    }
  }
}
filter {
  # Final normalization: drop noise, promote the parsed payload into the
  # canonical "message" field, and resolve the device-reported timestamp.
  if [message] =~ /last message repeated/ or "spam" in [tags] {
    # "last message repeated" lines and anything tagged spam by an earlier
    # filter (e.g. riverbed init/fini splices) carry no useful information.
    drop {}
  }
  if [message_syslog] {
    # A device-specific filter extracted the real payload; make it the
    # event's message and discard the intermediate field(s).
    mutate {
      replace => [ "message", "%{message_syslog}" ]
      remove_field => [ "message_syslog" ]
    }
    if [local_message] {
      mutate {
        remove_field => [ "local_message" ]
      }
    }
  }
  else if [local_message] {
    mutate {
      replace => [ "message", "%{local_message}" ]
      remove_field => [ "local_message" ]
    }
  }
  mutate {
    remove_tag => [ "unclassified" ]
  }
  if [remote_timestamp] {
    # Prefer the timestamp reported by the device itself as the event time.
    date {
      match => [ "remote_timestamp", "ISO8601" ]
      remove_field => [ "remote_timestamp" ]
    }
  }
}
filter {
  # Alert throttling: rate-limit repeated error-level messages so the email
  # output (see output section) is not flooded by a single noisy host.
  # Events marked "noalert" never reach the email output, so they are
  # excluded here as well.
  if [syslog_level] and [syslog_level] =~ /(?i)err/ and "noalert" not in [tags] {
    if [message] and [host] {
      throttle {
        # before_count of -1 disables the "before" threshold; after_count
        # of 1 tags every event after the first occurrence inside the
        # period — so only the first instance per key per hour escapes
        # untagged (and thus triggers an email).
        before_count => -1
        after_count => 1
        #before_count => 3
        #after_count => 5
        # One-hour window, keyed on host+message so distinct errors are
        # throttled independently.
        period => 3600
        key => "%{host}%{message}"
        add_tag => "throttled"
      }
    }
    else {
      # Not expected to happen (every event should have message and host);
      # tag so such events can be found and investigated.
      mutate {
        add_tag => "throttle_debug"
      }
    }
  }
}
output {
  # Index every event into the local Elasticsearch instance.
  # NOTE(review): "host"/"workers" is the legacy (pre-2.x) option style for
  # this plugin — confirm against the installed Logstash version.
  elasticsearch {
    host => "127.0.0.1"
    workers => "2"
  }
  # Debug output, normally disabled.
  #stdout {
  #  codec => rubydebug
  #}
  # Email alerts for error-level events that were neither throttled by the
  # throttle filter nor explicitly marked "noalert".
  if [syslog_level] and "throttled" not in [tags] and "noalert" not in [tags] {
    if [syslog_level] =~ /(?i)err/ {
      # NOTE(review): outputs run synchronously in the pipeline; if the SMTP
      # relay hangs, event processing can stall — worth checking against the
      # reported "silently stops processing events" symptom.
      email {
        subject => "Logstash alert: syslog %{syslog_level} from %{host}"
        from => "logstash@MY.DOMAIN"
        replyto => "no-reply@MY.DOMAIN"
        to => "SAD_RECIPIENT"
        via => smtp
        options => {
          smtpIporHost => "MY.SMTP.RELAY"
        }
        body => "Host: %{host}\nDate: %{@timestamp}\nMessage: %{message}"
      }
    }
  }
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment