Skip to content

Instantly share code, notes, and snippets.

@samuraiii
Created April 7, 2016 09:11
Show Gist options
  • Star 1 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save samuraiii/af14ae427a8aaaf8ecd552efe4899378 to your computer and use it in GitHub Desktop.
Save samuraiii/af14ae427a8aaaf8ecd552efe4899378 to your computer and use it in GitHub Desktop.
2016-04-07 logstash.conf
# Inputs: an SSL-secured beats listener plus a local file that is watched
# for the Nagios "is logstash alive" self-check.
input {
  beats {
    port            => 6023
    ssl             => true
    ssl_certificate => "/etc/logstash/lumberjack.crt"
    ssl_key         => "/etc/logstash/lumberjack.key"
  }
  ## for the logstash nagios check
  file {
    path           => "/tmp/logstash-monitor-logstash.in"
    type           => "nagios_check_logstash"
    start_position => "beginning"
  }
}
###############################################################################
## Filters
###############################################################################
filter {
# Stamp every event with the index-template version. Stored under
# [@metadata] so it steers routing without becoming a document field.
mutate {
  add_field => { "[@metadata][index_version]" => "002" }
}
if [type] == "nagios_check_logstash" or [@metadata][beat] == "topbeat" or [@metadata][beat] == "packetbeat" {
# intentionally empty: the nagios self-check and topbeat/packetbeat events
# bypass the whole parsing pipeline contained in the else-branch below
}
else {
# Truncate messages larger than 10 KiB and tag them so they can be found.
ruby {
  # FIX: use plain Ruby string interpolation only. The previous code passed
  # the already-interpolated string through event.sprintf(), which would
  # re-expand any literal "%{field}" text occurring inside the
  # (user-controlled) message body and corrupt the truncated payload.
  code => '(m = event["message"][0..10239]; p = event["message"].bytesize; event["message"] = "#{m}... (truncated - size #{p}B - message size exceeded!!!)") if event["message"].bytesize > 10240'
}
if [message] =~ /\.\.\. \(truncated - size \d+B - message size exceeded\!\!\!\)$/ {
  mutate {
    add_tag => [ "truncated" ]
  }
}
# Beats shippers may carry the real event type in [@metadata][type];
# promote it to the top-level "type" field.
if [@metadata][type] {
  mutate {
    replace => [ "type", "%{[@metadata][type]}" ]
  }
}
# Common enrichment: record which indexer/instance handled the event and
# when, flatten multi-line messages to one line, and strip noise tags left
# behind by the inputs and earlier grok stages.
mutate {
  add_field => {
    "indexer"                => "elastic01"
    "ls_instance"            => "logstash"
    "index_time"             => "%{+YYYY-MM-dd'T'HH:mm:ss}"
    "[@metadata][timestamp]" => "%{+YYYY-MM-dd'T'HH:mm:ss}"
  }
  gsub => [ "message", "\n", " " ]
  remove_tag => [
    "_grokparsefailure_sysloginput",
    "_grokparsefailure",
    "beats_input_codec_plain_applied"
  ]
}
if [type] == "logstash" {
# intentionally empty: logstash's own logs need no extra parsing, but the
# branch must exist so they do not fall through to the final drop{} else
}
else if [type] == "kibana" {
if [message] =~ /^{/ {
json {
source => "message"
}
}
mutate {
gsub =>
[
"timestamp",
"[,.][0-9]+$",
""
]
}
date {
match =>
[
"timestamp",
"ISO8601"
]
}
}
else if [type] == "elasticsearch" {
grok {
patterns_dir => "/etc/logstash/patterns"
match => {
"message" =>
[
"\[%{TIMESTAMP_ISO8601:[@metadata][timestamp]}\] \[%{LOGLEVEL:loglevel}\] \[%{NOTSPACE:action}\s+\] \[%{PROG:program}\] \[%{NOTSPACE:[es][index]}\]\[%{NUMBER:[es][shard]}\]\s*%{MESSAGE:[@metadata][message]}"
]
}
overwrite => [ "[@metadata][timestamp]" ]
}
mutate {
convert => {
"[es][shard]" => "integer"
}
gsub =>
[
"[@metadata][timestamp]",
"[,.][0-9]+$",
""
]
}
date {
match =>
[
"[@metadata][timestamp]",
"yyyy-MM-dd HH:mm:ss",
"ISO8601"
]
}
}
else if [type] == "anaconda" {
grok {
patterns_dir => "/etc/logstash/patterns"
match => {
"message" =>
[
"\[%{TIMESTAMP_ISO8601:[@metadata][timestamp]}\] %{LOGLEVEL:loglevel} *:? %{PROG:program}: %{MESSAGE:[@metadata][message]}",
"%{TIME:[@metadata][timestamppp]} %{LOGLEVEL:loglevel} *: %{MESSAGE:[@metadata][message]}"
]
}
overwrite => [ "[@metadata][timestamp]" ]
}
if [@metadata][timestamppp] {
mutate {
replace =>
[
"[@metadata][timestamp]",
"%{+YYYY-MM-dd} %{[@metadata][timestamppp]}"
]
}
}
mutate {
gsub =>
[
"[@metadata][timestamp]",
"[,.][0-9]+$",
""
]
}
date {
match =>
[
"[@metadata][timestamp]",
"HH:mm:ss",
"yyyy-MM-dd HH:mm:ss",
"ISO8601"
]
}
}
else if [type] == "audit" {
# Parse the auditd header (type=, msg=audit(ts:id)) and capture the rest
# of the line for the kv filter that follows.
grok {
  patterns_dir => "/etc/logstash/patterns"
  match => {
    "message" =>
    [
      # FIX: the trailing %{MESSAGE:...} reference was missing its closing
      # brace ("%{MESSAGE:[@metadata][message]"), so the pattern could not
      # compile and every audit line failed to parse.
      "type=%{NOTSPACE:[audit][type]} msg=audit\(%{NUMBER:[@metadata][timestamp]}:%{NUMBER:[audit][id]}\):(?: (?:user|avc: %{NOTSPACE:[audit][avc_result]} \{ %{NOTSPACE:[audit][avc_action]} \} for))?%{MESSAGE:[@metadata][message]}"
    ]
  }
  overwrite => [ "[@metadata][timestamp]" ]
}
kv {
field_split => "/"
target => "audit"
source => "[@metadata][message]"
recursive => "true"
}
mutate {
rename => {
"[audit][new-disk]" => "[audit][new_disk]"
"[audit][new-gid]" => "[audit][new_gid]"
"[audit][new-uid]" => "[audit][new_uid]"
"[audit][new-mem]" => "[audit][new_mem]"
"[audit][new-net]" => "[audit][new_net]"
"[audit][new-vcpu]" => "[audit][new_vcpu]"
"[audit][old-disk]" => "[audit][old_disk]"
"[audit][old-mem]" => "[audit][old_mem]"
"[audit][old-net]" => "[audit][old_net]"
"[audit][old-vcpu]" => "[audit][old_vcpu]"
"[audit][ino]" => "[audit][inode]"
"[audit][tty]" => "[audit][terminal]"
}
convert => {
"[audit][auid]" => "integer"
"[audit][capability]" => "integer"
"[audit][egid]" => "integer"
"[audit][euid]" => "integer"
"[audit][fsgid]" => "integer"
"[audit][fsuid]" => "integer"
"[audit][gid]" => "integer"
"[audit][id]" => "integer"
"[audit][inode]" => "integer"
"[audit][inode_gid]" => "integer"
"[audit][inode_uid]" => "integer"
"[audit][items]" => "integer"
"[audit][list]" => "integer"
"[audit][mode]" => "integer"
"[audit][new_gid]" => "integer"
"[audit][new_mem]" => "integer"
"[audit][new_uid]" => "integer"
"[audit][new_vcpu]" => "integer"
"[audit][oauid]" => "integer"
"[audit][obj_gid]" => "integer"
"[audit][obj_uid]" => "integer"
"[audit][ogid]" => "integer"
"[audit][old_mem]" => "integer"
"[audit][old_vcpu]" => "integer"
"[audit][opid]" => "integer"
"[audit][oses]" => "integer"
"[audit][ouid]" => "integer"
"[audit][pid]" => "integer"
"[audit][ppid]" => "integer"
"[audit][sagid]" => "integer"
"[audit][sauid]" => "integer"
"[audit][ses]" => "integer"
"[audit][sgid]" => "integer"
"[audit][sig]" => "integer"
"[audit][success]" => "boolean"
"[audit][suid]" => "integer"
"[audit][syscall]" => "integer"
"[audit][uid]" => "integer"
}
gsub =>
[
"[@metadata][timestamp]",
"[,.][0-9]+$",
""
]
}
date {
match =>
[
"[@metadata][timestamp]",
"UNIX",
"ISO8601"
]
}
}
else if [type] == "bdii" {
grok {
patterns_dir => "/etc/logstash/patterns"
match => {
"message" =>
[
"%{TIMESTAMP_ISO8601:[@metadata][timestamp]}: \[%{LOGLEVEL:loglevel}\] %{MESSAGE:[@metadata][message]}"
]
}
overwrite => [ "[@metadata][timestamp]" ]
}
mutate {
gsub =>
[
"[@metadata][timestamp]",
"[,.][0-9]+$",
""
]
}
date {
match =>
[
"[@metadata][timestamp]",
"yyyy-MM-dd HH:mm:ss",
"ISO8601"
]
}
}
else if [type] == "cleanup_grid_accounts" {
grok {
patterns_dir => "/etc/logstash/patterns"
match => {
"message" =>
[
"%{MESSAGE:[@metadata][message]}"
]
}
}
mutate {
gsub =>
[
"[@metadata][timestamp]",
"[,.][0-9]+$",
""
]
}
date {
match =>
[
"[@metadata][timestamp]",
"yyyy-MM-dd HH:mm:ss",
"ISO8601"
]
}
}
else if [type] == "cron" {
grok {
patterns_dir => "/etc/logstash/patterns"
match => {
"message" =>
[
"%{SYSLOGTIMESTAMP:[@metadata][timestamp]} %{HOST_LOCAL} %{SYSLOGPROG:program}: \(%{USER:[cron][user]}\) %{NOTSPACE:action} \((?:%{USER:[cron][affected_user]}|%{MESSAGE:[cron][command]})\)"
]
}
overwrite => [ "[@metadata][timestamp]" ]
}
mutate {
gsub =>
[
"[@metadata][timestamp]",
"[,.][0-9]+$",
"",
"[@metadata][timestamp]",
" +([0-9]) ",
" 0\1 "
]
}
date {
match =>
[
"[@metadata][timestamp]",
"MMM dd HH:mm:ss",
"ISO8601"
]
}
}
else if [type] == "dmesg" {
}
else if [type] == "dmlite" {
grok {
patterns_dir => "/etc/logstash/patterns"
match => {
"message" =>
[
"%{SYSLOGTIMESTAMP:[@metadata][timestamp]} %{HOST_LOCAL} %{SYSLOGPROG:program}: %{MESSAGE:[@metadata][message]}"
]
}
overwrite => [ "[@metadata][timestamp]" ]
}
if [program] =~ /xrootd/ {
grok {
patterns_dir => "/etc/logstash/patterns"
match => {
"[@metadata][message]" =>
[
"(?:.* Mysql createSecurityContext : )%{CERT:[@metadata][cert]}"
]
}
tag_on_failure => [ ]
}
}
else {
grok {
patterns_dir => "/etc/logstash/patterns"
match => {
"[@metadata][message]" =>
[
"(?:.* )%{FARM_FILE}(?: .*)",
"(?:.* Mysql createSecurityContext : )%{CERT:[@metadata][cert]} %{HOST_REMOTE}",
"(?:.* Could not map )%{CERT:[@metadata][cert]}(?: .*)"
]
}
tag_on_failure => [ ]
}
}
mutate {
gsub =>
[
"[@metadata][timestamp]",
"[,.][0-9]+$",
"",
"[@metadata][timestamp]",
" +([0-9]) ",
" 0\1 "
]
}
date {
match =>
[
"[@metadata][timestamp]",
"MMM dd HH:mm:ss",
"ISO8601"
]
}
}
else if [type] == "gridftp" {
grok {
patterns_dir => "/etc/logstash/patterns"
match => {
"message" =>
[
"(?:\[\")?DATE=%{TIMESTAMP_ISO8601_STR:[@metadata][timestamp]}(?:.%{NUMBER})? HOST\=%{HOST_LOCAL} PROG\=%{PROG:program} %{MESSAGE:[@metadata][message]}(?:\"\])",
"\[%{INT:pid}\] %{DMDTY:[@metadata][timestamp]}(?: :+)? %{MESSAGE:[@metadata][message]}",
"(?<messagetmp>globus_xio:.*)"
]
}
overwrite => [ "[@metadata][timestamp]" ]
}
mutate {
gsub =>
[
"[@metadata][timestamp]",
"[,.][0-9]+$",
"",
"[@metadata][timestamp]",
" +([0-9]) ",
" 0\1 "
]
}
if [messagetmp] {
mutate {
add_field => {
"[@metadata][message]" => "%{messagetmp}"
}
}
}
grok {
patterns_dir => "/etc/logstash/patterns"
match => {
"[@metadata][message]" =>
[
"(?:.* )%{PROG:program} :: (?:(?<action>[[:alnum:] ]+) :: (?:%{CHECKSUM_ALGO:[file][check_sum_algo]}|%{INT}) :: )?(?:.* )?(?:%{IPORHOST:[file][host]}:)?%{FILEPATH:[file][path]}(?: :: %{CERT:[@metadata][cert]} :: %{HOST_REMOTE})?",
"(?:.*)(?:(?<action>Start|Finish|Failure)(?:(?: attempt)?ing to transfer|ed transferring) \")%{FARM_FILE}\"(?:.*)",
"(?:.*(?:New|Closed) connection from:? )%{HOST_REMOTE}(?::%{NUMBER}.*)",
"(?:.*DN )%{CERT:[@metadata][cert]}(?: successfully authorized.*)"
]
}
tag_on_failure => [ ]
}
# gridftp transfer-log lines carry KEY=VALUE pairs (START=, FILE=, DEST=,
# NBYTES=, ...); parse them into [gridftp] and normalize the fields.
# FIX: the field reference was misspelled "[@metadat][message]", so this
# condition never matched and the whole kv branch was dead code.
if [@metadata][message] =~ /.*START=[0-9]+.*$/ {
  kv {
    field_split => " "
    target => "gridftp"
    source => "[@metadata][message]"
  }
  if [gridftp][START] {
    grok {
      patterns_dir => "/etc/logstash/patterns"
      match => {
        "[gridftp][FILE]" =>
        [
          "%{FARM_FILE}"
        ]
        "[gridftp][DEST]" =>
        [
          "\[%{HOST_REMOTE}\]"
        ]
      }
      tag_on_failure => [ "gridftp_file_or_remotehost_failed" ]
    }
    mutate {
      # strip fractional seconds so the date filter below can parse START
      gsub =>
      [
        "[gridftp][START]",
        "([,.][0-9]+)+$",
        ""
      ]
      convert => {
        "[gridftp][BUFFER]" => "integer"
        "[gridftp][BLOCK]" => "integer"
        "[gridftp][NBYTES]" => "integer"
        "[gridftp][STREAMS]" => "integer"
        "[gridftp][STRIPES]" => "integer"
        "[gridftp][CODE]" => "integer"
      }
      rename => {
        "[gridftp][NBYTES]" => "[file][size]"
      }
      remove_field => [
        "[gridftp][FILE]",
        "[gridftp][DEST]"
      ]
    }
    date {
      match =>
      [
        "[gridftp][START]",
        "yyyyMMddHHmmss",
        "ISO8601"
      ]
    }
  }
}
date {
match =>
[
"[@metadata][timestamp]",
"yyyyMMddHHmmss",
"yyyyMMdd'T'HHmmss",
"EEE MMM dd HH:mm:ss yyyy",
"ISO8601"
]
}
}
else if [type] == "dpns" or [type] == "dpmcopy" or [type] == "srm" or [type] == "dpm" {
grok {
patterns_dir => "/etc/logstash/patterns"
match => {
"message" =>
[
"(?<timestamp>%{MONTHNUM}\/%{MONTHDAY} %{TIME}) *%{NUMBER:pid:int}(?:,%{NUMBER:thread:int})? %{PROG}: %{MESSAGE:[@metadata][message]}"
]
}
}
grok {
patterns_dir => "/etc/logstash/patterns"
match => {
"[@metadata][message]" =>
[
"(?:.*(?: request by|\h+ from|getidmap) )%{CERT:[@metadata][cert]}(?:(?: [(]\d+(?:,\d+)*[)])?(?: from %{HOST_REMOTE})?)*"
]
}
tag_on_failure => [ ]
}
if [timestamp] {
mutate {
replace =>
[
"[@metadata][timestamp]",
"%{+YYYY} %{timestamp}"
]
gsub =>
[
"[@metadata][timestamp]",
"[,.][0-9]+$",
""
]
}
}
date {
match =>
[
"[@metadata][timestamp]",
"yyyy MM/dd HH:mm:ss",
"ISO8601"
]
}
}
else if [type] == "dracut" {
grok {
patterns_dir => "/etc/logstash/patterns"
match => {
"message" =>
[
"(?<timestamp>%{DAY} %{MONTH} *%{MONTHDAY} *%{TIME}) *%{TZ} %{YEAR:year} %{LOGLEVEL:loglevel}: %{MESSAGE:[@metadata][message]}"
]
}
}
if [timestamp] {
mutate {
replace =>
[
"[@metadata][timestamp]",
"%{year} %{timestamp}"
]
gsub =>
[
"[@metadata][timestamp]",
"[,.][0-9]+$",
"",
"[@metadata][timestamp]",
" +([0-9]) ",
" 0\1 "
]
}
date {
match =>
[
"[@metadata][timestamp]",
"yyyy EEE MMM dd HH:mm:ss",
"ISO8601"
]
}
}
}
else if [type] == "fetch_crl" {
grok {
patterns_dir => "/etc/logstash/patterns"
match => {
"message" =>
[
"%{WORD:loglevel}%{MESSAGE:[@metadata][message]}"
]
}
}
}
else if [type] == "glexec" {
grok {
patterns_dir => "/etc/logstash/patterns"
match => {
"message" =>
[
"%{SYSLOGPROG} %{TIMESTAMP_ISO8601_STR:timestamp}%{NOTSPACE}?: %{MESSAGE:[@metadata][message]}",
".*:%{SYSLOGPROG} +.*_%{LOGLEVEL:loglevel}: (?<timestamp>%{YMD}.%{TIME})%{ISO8601_TIMEZONE}: *%{MESSAGE:[@metadata][message]}"
]
}
}
mutate {
gsub =>
[
"timestamp",
"[,.][0-9]+$",
"",
"timestamp",
"[-/ T.:]",
""
]
}
date {
match =>
[
"timestamp",
"yyyyMMddHHmmss"
]
}
}
else if [type] == "gridmap" {
if [message] =~ /\/bin\/sh: \/usr\/sbin\/lcg-expiregridmapdir.pl: No such file or directory/ {
drop { }
}
grok {
patterns_dir => "/etc/logstash/patterns"
match => {
"message" =>
[
"(?<timestamp>%{DAY} %{MONTH} %{MONTHDAY} %{TIME}) %{TZ} %{YEAR} %{MESSAGE:[@metadata][message]}"
]
}
}
if [timestamp] {
mutate {
replace =>
[
"[@metadata][timestamp]",
"%{year} %{timestamp}"
]
}
mutate {
gsub =>
[
"[@metadata][timestamp]",
" +([0-9]) ",
" 0\1 ",
"[@metadata][timestamp]",
"[,.][0-9]+$",
""
]
}
date {
match =>
[
"[@metadata][timestamp]",
"yyyy EEE MMM dd HH:mm:ss",
"ISO8601"
]
}
}
}
else if [type] == "httpd" {
grok {
patterns_dir => "/etc/logstash/patterns"
match => {
"message" =>
[
"%{HOST_REMOTE} (?:- )?%{USER:ident} %{USER:auth} \[(?<timestamp>%{MONTHDAY}/%{MONTHNUM}/%{YEAR}:%{TIME}) [-+]?%{INT}\] \"(?:%{WORD:verb} %{NOTSPACE:request}(?: HTTP/%{NUMBER:httpversion:float})?|%{DATA:rawrequest})\" %{NUMBER:response:int} (?:%{NUMBER:bytes:float}|-) %{MESSAGE:[@metadata][message]}",
"%{HOST_REMOTE} - - \[(?<timestamp>%{MONTHDAY}/%{MONTH}/%{YEAR}:%{HOUR}:%{MINUTE}:%{SECOND}) [-+0-9]*\] +%{MESSAGE:[@metadata][message]}",
"\[%{DMDTY:timestamp}\] \[%{LOGLEVEL:loglevel}\] (?:\[.*?%{HOST_LOCAL}\] )?%{MESSAGE:[@metadata][message]}",
"\[(?:(?<timestamp>%{MONTHDAY}/%{MONTHNUM}/%{YEAR}:%{TIME})|(?<timestamp>%{MONTHDAY}/%{MONTH}/%{YEAR}:%{TIME})) %{ISO8601_TIMEZONE}\] %{HOST_LOCAL} %{MESSAGE:[@metadata][message]}",
"\[ *(?<timestamp>%{YEAR}-%{MONTHNUM}-%{MONTHDAY} %{TIME})(?:[.,]%{INT})? +%{INT:pid}/.* +\]: +%{MESSAGE:[@metadata][message]}"
]
}
}
mutate {
gsub =>
[
"timestamp",
" +([0-9]) ",
" 0\1 ",
"timestamp",
"[,.][0-9]+$",
""
]
}
date {
match =>
[
"timestamp",
"dd/MMM/yyyy:HH:mm:ss",
"dd/MM/yyyy:HH:mm:ss",
"EEE MMM dd HH:mm:ss yyyy",
"yyyy-MM-dd HH:mm:ss",
"ISO8601"
]
}
}
else if [type] == "libvirt" {
grok {
patterns_dir => "/etc/logstash/patterns"
match => {
"message" =>
[
"%{TIMESTAMP_ISO8601:[@metadata][timestamp]}: %{INT:pid}: %{LOGLEVEL:loglevel} *: %{MESSAGE:[@metadata][message]}"
]
}
overwrite => [ "[@metadata][timestamp]" ]
}
mutate {
gsub =>
[
"[@metadata][timestamp]",
"[,.][0-9]+$",
""
]
}
date {
match =>
[
"[@metadata][timestamp]",
"ISO8601"
]
}
}
else if [type] == "virt_hosts" {
# Parse VM state-change lines (timestamp + two-word state); the second
# pattern captures bare hypervisor messages into messagetmp, which is
# promoted to [@metadata][message] just below.
# NOTE(review): "quemu" in the second pattern looks like a typo for "qemu" —
# confirm against actual hypervisor log output before changing the regex.
grok {
patterns_dir => "/etc/logstash/patterns"
match => {
"message" =>
[
"%{TIMESTAMP_ISO8601:[@metadata][timestamp]}(?:[+-]?[0-9]+)?: (?<state>%{WORD}%{SPACE}%{WORD}) %{MESSAGE:[@metadata][message]}",
"(?<messagetmp>quemu: .*)"
]
}
overwrite => [ "[@metadata][timestamp]" ]
}
if [messagetmp] {
mutate {
add_field => {
"[@metadata][message]" => "%{messagetmp}"
}
}
}
mutate {
gsub =>
[
"[@metadata][timestamp]",
"[,.][0-9]+$",
""
]
}
date {
match =>
[
"[@metadata][timestamp]",
"ISO8601"
]
}
}
else if [type] == "mail" {
grok {
patterns_dir => "/etc/logstash/patterns"
match => {
"message" =>
[
"%{SYSLOGTIMESTAMP:[@metadata][timestamp]} (?:%{SYSLOGFACILITY} )?%{SYSLOGHOST:logsource} %{PROG:program}*(?:/%{WORD:action})?(?:\[%{POSINT:pid}\])?:(?: %{MAILQUEUE:mailqueue}:)? %{MESSAGE:[@metadata][message]}"
]
}
overwrite => [ "[@metadata][timestamp]" ]
}
mutate {
gsub =>
[
"[@metadata][timestamp]",
" +([0-9]) ",
" 0\1 ",
"[@metadata][timestamp]",
"[,.][0-9]+$",
""
]
}
date {
match =>
[
"[@metadata][timestamp]",
"MMM dd HH:mm:ss",
"ISO8601"
]
}
}
else if [type] == "munin" {
grok {
patterns_dir => "/etc/logstash/patterns"
match => {
"message" =>
[
"%{YMDT:[@metadata][timestamp]} \[%{LOGLEVEL:loglevel}?%{INT:pid}?\] %{MESSAGE:[@metadata][message]}",
"%{YMDT:[@metadata][timestamp]} +(?:\[%{INT:pid}\] +)?%{MESSAGE:[@metadata][message]}"
]
}
overwrite => [ "[@metadata][timestamp]" ]
}
mutate {
gsub =>
[
"[@metadata][timestamp]",
"[,.][0-9]+$",
"",
"[@metadata][timestamp]",
"[-/T]",
" "
]
}
date {
match =>
[
"[@metadata][timestamp]",
#"yy MM dd HH:mm:ss",
"yyyy MM dd HH:mm:ss"
]
}
}
else if [type] == "mysql" {
grok {
patterns_dir => "/etc/logstash/patterns"
match => {
"message" =>
[
"%{YMDT:[@metadata][timestamp]} +(?:\[%{LOGLEVEL:loglevel}\] +)?%{MESSAGE:[@metadata][message]}"
]
}
overwrite => [ "[@metadata][timestamp]" ]
}
mutate {
gsub =>
[
"[@metadata][timestamp]",
"[,.][0-9]+$",
"",
"[@metadata][timestamp]",
" ",
" ",
"[@metadata][timestamp]",
"[-/T]",
" "
]
}
date {
match =>
[
"[@metadata][timestamp]",
"yyyy MM dd HH:mm:ss"
]
}
}
else if [type] == "nagios" {
grok {
patterns_dir => "/etc/logstash/patterns"
match => {
"message" =>
[
"\[%{NUMBER:[@metadata][timestamp]}\] %{MESSAGE:[@metadata][message]}",
"%{YMDT:[@metadata][timestamp]} %{MESSAGE:[@metadata][message]}"
]
}
overwrite => [ "[@metadata][timestamp]" ]
}
mutate {
gsub =>
[
"[@metadata][timestamp]",
"[,.][0-9]+$",
"",
"[@metadata][timestamp]",
"[-/T]",
" "
]
}
date {
match =>
[
"[@metadata][timestamp]",
"UNIX",
"yyyy MM dd HH:mm:ss"
#"MM dd yyyy HH:mm:ss",
#"dd MM yyyy HH:mm:ss",
]
}
}
else if [type] == "osad" {
# Split an osad line into its leading timestamp and the remaining message.
grok {
  patterns_dir => "/etc/logstash/patterns"
  match => {
    "message" =>
    [
      # FIX: the capture target was "[@metadata][message]message" — a
      # malformed field name with a stray "message" suffix; the intended
      # target is plain [@metadata][message], as used everywhere else.
      "%{YMDT:[@metadata][timestamp]} %{MESSAGE:[@metadata][message]}"
    ]
  }
  overwrite => [ "[@metadata][timestamp]" ]
}
if "_grokparsefailure" in [tags] {
mutate {
replace => { "message" => "%{[@metadata][timestamp]} %{message}"}
}
}
mutate {
gsub =>
[
"[@metadata][timestamp]",
"[,.][0-9]+$",
"",
"[@metadata][timestamp]",
" ",
" ",
"[@metadata][timestamp]",
"[-/T]",
" "
]
}
date {
match =>
[
"[@metadata][timestamp]",
"yyyy MM dd HH:mm:ss"
]
}
}
else if [type] == "puppet" {
grok {
patterns_dir => "/etc/logstash/patterns"
match => {
"message" =>
[
"(?:\[%{TIMESTAMP_ISO8601:[@metadata][timestamp]}\]|(?<timestamp>%{DAY} %{MONTH} ?%{MONTHDAY} %{TIME}) %{ISO8601_TIMEZONE} %{YEAR:year}) (?:(?:%{PROG:program}|%{DATA:puppet_resource}) \(?%{LOGLEVEL:loglevel}\)?: )?%{MESSAGE:[@metadata][message]}"
]
}
overwrite => [ "[@metadata][timestamp]" ]
}
grok {
patterns_dir => "/etc/logstash/patterns"
match => {
"[@metadata][message]" =>
[
"(?:Finished catalog run in %{NUMBER:puppet_run:float} seconds)"
]
}
tag_on_failure => [ ]
}
if [timestamp] {
mutate {
replace =>
[
"[@metadata][timestamp]",
"%{year} %{timestamp}"
]
}
}
mutate {
gsub =>
[
"[@metadata][timestamp]",
" +([0-9]) ",
" 0\1 ",
"[@metadata][timestamp]",
"[,.][0-9]+$",
""
]
}
date {
match =>
[
"[@metadata][timestamp]",
"yyyy EEE MMM dd HH:mm:ss",
"ISO8601"
]
}
}
else if [type] == "puppetdb" {
grok {
patterns_dir => "/etc/logstash/patterns"
match => {
"message" =>
[
"%{TIMESTAMP_ISO8601:[@metadata][timestamp]} %{LOGLEVEL:loglevel} +%{MESSAGE:[@metadata][message]}"
]
}
overwrite => [ "[@metadata][timestamp]" ]
}
grok {
patterns_dir => "/etc/logstash/patterns"
match => {
"[@metadata][message]" =>
[
"%{HOST_REMOTE}$"
]
}
tag_on_failure => [ ]
}
mutate {
gsub =>
[
"[@metadata][timestamp]",
"[,.][0-9]+$",
""
]
}
date {
match =>
[
"[@metadata][timestamp]",
"yyyy-MM-dd HH:mm:ss",
"ISO8601"
]
}
}
else if [type] == "rfio" {
grok {
patterns_dir => "/etc/logstash/patterns"
match => {
"message" =>
[
"%{SYSLOGTIMESTAMP:[@metadata][timestamp]} %{SYSLOGPROG}: %{MESSAGE:[@metadata][message]}"
]
}
overwrite => [ "[@metadata][timestamp]" ]
}
grok {
patterns_dir => "/etc/logstash/patterns"
match => {
"[@metadata][message]" =>
[
"(?:: connection from \[)%{HOST_REMOTE}(?:\] \()%{HOST_REMOTE}(?:\))$",
"%{NOTSPACE:action}(?:: (?:unlink\(|file: ))%{FILEPATH:[file][path]}(?:\) .*)"
]
}
tag_on_failure => [ ]
}
mutate {
gsub =>
[
"[@metadata][timestamp]",
" +([0-9]) ",
" 0\1 ",
"[@metadata][timestamp]",
"[,.][0-9]+$",
""
]
}
date {
match =>
[
"[@metadata][timestamp]",
"MMM dd HH:mm:ss",
"ISO8601"
]
}
}
else if [type] == "rhncfg" {
grok {
patterns_dir => "/etc/logstash/patterns"
match => {
"message" =>
[
"%{TIMESTAMP_ISO8601:[@metadata][timestamp]} %{MESSAGE:[@metadata][message]}"
]
}
overwrite => [ "[@metadata][timestamp]" ]
}
mutate {
gsub =>
[
"[@metadata][timestamp]",
"[,.][0-9]+$",
""
]
}
date {
match =>
[
"[@metadata][timestamp]",
"ISO8601"
]
}
}
else if [type] == "rsyslog" {
grok {
patterns_dir => "/etc/logstash/patterns"
match => {
"message" =>
[
"(?:%{SYSLOGTIMESTAMP:[@metadata][timestamp]}|%{TIMESTAMP_ISO8601:[@metadata][timestamp]}) (?:%{SYSLOGFACILITY} )?%{SYSLOGHOST:logsource} +%{SYSLOGPROG}: %{MESSAGE:[@metadata][message]}",
" *(?:\<?.*\>)?%{SYSLOGPROG}: %{MESSAGE:[@metadata][message]}",
" *%{MESSAGE:[@metadata][message]}"
]
}
overwrite => [ "[@metadata][timestamp]" ]
}
grok {
patterns_dir => "/etc/logstash/patterns"
match => {
"host" =>
[
"%{HOST_LOCAL}"
]
}
tag_on_failure => [ ]
}
grok {
patterns_dir => "/etc/logstash/patterns"
match => {
"[@metadata][message]" =>
[
"(?:.*)(?:password for root from *|rhost=)%{HOST_REMOTE}(?: +port| +user=)(?:.*)"
]
}
tag_on_failure => [ ]
}
mutate {
gsub =>
[
"[@metadata][timestamp]",
" +([0-9]) ",
" 0\1 ",
"[@metadata][timestamp]",
"[,.][0-9]+$",
""
]
}
date {
match =>
[
"[@metadata][timestamp]",
"MMM dd HH:mm:ss",
"ISO8601"
]
timezone => "UTC"
}
}
else if [type] == "sssd" or [type] == "up2date" {
grok {
patterns_dir => "/etc/logstash/patterns"
match => {
"message" =>
[
"[\(\[]?%{DMDTY:[@metadata][timestamp]}[\]\)]? %{MESSAGE:[@metadata][message]}"
]
}
overwrite => [ "[@metadata][timestamp]" ]
}
mutate {
gsub =>
[
"[@metadata][timestamp]",
" +([0-9]) ",
" 0\1 ",
"[@metadata][timestamp]",
"[,.][0-9]+ ",
" "
]
}
date {
match =>
[
"[@metadata][timestamp]",
"EEE MMM dd HH:mm:ss yyyy",
"ISO8601"
]
}
}
else if [type] == "syslog" or [type] == "secure" {
grok {
patterns_dir => "/etc/logstash/patterns"
match => {
"message" =>
[
"(?:%{SYSLOGTIMESTAMP:[@metadata][timestamp]}|%{TIMESTAMP_ISO8601:[@metadata][timestamp]}) (?:%{SYSLOGFACILITY} )?%{SYSLOGHOST:logsource} %{SYSLOGPROG}(?<action>\[.*?\])?: %{MESSAGE:[@metadata][message]}"
]
}
overwrite => [ "[@metadata][timestamp]" ]
}
grok {
patterns_dir => "/etc/logstash/patterns"
match => {
"[@metadata][message]" =>
[
"(?:.*)(?:password for root from *|rhost=)%{HOST_REMOTE}(?: +port| +user=)(?:.*)"
]
}
tag_on_failure => [ ]
}
mutate {
gsub =>
[
"[@metadata][timestamp]",
" +([0-9]) ",
" 0\1 ",
"[@metadata][timestamp]",
"[,.][0-9]+$",
""
]
}
date {
match =>
[
"[@metadata][timestamp]",
"MMM dd HH:mm:ss",
"ISO8601"
]
}
}
else if [type] == "torque_accounting" {
grok {
patterns_dir => "/etc/logstash/patterns"
match => {
"source" =>
[ "(?:/.+)+/%{INTEGER:[@metadata][torque_source]}"]
}
}
mutate {
replace => {
"[@metadata][host_name]" => "torque.farm.particle.cz"
"source" => "/var/spool/torque/server_priv/accounting/%{[@metadata][torque_source]}"
"[@metadata][host_ip]" => "147.231.25.5"
}
gsub => [ "message", ":ppn=", " ppn="]
}
grok {
patterns_dir => "/etc/logstash/patterns"
match => {
"message" =>
[ "%{MDYT:[@metadata][timestamp]};%{ONE_CHAR:[@metadata][torque_state]};%{INTEGER:[@metadata][torque_jobid]}(?:-%{INTEGER:[@metadata][torque_subjobid]})?(?:.torque.farm.particle.cz;)%{MESSAGE:[@metadata][message]}"]
}
overwrite => [ "[@metadata][timestamp]" ]
}
kv {
source => "[@metadata][message]"
target => "torque"
recursive => true
}
if [@metadata][torque_state] == "A" {
mutate {
add_field => {
"[torque][job][state]" => "Aborted"
}
}
}
else if [@metadata][torque_state] == "C" {
mutate {
add_field => {
"[torque][job][state]" => "Checkpointed"
}
}
}
else if [@metadata][torque_state] == "D" {
mutate {
add_field => {
"[torque][job][state]" => "Deleted"
}
}
}
else if [@metadata][torque_state] == "E" {
mutate {
add_field => {
"[torque][job][state]" => "Ended"
}
}
}
else if [@metadata][torque_state] == "Q" {
mutate {
add_field => {
"[torque][job][state]" => "Queued"
}
}
}
else if [@metadata][torque_state] == "R" {
mutate {
add_field => {
"[torque][job][state]" => "Rerunned"
}
}
}
else if [@metadata][torque_state] == "S" {
mutate {
add_field => {
"[torque][job][state]" => "Started"
}
}
}
else if [@metadata][torque_state] == "T" {
mutate {
add_field => {
"[torque][job][state]" => "Restarted"
}
}
}
else {
mutate {
add_field => {
"[torque][job][state]" => "%{[@metadata][torque_state]}"
}
}
}
mutate {
split => {
"[torque][exec_host]" => '+'
}
}
mutate {
join => {
"[torque][ppn]" => ","
}
rename => {
"[torque][exec_host]" => "[@metadata][torque_exec_host]"
}
}
grok {
patterns_dir => "/etc/logstash/patterns"
match => {
"[@metadata][torque_exec_host]" =>
[
"%{HOST:[torque][exec][host]}/%{INTEGER:[torque][job][slot]}"
]
}
tag_on_failure => [ ]
}
grok {
patterns_dir => "/etc/logstash/patterns"
match => {
"[torque][exec][host]" =>
[
"%{NOTNUMBER:[torque][exec][cluster]}(?:\d+$)"
]
}
tag_on_failure => [ ]
}
grok {
patterns_dir => "/etc/logstash/patterns"
match => {
"[torque][owner]" =>
[
"%{USER:[torque][owner]}@%{HOST:[torque][computing_element]}"
]
}
overwrite => [ "[torque][owner]" ]
tag_on_failure => [ ]
}
if [@metadata][torque_subjobid] {
mutate {
add_field => {
"[torque][job][subjob]" => "%{[@metadata][torque_subjobid]}"
}
}
}
if ![torque][ppn] {
mutate {
add_field => {
"[torque][ppn]" => "1"
}
}
}
mutate {
join => {
"[torque][exec][host]" => ","
"[torque][exec][cluster]" => ","
}
}
mutate {
add_field => {
"[torque][job][id]" => "%{[@metadata][torque_jobid]}"
}
rename => {
"[torque][Resource_List.mem]" => "[torque][resource_list][mem]"
"[torque][Resource_List.neednodes]" => "[torque][resource_list][neednodes]"
"[torque][Resource_List.nodect]" => "[torque][resource_list][nodect]"
"[torque][Resource_List.nodes]" => "[torque][resource_list][nodes]"
"[torque][Resource_List.cput]" => "[torque][resource_list][cput]"
"[torque][Resource_List.vmem]" => "[torque][resource_list][vmem]"
"[torque][Resource_List.walltime]" => "[torque][resource_list][walltime]"
"[torque][resources_used.cput]" => "[torque][resources_used][cput]"
"[torque][resources_used.mem]" => "[torque][resources_used][mem]"
"[torque][resources_used.vmem]" => "[torque][resources_used][vmem]"
"[torque][Exit_status]" => "[torque][exit_code]"
"[torque][resources_used.walltime]" => "[torque][resources_used][walltime]"
}
}
mutate {
gsub =>
[
"[torque][resource_list][mem]", "mb$", "",
"[torque][resource_list][vmem]", "mb$", "",
"[torque][resources_used][mem]", "kb$", "",
"[torque][resources_used][vmem]", "kb$", "",
"message", " ppn=", ":ppn=",
"[torque][ppn]", "(,.*)+", "",
"[torque][exec][host]", "(,.*)+", "",
"[torque][exec][cluster]", "(,.*)+", ""
]
}
if [torque][exec][host] =~ /^aplex3[12]$/ {
mutate {
add_field => {
"[@metadata][host_multiplier]" => "2.68"
}
}
}
else if [torque][exec][cluster] == "aplex" {
mutate {
add_field => {
"[@metadata][host_multiplier]" => "2.65"
}
}
}
else if [torque][exec][cluster] == "malva" {
mutate {
add_field => {
"[@metadata][host_multiplier]" => "2.76"
}
}
}
else if [torque][exec][cluster] == "rubuk" {
mutate {
add_field => {
"[@metadata][host_multiplier]" => "2.23"
}
}
}
else if [torque][exec][cluster] == "rubul" {
mutate {
add_field => {
"[@metadata][host_multiplier]" => "1.9"
}
}
}
else if [torque][exec][cluster] == "rubus" {
mutate {
add_field => {
"[@metadata][host_multiplier]" => "1.89"
}
}
}
# FIX: this "ib" branch appeared twice verbatim in the else-if chain; the
# second copy was unreachable dead code and has been removed.
else if [torque][exec][cluster] == "ib" {
  mutate {
    add_field => {
      "[@metadata][host_multiplier]" => "2.09"
    }
  }
}
else if [torque][exec][cluster] == "ibis" {
mutate {
add_field => {
"[@metadata][host_multiplier]" => "1.95"
}
}
}
else if [torque][exec][cluster] == "saltix" {
mutate {
add_field => {
"[@metadata][host_multiplier]" => "2.99"
}
}
}
else if [torque][exec][cluster] == "iberis" {
mutate {
add_field => {
"[@metadata][host_multiplier]" => "2.33"
}
}
}
else if [torque][exec][cluster] == "salix" {
mutate {
add_field => {
"[@metadata][host_multiplier]" => "2.3"
}
}
}
else if [torque][exec][host] =~ /^golias1(4[3-9]|50)$/ {
mutate {
add_field => {
"[@metadata][host_multiplier]" => "2.75"
}
}
}
else if [torque][exec][host] =~ /^golias1(5[1-9]|6[0-2])$/ {
mutate {
add_field => {
"[@metadata][host_multiplier]" => "2.12"
}
}
}
else if [torque][exec][host] =~ /^golias1(6[3-9]|70)$/ {
mutate {
add_field => {
"[@metadata][host_multiplier]" => "2.24"
}
}
}
else {
mutate {
add_field => {
"[@metadata][host_multiplier]" => "1"
}
}
}
mutate {
convert => {
"[@metadata][host_multiplier]" => "float"
"[torque][job][id]" => "integer"
"[torque][job][slot]" => "integer"
"[torque][job][subjob]" => "integer"
"[torque][ppn]" => "integer"
"[torque][resource_list][mem]" => "integer"
"[torque][resource_list][vmem]" => "integer"
"[torque][resource_list][neednodes]" => "integer"
"[torque][resource_list][nodes]" => "integer"
"[torque][resource_list][nodect]" => "integer"
"[torque][exit_code]" => "integer"
"[torque][session]" => "integer"
"[torque][resources_used][mem]" => "integer"
"[torque][resources_used][vmem]" => "integer"
}
}
if [torque][resource_list][cput] {
mutate {
replace => {
"[@metadata][res_list_cput]" => "%{[torque][resource_list][cput]}"
}
split => {
"[@metadata][res_list_cput]" => ":"
}
}
ruby {
code => "event['[torque][resource_list][cput]'] = Integer(event['[@metadata][res_list_cput][0]'].to_i * 3600 + event['[@metadata][res_list_cput][1]'].to_i * 60 + event['[@metadata][res_list_cput][2]'].to_i)"
}
}
if [torque][resource_list][walltime] {
mutate {
replace => {
"[@metadata][res_list_walltime]" => "%{[torque][resource_list][walltime]}"
}
split => {
"[@metadata][res_list_walltime]" => ":"
}
}
ruby {
code => "event['[torque][resource_list][walltime]'] = Integer(event['[@metadata][res_list_walltime][0]'].to_i * 3600 + event['[@metadata][res_list_walltime][1]'].to_i * 60 + event['[@metadata][res_list_walltime][2]'].to_i)"
}
}
if [torque][resources_used][walltime] {
mutate {
replace => {
"[@metadata][res_use_walltime]" => "%{[torque][resources_used][walltime]}"
}
split => {
"[@metadata][res_use_walltime]" => ":"
}
}
ruby {
code => "event['[torque][resources_used][walltime_raw]'] = Integer(event['[@metadata][res_use_walltime][0]'].to_i * 3600 + event['[@metadata][res_use_walltime][1]'].to_i * 60 + event['[@metadata][res_use_walltime][2]'].to_i)"
}
ruby {
code => "event['[torque][resources_used][hepspec_time]'] = Integer(event['[torque][resources_used][walltime_raw]'].to_i * 3.992)"
}
ruby {
code => "event['[torque][resources_used][core_walltime]'] = Integer(event['[torque][resources_used][walltime_raw]'].to_i * event['[torque][ppn]'].to_i)"
}
mutate {
replace => {
"[torque][resources_used][walltime]" => "%{[torque][resources_used][core_walltime]}"
}
}
ruby {
code => "event['[torque][resources_used][walltime_real]'] = Integer(event['[torque][resources_used][core_walltime]']/event['[@metadata][host_multiplier]'])"
}
mutate {
convert => {
"[torque][resources_used][walltime]" => "integer"
}
}
}
if [torque][resources_used][cput] {
mutate {
replace => {
"[@metadata][res_use_cput]" => "%{[torque][resources_used][cput]}"
}
split => {
"[@metadata][res_use_cput]" => ":"
}
}
ruby {
code => "event['[torque][resources_used][cput]'] = Integer(event['[@metadata][res_use_cput][0]'].to_i * 3600 + event['[@metadata][res_use_cput][1]'].to_i * 60 + event['[@metadata][res_use_cput][2]'].to_i)"
}
}
if [torque][resource_list][mem] {
ruby {
code => "event['[torque][resource_list][mem]'] = Integer(event['[torque][resource_list][mem]']) * 1048576"
}
}
if [torque][resource_list][vmem] {
ruby {
code =>"event['[torque][resource_list][vmem]'] = Integer(event['[torque][resource_list][vmem]']) * 1048576"
}
}
if [torque][resources_used][mem] {
ruby {
code => "event['[torque][resources_used][mem]'] = Integer(event['[torque][resources_used][mem]']) * 1024"
}
}
if [torque][resources_used][vmem] {
ruby {
code =>"event['[torque][resources_used][vmem]'] = Integer(event['[torque][resources_used][vmem]']) * 1024"
}
}
date {
match => [ "[@metadata][timestamp]", "MM/dd/yyyy HH:mm:ss" ]
}
if [torque][ctime] {
date{
match => [ "[torque][ctime]", "UNIX" ]
target => "[torque][ctime]"
}
}
if [torque][qtime] {
date {
match => [ "[torque][qtime]", "UNIX" ]
target => "[torque][qtime]"
}
}
if [torque][etime] {
date {
match => [ "[torque][etime]", "UNIX" ]
target => "[torque][etime]"
}
}
if [torque][start] {
date{
match => [ "[torque][start]", "UNIX" ]
target => "[torque][start]"
}
}
if [torque][end] {
date{
match => [ "[torque][end]", "UNIX" ]
target => "[torque][end]"
}
}
}
## xrootd log lines: "<date> <time> [pid] [program:] message", where the date
## is matched by the custom YMDR pattern (defined in /etc/logstash/patterns).
## The second grok pattern catches lines without a pid/program prefix.
else if [type] == "xrootd" {
    grok {
        patterns_dir => "/etc/logstash/patterns"
        match => {
            "message" =>
            [
                "(?<timestamp>%{YMDR} %{TIME}) %{INT:pid:int} (?:%{PROG:program}: )?%{MESSAGE:[@metadata][message]}",
                "(?<timestamp>%{YMDR} %{TIME}) %{MESSAGE:[@metadata][message]}"
            ]
        }
    }
    ## Strip trailing fractional seconds so the date patterns below match.
    mutate {
        gsub =>
        [
            "timestamp",
            "[,.][0-9]+$",
            ""
        ]
    }
    date {
        match =>
        [
            "timestamp",
            "yyMMdd HH:mm:ss",
            "yyyyMMdd HH:mm:ss",
            "ISO8601"
        ]
    }
}
## yum log lines: "MMM dd HH:mm:ss [action:] message" (action is e.g. the
## transaction verb preceding the colon -- confirm against /var/log/yum.log).
else if [type] == "yum" {
    grok {
        patterns_dir => "/etc/logstash/patterns"
        match => {
            "message" =>
            [
                "(?<timestamp>%{MONTH} %{MONTHDAY} %{TIME}) (?:%{WORD:action}: )?%{MESSAGE:[@metadata][message]}"
            ]
        }
    }
    ## Strip trailing fractional seconds so the date patterns below match.
    mutate {
        gsub =>
        [
            "timestamp",
            "[,.][0-9]+$",
            ""
        ]
    }
    date {
        match =>
        [
            "timestamp",
            "MMM dd HH:mm:ss",
            "ISO8601"
        ]
    }
}
## Anything we do not recognise gets dropped.
else {
    drop { }
}
## An empty message is useless to us, and so is anything explicitly tagged
## "drop" earlier in the pipeline. _QUOTE_ is a placeholder token substituted
## for quotes upstream -- a message of only quotes/whitespace is also empty.
if ([message] =~ /^(?:(_QUOTE_)*|[[:space:]])*$/) or ("drop" in [tags]) or ([message] == "") or ![message] {
    drop { }
}
## Drop the raw "host" field (normalised host fields are built below) and
## collapse any multi-valued host/ip metadata arrays into comma-joined strings
## so they can be matched and trimmed as plain text.
mutate {
    remove_field => [ "host" ]
    join => {
        "[@metadata][host_ip]" => ","
        "[@metadata][host_name]" => ","
        "[@metadata][remote_host_name]" => ","
        "[@metadata][remote_host_ip]" => ","
    }
}
## Keep only the first valid IP found in host_ip. A non-match is not an
## error (tag_on_failure is empty) and leaves the field unchanged.
grok {
    patterns_dir => "/etc/logstash/patterns"
    match => {
        "[@metadata][host_ip]" =>
        [
            "%{IP:[@metadata][host_ip]}"
        ]
    }
    overwrite => [ "[@metadata][host_ip]" ]
    tag_on_failure => [ ]
}
## If host_name does not look like a dotted name (first char non-digit),
## try fallbacks in order: beat hostname -> reverse DNS of host_ip ->
## syslog logsource -> a fixed "unknown host" sentinel recognised below.
if [@metadata][host_name] !~ /^\D\w+(\.\w+)+$/ {
    if [beat][hostname] and ([beat][hostname] !~ /^[[:space:]]*$/) {
        mutate {
            replace => {
                "[@metadata][host_name]" => "%{[beat][hostname]}"
            }
        }
    }
    else if [@metadata][host_ip] {
        ## Seed host_name with the IP, then try to replace it via reverse DNS.
        mutate {
            replace => {
                "[@metadata][host_name]" => "%{[@metadata][host_ip]}"
            }
        }
        dns {
            reverse => [ "[@metadata][host_name]" ]
            action => "replace"
        }
    }
    else if [logsource] !~ /^[[:space:]]*$/ {
        mutate {
            replace => {
                "[@metadata][host_name]" => "%{logsource}"
            }
            ## Strip everything from the first comma (joined multi-values).
            gsub =>
            [
                "[@metadata][host_name]",
                "(,.*)+$",
                ""
            ]
        }
    }
    else {
        ## Sentinel name; matched literally by the backup/geoip logic below.
        mutate {
            replace => {
                "[@metadata][host_name]" => "unknown.host.elastic01-logstash.Cvj8T.none"
            }
        }
    }
}
## Fill in host_ip when no valid IP was extracted above.
if ![@metadata][host_ip] {
    if [@metadata][host_name] == "unknown.host.elastic01-logstash.Cvj8T.none" {
        ## Unknown host: use a fixed placeholder address (matched below).
        mutate {
            replace => {
                "[@metadata][host_ip]" => "198.51.100.1"
            }
        }
    }
    else {
        ## Seed host_ip with the host name (minus any comma-joined tail),
        ## then resolve it via forward DNS if it looks like a dotted name.
        mutate {
            replace => {
                "[@metadata][host_ip]" => "%{[@metadata][host_name]}"
            }
            gsub =>
            [
                "[@metadata][host_ip]",
                "(,.*)+$",
                ""
            ]
        }
        if [@metadata][host_ip] =~ /^\D\w+(\.\w+)+$/ {
            dns {
                resolve => [ "[@metadata][host_ip]" ]
                action => "replace"
            }
        }
    }
}
## Keep only the first valid IP found in remote_host_ip; a non-match is
## silent (tag_on_failure empty) and leaves the field unchanged.
grok {
    patterns_dir => "/etc/logstash/patterns"
    match => {
        "[@metadata][remote_host_ip]" =>
        [
            "%{IP:[@metadata][remote_host_ip]}"
        ]
    }
    overwrite => [ "[@metadata][remote_host_ip]" ]
    tag_on_failure => [ ]
}
## If remote_host_name does not look like a dotted name, fall back to
## reverse DNS of remote_host_ip, or to a fixed sentinel recognised below.
if [@metadata][remote_host_name] !~ /^\D\w+(\.\w+)+$/ {
    if [@metadata][remote_host_ip] {
        ## Seed the name with the IP, then try to replace it via reverse DNS.
        mutate {
            replace => {
                "[@metadata][remote_host_name]" => "%{[@metadata][remote_host_ip]}"
            }
        }
        dns {
            reverse => [ "[@metadata][remote_host_name]" ]
            action => "replace"
        }
    }
    else {
        ## Sentinel name; matched literally by the cleanup logic below.
        mutate {
            replace => {
                "[@metadata][remote_host_name]" => "unknown.host.alziyVzonAQJ8MT02N2eDA1xSe.none"
            }
        }
    }
}
## Fill in remote_host_ip when no valid IP was extracted above.
if ![@metadata][remote_host_ip] {
    if [@metadata][remote_host_name] == "unknown.host.alziyVzonAQJ8MT02N2eDA1xSe.none" {
        ## Unknown remote host: use a fixed placeholder address (matched below).
        mutate {
            replace => {
                "[@metadata][remote_host_ip]" => "203.0.113.1"
            }
        }
    }
    else {
        ## Seed remote_host_ip with the name (minus any comma-joined tail),
        ## then resolve it via forward DNS if it looks like a dotted name.
        mutate {
            replace => {
                "[@metadata][remote_host_ip]" => "%{[@metadata][remote_host_name]}"
            }
            gsub =>
            [
                "[@metadata][remote_host_ip]",
                "(,.*)+$",
                ""
            ]
        }
        if [@metadata][remote_host_ip] =~ /^\D\w+(\.\w+)+$/ {
            dns {
                resolve => [ "[@metadata][remote_host_ip]" ]
                action => "replace"
            }
        }
    }
}
## Promote the validated metadata values into indexed event fields:
## [host][ip], [host][name], [remote_host][ip], [remote_host][name].
## A failed match is silent (tag_on_failure empty) and leaves the target unset.
grok {
    patterns_dir => "/etc/logstash/patterns"
    match => {
        "[@metadata][host_ip]" => [ "%{IP:[host][ip]}" ]
    }
    tag_on_failure => [ ]
}
grok {
    patterns_dir => "/etc/logstash/patterns"
    match => {
        "[@metadata][host_name]" => [ "%{HOST:[host][name]}" ]
    }
    tag_on_failure => [ ]
}
grok {
    patterns_dir => "/etc/logstash/patterns"
    match => {
        "[@metadata][remote_host_ip]" => [ "%{IP:[remote_host][ip]}" ]
    }
    tag_on_failure => [ ]
}
grok {
    patterns_dir => "/etc/logstash/patterns"
    match => {
        "[@metadata][remote_host_name]" => [ "%{HOST:[remote_host][name]}" ]
    }
    tag_on_failure => [ ]
}
## Pick the backup file name and geolocate the host, depending on which of
## name/IP are real values (198.51.100.1 and the .none name are sentinels).
if (([host][ip] == "198.51.100.1") or ![host][ip]) and (([host][name] == "unknown.host.elastic01-logstash.Cvj8T.none") or ![host][name]) {
    ## Neither usable: back up under a generic name, drop the sentinel fields.
    mutate {
        add_field => {
            "[@metadata][backup_file_name]" => "unknown.host.elastic01-logstash"
        }
        remove_field => [
            "[host][name]",
            "[host][ip]"
        ]
    }
}
else if [host][name] == "unknown.host.elastic01-logstash.Cvj8T.none" {
    ## IP only: back up under the IP, drop the sentinel name, geolocate the IP.
    mutate {
        add_field => {
            "[@metadata][backup_file_name]" => "%{[host][ip]}"
        }
        remove_field => [
            "[host][name]"
        ]
    }
    geoip {
        source => "[host][ip]"
        target => "[host][geoip]"
        database => "/usr/share/GeoIP/GeoLiteCity.dat"
    }
}
else if ([host][ip] == "198.51.100.1") or ![host][ip] {
    ## Name only: back up under the name; no usable IP to geolocate.
    mutate {
        remove_field => [ "[host][ip]" ]
        add_field => {
            "[@metadata][backup_file_name]" => "%{[host][name]}"
        }
    }
}
else {
    ## Both present: back up under the name and geolocate the IP.
    mutate {
        add_field => {
            "[@metadata][backup_file_name]" => "%{[host][name]}"
        }
    }
    geoip {
        source => "[host][ip]"
        target => "[host][geoip]"
        database => "/usr/share/GeoIP/GeoLiteCity.dat"
    }
}
## Drop the remote-host sentinel values; geolocate a real remote IP.
if [remote_host][name] == "unknown.host.alziyVzonAQJ8MT02N2eDA1xSe.none" {
    mutate {
        remove_field => [ "[remote_host][name]" ]
    }
}
if ([remote_host][ip] == "203.0.113.1") {
    mutate {
        remove_field => [ "[remote_host][ip]" ]
    }
}
else {
    geoip {
        source => "[remote_host][ip]"
        target => "[remote_host][geoip]"
        database => "/usr/share/GeoIP/GeoLiteCity.dat"
    }
}
## If IP-based geolocation produced no location, retry using the host names.
if ![host][geoip][location] and [host][name] {
    geoip {
        source => "[host][name]"
        target => "[host][geoip]"
        database => "/usr/share/GeoIP/GeoLiteCity.dat"
    }
}
if ![remote_host][geoip][location] and [remote_host][name] {
    geoip {
        source => "[remote_host][name]"
        target => "[remote_host][geoip]"
        database => "/usr/share/GeoIP/GeoLiteCity.dat"
    }
}
## Split a "/"-separated key=value string captured earlier into [cert][*]
## fields (presumably an X.509 subject like "/C=../O=.." -- TODO confirm).
if [@metadata][cert] {
    kv {
        field_split => "/"
        target => "cert"
        source => "[@metadata][cert]"
    }
}
## Remove surplus fields left over from earlier grok/date parsing.
mutate {
    remove_field =>
    [
        "day",
        "hour",
        "minute",
        "month",
        "monthday",
        "monthnum",
        "second",
        "time",
        "timestamp",
        "tz",
        "year",
        "offset",
        "[beat][name]",
        "[beat][hostname]"
    ]
    ## Collapse runs of spaces in the message to a single space.
    gsub => [
        "message",
        " +",
        " "
        #"message",
        #" _QUOTE_ ",
        #"'"
    ]
    strip => [ "message" ]
}
## Keyed SHA-512 of the (normalised) message body, stored in [fingerprint].
fingerprint {
    method => "SHA512"
    source => "message"
    target => "fingerprint"
    base64encode => true
    ## NEVER CHANGE THIS!!! (key)
    key => "3JLnwPL8TYyOI3mKizzOHm26sqa/Osh2O+IAba8J"
}
}
## Route events to an index family by originating beat: packetbeat and
## topbeat each keep their own index name, everything else is "logstash".
if [@metadata][beat] == "packetbeat" or [@metadata][beat] == "topbeat" {
    mutate {
        add_field => {
            "[@metadata][index_name]" => "%{[@metadata][beat]}"
        }
    }
}
else {
    mutate {
        add_field => {
            "[@metadata][index_name]" => "logstash"
        }
    }
}
## Build the per-event unique string (message + host identity + source +
## timestamp) and hash it into the Elasticsearch document id.
## FIX: the original template read "%{[host][ip]}}" -- the stray "}" injected
## a literal "}" into every fingerprint source. Removing it changes the ids
## generated from here on, so replayed events from before the fix will not
## deduplicate against their old documents across the transition.
mutate {
    add_field => {
        "[@metadata][fingerprint_source]" => "%{message}%{[host][name]}%{[host][ip]}%{source}%{@timestamp}"
    }
}
fingerprint {
    method => "SHA512"
    source => "[@metadata][fingerprint_source]"
    target => "[@metadata][document_id]"
    base64encode => true
    ## NEVER CHANGE THIS!!! (key)
    key => "8YLJqWwy8VTmfnTs6Y8bxe/aKZVIH3mtBz/ISYnn"
}
}
###############################################################################
## Výstupy
###############################################################################
## Outputs
output {
    ## Nagios self-check events are written to a local file watched by the
    ## monitoring check (pairs with the file input in this config's head).
    if [type] == "nagios_check_logstash" {
        file {
            path => "/tmp/logstash-monitor-logstash.out"
        }
    }
    else {
        ## Everything else is indexed into the Elasticsearch cluster under
        ## <index_name>-<yyyy-MM>_<index_version>, keyed by the precomputed
        ## document id so re-delivered events overwrite instead of duplicate.
        elasticsearch {
            hosts =>
            [
                "elastic01.farm.particle.cz:9200",
                "elastic02.farm.particle.cz:9200",
                "elastic03.farm.particle.cz:9200",
                "elastic04.farm.particle.cz:9200",
                "kibana.farm.particle.cz:9200",
                "syslog.farm.particle.cz:9200"
            ]
            index => "%{[@metadata][index_name]}-%{+yyyy-MM}_%{[@metadata][index_version]}"
            flush_size => 2048
            document_id => "%{[@metadata][document_id]}"
            workers => 32
        }
        ## Plain logstash events are additionally archived to gzipped daily
        ## files, one per source host (backup_file_name set in the filters).
        if [@metadata][index_name] == "logstash" {
            file {
                path => "/logy/logstash.bak/%{+yyyy}/%{+yyyy-MM}/%{+yyyy-MM-dd}/%{+yyyy-MM-dd}-%{[@metadata][backup_file_name]}.gz"
                gzip => true
            }
        }
    }
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment