# Logstash pipelines.yml — gist by @ITBlogger, created August 14, 2018
- pipeline.id: beats-intake
  pipeline.workers: 2
  config.string: |
    input {
      beats {
        port => 5044
        ssl => true
        ssl_certificate => "/etc/pki/tls/certs/logstash-forwarder.crt"
        ssl_key => "/etc/pki/tls/private/logstash-forwarder.key"
        id => "beats_input"
      }
    }
    filter {
      # Kubernetes events arrive with the payload in [log]; move it to [message]
      if [field][document_type] == "kubernetes" or [fields][document_type] == "kubernetes" or [document_type] == "kubernetes" {
        mutate {
          copy => { "log" => "message" }
          remove_field => "log"
          id => "kubernetes_log_to_message_mutate"
        }
      }
      # Strip leading ANSI color escape sequences from the message
      mutate {
        gsub => ["message", "^(\s+)?(\x1B\[([0-9]{1,2}(;[0-9]{1,2})?)?[m|K](\x1B\[([0-9]{1,2}(;[0-9]{1,2})?)?[m|K])?)", ""]
        add_tag => [ "color_code_remover_filter" ]
        id => "color_code_remover_filter"
      }
    }
    # Route each event to a downstream pipeline by Kubernetes label,
    # document_type, source path, or Filebeat module
    output {
      if [fileset][module] != "nginx" {
        if [kubernetes][labels][itk-svc] in [ "itk-activity", "itk-distribution", "itk-messaging", "itk-subscription", "itk-voice", "itk-webhook", "ucoach-connect", "ucoach-console" ] {
          pipeline { send_to => "kubernetes-ruby-logs" }
        } else if [kubernetes][labels][itk-svc] in [ "itk-api", "itk-batch", "itk-chat-monitor", "itk-coaching", "itk-necromancer", "itk-profilex", "itk-students" ] {
          pipeline { send_to => "kubernetes-elixir-logs" }
        } else if [kubernetes][labels][itk-svc] in [ "ucoach-admin", "ucoach-cobbles", "ucoach-rooster", "ucoach-student", "ucoach-styles" ] {
          pipeline { send_to => "kubernetes-nodejs-logs" }
        } else if [field][document_type] == "ucoach_service_log" or [fields][document_type] == "ucoach_service_log" or [document_type] == "ucoach_service_log"
            or ([source] in [ "/var/log/upstart/ucoach-admin.log", "/var/log/upstart/ucoach-cobbles.log", "/var/log/upstart/ucoach-rooster.log", "/var/log/upstart/ucoach-student.log" ]) {
          pipeline { send_to => "ucoach-service-logs" }
        } else if [field][document_type] == "itk_service_log" or [fields][document_type] == "itk_service_log" or [document_type] == "itk_service_log"
            or ([source] in [ "/srv/itk-coaching/log/compute_aggregates.log", "/var/log/upstart/itk-api.log", "/var/log/upstart/itk-batch.log", "/var/log/upstart/itk-chat-monitor.log", "/var/log/upstart/itk-coaching.log",
              "/var/log/upstart/itk-dead-letter.log", "/var/log/upstart/itk-necromancer.log", "/var/log/upstart/itk-profilex.log", "/var/log/upstart/itk-students.log" ]) {
          pipeline { send_to => "itk-service-logs" }
        } else if [field][document_type] == "itklog" or [fields][document_type] == "itklog" or [document_type] == "itklog" {
          pipeline { send_to => "itk-logs" }
        } else if [field][document_type] == "itk_misc_log" or [fields][document_type] == "itk_misc_log" or [document_type] == "itk_misc_log" {
          pipeline { send_to => "itk-misc-logs" }
        } else if [field][document_type] == "syslog" or [fields][document_type] == "syslog" or [document_type] == "syslog" {
          pipeline { send_to => "syslog-logs" }
        } else if [field][document_type] == "puma_log" or [fields][document_type] == "puma_log" or [document_type] == "puma_log" {
          pipeline { send_to => "puma-logs" }
        } else if [field][document_type] == "appsignal_log" or [fields][document_type] == "appsignal_log" or [document_type] == "appsignal_log" {
          pipeline { send_to => "appsignal-logs" }
        } else if [field][document_type] == "unicorn_stdout" or [fields][document_type] == "unicorn_stdout" or [document_type] == "unicorn_stdout"
            or [field][document_type] == "unicorn_stderr" or [fields][document_type] == "unicorn_stderr" or [document_type] == "unicorn_stderr" {
          pipeline { send_to => "unicorn-logs" }
        } else if [field][document_type] == "filebeat" or [fields][document_type] == "filebeat" or [document_type] == "filebeat" or [kubernetes][labels][k8s-app] == "filebeat" {
          pipeline { send_to => "filebeat-logs" }
        } else if [field][document_type] == "metricbeat" or [fields][document_type] == "metricbeat" or [document_type] == "metricbeat" or [kubernetes][labels][k8s-app] == "metricbeat" {
          pipeline { send_to => "metricbeat-logs" }
        } else if [field][document_type] == "kibana" or [fields][document_type] == "kibana" or [document_type] == "kibana" {
          pipeline { send_to => "kibana-logs" }
        } else if [fileset][module] == "system" {
          pipeline { send_to => "system-module-logs" }
        } else if [field][document_type] == "generic_upstart_log" or [fields][document_type] == "generic_upstart_log" or [document_type] == "generic_upstart_log" {
          pipeline { send_to => "generic-upstart-logs" }
        } else {
          pipeline { send_to => "generic-logs" }
        }
      } else if [fileset][module] == "nginx" {
        pipeline { send_to => "nginx-module-logs" }
      } else if "_grokparsesuccess" not in [tags] {
        pipeline { send_to => "grokparsefailure-logs" }
      }
    }
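# The virtual addresses used in the send_to statements above match the pipeline
# ids below; each referenced conf file is expected to declare the corresponding
# address with a pipeline input (assumed — those files are not part of this gist),
# e.g. input { pipeline { address => "kubernetes-ruby-logs" } }.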
- pipeline.id: kubernetes-ruby-logs
  pipeline.workers: 1
  path.config: "/etc/logstash/conf.d/kubernetes-ruby-log-filter.conf"
- pipeline.id: kubernetes-elixir-logs
  pipeline.workers: 1
  path.config: "/etc/logstash/conf.d/kubernetes-elixir-log-filter.conf"
- pipeline.id: kubernetes-nodejs-logs
  pipeline.workers: 1
  path.config: "/etc/logstash/conf.d/kubernetes-nodejs-log-filter.conf"
- pipeline.id: ucoach-service-logs
  pipeline.workers: 1
  path.config: "/etc/logstash/conf.d/10-ucoach-service-log-filter.conf"
- pipeline.id: itk-service-logs
  pipeline.workers: 1
  path.config: "/etc/logstash/conf.d/11-itk-service-log-filter.conf"
- pipeline.id: itk-logs
  pipeline.workers: 1
  path.config: "/etc/logstash/conf.d/12-itk-log-filter.conf"
- pipeline.id: itk-misc-logs
  pipeline.workers: 1
  path.config: "/etc/logstash/conf.d/12-itk-misc-log-filter.conf"
- pipeline.id: syslog-logs
  pipeline.workers: 1
  path.config: "/etc/logstash/conf.d/14-syslog-log-filter.conf"
- pipeline.id: puma-logs
  pipeline.workers: 1
  path.config: "/etc/logstash/conf.d/17-puma-log-filter.conf"
- pipeline.id: appsignal-logs
  pipeline.workers: 1
  path.config: "/etc/logstash/conf.d/18-appsignal-log-filter.conf"
- pipeline.id: unicorn-logs
  pipeline.workers: 1
  path.config: "/etc/logstash/conf.d/19-unicorn-log-filter.conf"
- pipeline.id: filebeat-logs
  pipeline.workers: 1
  path.config: "/etc/logstash/conf.d/21-filebeat-filter.conf"
- pipeline.id: metricbeat-logs
  pipeline.workers: 1
  path.config: "/etc/logstash/conf.d/22-metricbeat-filter.conf"
- pipeline.id: kibana-logs
  pipeline.workers: 1
  path.config: "/etc/logstash/conf.d/23-kibana-filter.conf"
- pipeline.id: system-module-logs
  pipeline.workers: 1
  path.config: "/etc/logstash/conf.d/24-system-module-filter.conf"
- pipeline.id: nginx-module-logs
  pipeline.workers: 1
  path.config: "/etc/logstash/conf.d/25-nginx-module-filter.conf"
- pipeline.id: generic-upstart-logs
  pipeline.workers: 1
  path.config: "/etc/logstash/conf.d/26-generic-upstart-log-filter.conf"
- pipeline.id: generic-logs
  pipeline.workers: 1
  path.config: "/etc/logstash/conf.d/29-generic-log-filter.conf"
- pipeline.id: grokparsefailure-logs
  pipeline.workers: 1
  path.config: "/etc/logstash/conf.d/grokparsefailure-log-filter.conf"
- pipeline.id: elasticsearch
  pipeline.workers: 2
  config.string: |
    output {
      elasticsearch {
        hosts => ["<elk cluster>:9200"]
        sniffing => false
        manage_template => false
        index => "%{[@metadata][beat]}-%{+YYYY.MM.dd}"
        document_type => "doc"
        id => "elasticsearch_output"
      }
    }
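# For reference, a downstream filter pipeline such as kubernetes-ruby-logs is
# presumably wired like the sketch below. The pipeline input address follows
# from the send_to values in beats-intake; the filter body and the
# "elasticsearch" virtual address are assumptions, since those conf files are
# not part of this gist.
#
#   # /etc/logstash/conf.d/kubernetes-ruby-log-filter.conf (hypothetical sketch)
#   input {
#     pipeline { address => "kubernetes-ruby-logs" }
#   }
#   filter {
#     # parsing specific to the Ruby services routed here would go in this block
#   }
#   output {
#     pipeline { send_to => "elasticsearch" }
#   }
#
# For events to reach the elasticsearch pipeline defined above, its
# config.string would also need a matching input, e.g.
# input { pipeline { address => "elasticsearch" } }, which is assumed to exist
# in the original deployment.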