---
logstash_config:
  input_01_cakephp_udp:
    type: "input"
    kind: "udp"
    comment: "Read CakePHP application logs from UDP"
    config:
      host: 127.0.0.1
      port: 1337
      codec: 'json { charset => "UTF-8" }'
  input_01_cakephp_tcp:
    type: "input"
    kind: "tcp"
    comment: "Read CakePHP application logs from TCP"
    config:
      host: 127.0.0.1
      port: 1337
      codec: 'json { charset => "UTF-8" }'
  input_02_syslog_tcp:
    type: "input"
    kind: "tcp"
    comment: "Read syslog messages from TCP"
    config:
      port: 5000
      type: "syslog"
  input_02_syslog_udp:
    type: "input"
    kind: "udp"
    comment: "Read syslog messages from UDP"
    config:
      port: 5000
      type: "syslog"
  input_07_file_redis:
    type: "input"
    kind: "file"
    comment: "Tail Redis log"
    config:
      path: "/var/log/redis/redis-server.log"
      type: "redis"
  input_08_mysql_slow:
    type: "input"
    kind: "file"
    config:
      path: "/var/log/mysql/slow.log"
      type: "mysql_slow"
      format: 'plain'
  filter_001_ensure_type:
    type: "filter"
    kind: "alter"
    comment: "Make sure to have the type derived correctly from both syslog and files"
    config:
      coalesce: [ 'type', '{type}', '{@type}' ]
  filter_000_parse_syslog:
    template: "logstash/syslog.conf.erb"
  filter_010_syslog_pri:
    type: "filter"
    kind: "syslog_pri"
    comment: "Expand the syslog priority / facility into human readable names"
    config:
      tags: [ "cakephp" ]
  filter_014_cron_rewrite_cron:
    type: "filter"
    kind: "grok"
    comment: "Rewrite crontab 'program' field to be just CRON"
    config:
      match:
        program: "/USR/SBIN/CRON"
      remove_field: ["program"]
      add_field:
        program: "CRON"
      tag_on_failure: [ ]
  filter_015_cron:
    type: "filter"
    kind: "grok"
    comment: "Parse Cron messages to extract more information about the execution"
    config:
      match:
        program: "CRON"
        message: "{CRON}"
      patterns_dir: "%{logstash::patterns_dir}"
      remove_field: ["message"]
      add_field:
        message: "{message}"
      tag_on_failure: [ ]
  filter_020_postfix_match:
    type: "filter"
    kind: "grok"
    comment: "Sniff if a syslog message is from postfix"
    config:
      type: "syslog"
      match:
        program: "postfix/{PROG}"
      add_tag: [ "postfix", "do_kv" ]
      tag_on_failure: [ ]
  filter_021_postfix_queue_id:
    type: "filter"
    kind: "grok"
    comment: "Parse postfix logs to extract the queue_id"
    config:
      match:
        message: "{POSTFIX_QUEUEID:queue_id}: {GREEDYDATA}"
      tags: [ "postfix" ]
      tag_on_failure: [ ]
      patterns_dir: "%{logstash::patterns_dir}"
  filter_022_expand_kv:
    type: "filter"
    kind: "kv"
    comment: "Expand key=value pairs into field => value"
    config:
      trim: '<>\[\],'
      tags: [ "do_kv" ]
  filter_055_php_error_multiline:
    type: "filter"
    kind: "multiline"
    config:
      type: 'php_error'
      pattern: '^(\s|#|Stack)'
      what: 'previous'
  filter_055_php_fpm_slow:
    type: "filter"
    kind: "grok"
    config:
      type: 'php_fpm_slow'
      pattern: '{PHP_FPM_SLOW_LOG}'
      patterns_dir: "%{logstash::patterns_dir}"
      singles: true
      tag_on_failure: [ ]
  filter_056_php_fpm_slow:
    type: "filter"
    kind: "multiline"
    config:
      type: "php_fpm_slow"
      pattern: '^$'
      what: 'previous'
      negate: true
  filter_057_php_error:
    type: "filter"
    kind: "grok"
    config:
      type: 'php_error'
      pattern: '{PHP_ERROR_LOG}'
      patterns_dir: "%{logstash::patterns_dir}"
      singles: true
  filter_058_php_fpm_error:
    type: "filter"
    kind: "grok"
    config:
      type: 'php_fpm_error'
      pattern: '{PHP_FPM_ERROR_LOG}'
      patterns_dir: "%{logstash::patterns_dir}"
      singles: true
  filter_060_php_fpm_access_ip:
    type: "filter"
    kind: "grok"
    config:
      type: 'php_fpm_access'
      match:
        ip: '-'
      remove_field: [ 'ip' ]
      add_field: { ip: '' }
  filter_060_php_fpm_access_app_version:
    type: "filter"
    kind: "grok"
    config:
      type: 'php_fpm_access'
      match:
        app_version: '-'
      remove_field: [ 'app_version' ]
      add_field: { app_version: '' }
  filter_060_php_fpm_access_app_env:
    type: "filter"
    kind: "grok"
    config:
      type: 'php_fpm_access'
      match:
        app_env: '-'
      remove_field: [ 'app_env' ]
      add_field: { app_env: '' }
  filter_070_redis:
    type: "filter"
    kind: "grok"
    config:
      type: "redis"
      match:
        message: "{REDISLOG}"
  # https://github.com/logstash/logstash/blob/master/spec/examples/mysql-slow-query.rb
  # https://gist.github.com/jordansissel/3753353
  filter_079_mysql_slow_clean:
    type: "filter"
    kind: "grep"
    config:
      type: "mysql_slow"
      negate: true
      match:
        message: "^# Time: "
  filter_080_mysql_slow_multi:
    type: "filter"
    kind: "multiline"
    config:
      type: "mysql_slow"
      pattern: "^# User"
      negate: true
      what: "previous"
  filter_081_mysql_slow_grok:
    type: "filter"
    kind: "grok"
    config:
      type: "mysql_slow"
      singles: true
      pattern:
        - '^# User@Host: {USER:mysql_user}\[[^\]]+\] @ {HOST:mysql_host} \[{IP:mysql_ip}?] \s*Id: {NUMBER:id:int}'
        - '^# Schema: {USER:mysql_schema} \s*Last_errno: {NUMBER:last_error_no:int} \s*Killed: {NUMBER:killed:int}'
        - '^# Query_time: {NUMBER:duration:float} \s*Lock_time: {NUMBER:lock_wait:float} \s*Rows_sent: {NUMBER:results:int} \s*Rows_examined: {NUMBER:scanned:int} \s*Rows_affected: {NUMBER:affected:int}'
        - '^# Bytes_sent: {NUMBER:bytes_sent:int}'
        - '^SET timestamp={NUMBER:timestamp};'
  filter_800_php_access_convert:
    type: "filter"
    kind: "mutate"
    config:
      # type: 'php_fpm_access'
      convert:
        cpu: float
        duration: float
        memory: integer
        child_pid: integer
        parent_pid: integer
        status: integer
  filter_800_sendgrid_convert:
    type: "filter"
    kind: "mutate"
    config:
      # type: "sendgrid"
      convert:
        user: integer
        location: integer
        attempt: integer
        status: integer
  filter_800_import_convert:
    type: "filter"
    kind: "mutate"
    config:
      # type: "import"
      convert:
        feed_id: integer
  # filter_800_clicks_convert:
  #   type: "filter"
  #   kind: "mutate"
  #   config:
  #     type: [ "clicks" ]
  #     convert:
  #       location: integer
  filter_800_cakephp_convert:
    type: "filter"
    kind: "mutate"
    config:
      # tags: [ "cakephp" ]
      convert:
        app_version: integer
        level: integer
        php_pid: integer
  output_02_es:
    type: "output"
    kind: "elasticsearch_http"
    comment: "Write all logs to elasticsearch"
    config:
      host: "localhost"
      port: 9200
      codec: 'json { charset => "UTF-8" }'
      index_type: "{type}"
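
The hash above is plain Hiera data; something still has to turn each entry into a logstash::configfile resource (defined below). A minimal sketch of that glue, assuming Puppet 3 era hiera functions; the wrapper class name is made up, only the 'logstash_config' key comes from the gist:

# Hypothetical wrapper class, not part of the gist: creates one
# logstash::configfile resource per entry in the logstash_config hash.
class profile::logstash_config {
  $configs = hiera_hash('logstash_config', {})
  create_resources('logstash::configfile', $configs)
}
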
define logstash::configfile(
  $template = 'logstash/default.conf.erb',
  $type     = undef,
  $kind     = undef,
  $config   = {},
  $comment  = undef
) {
  if ($comment == undef) {
    $real_comment = "${type} - ${kind}"
  } else {
    $real_comment = $comment
  }

  file { "${logstash::configdir}/conf.d/${name}.conf":
    content => template($template),
    owner   => $logstash::logstash_user,
    group   => $logstash::logstash_group,
    notify  => Service[$logstash::service_name],
    require => File["${logstash::configdir}/conf.d"]
  }
}
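
For reference, one of these resources declared directly (without Hiera) would look roughly like this; the parameter values are copied from the output_02_es entry above, the declaration itself is only illustrative:

# Assumed direct usage of the define above; writes
# ${logstash::configdir}/conf.d/output_02_es.conf from the default template.
logstash::configfile { 'output_02_es':
  type    => 'output',
  kind    => 'elasticsearch_http',
  comment => 'Write all logs to elasticsearch',
  config  => {
    'host'       => 'localhost',
    'port'       => 9200,
    'codec'      => 'json { charset => "UTF-8" }',
    'index_type' => '{type}',
  },
}
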
<%
  # Render a single config value as logstash config syntax.
  # "{" is expanded to "%{", so the Hiera data can leave out the "%" of
  # logstash field references / grok patterns (presumably to keep Hiera
  # from interpolating them itself).
  def output_value(key, value)
    if (value.is_a?(Hash))
      # Hashes are flattened into logstash array syntax: ["key", "value", ...]
      value = value.flatten.to_s.gsub /{/, "%{"
    elsif (value.is_a?(Array))
      # Arrays become a quoted, comma separated list wrapped in brackets
      value = value.map! { |x|
        "\"#{x}\""
      }
      .join(', ')
      .gsub /{/, '%{'
      value = "[#{value}]"
    elsif (value.is_a?(String))
      # Strings are quoted, except codecs, which are emitted verbatim
      if key != 'codec'
        value = "\"#{value}\""
        value = value.gsub "{", "%{"
      end
    end
    value
  end
-%>
# <%= @real_comment %>
<%= @type -%> {
  <%= @kind -%> {
<% @config.each do |key, value| -%>
    <%= key %> => <%= output_value key, value %>
<% end -%>
  }
}
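
For illustration, the input_01_cakephp_udp entry from the Hiera data should render through this template to roughly the following conf.d file (reconstructed by hand, not taken from an actual run):

# Read CakePHP application logs from UDP
input {
  udp {
    host => "127.0.0.1"
    port => 1337
    codec => json { charset => "UTF-8" }
  }
}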