# pulls metrics using aws cli
input {
  exec {
    command => "@powershell $start=([DateTime]::Today).ToUniversalTime().ToString(\"O\");$end=([DateTime]::Today).AddDays(1).ToUniversalTime().ToString(\"O\");aws cloudwatch get-metric-statistics --metric-name EstimatedCharges --start-time $start --end-time $end --period 1440 --namespace AWS/Billing --statistics Sum --dimensions Name=Currency`,Value=USD"
    interval => 3600
    codec => json
  }
}

filter {
  ruby {
    code => "event['EstimatedCharges'] = event['Datapoints'].sort_by! {|x| x['Timestamp']}.last"
  }
  # Set the event timestamp from the log
  date {
    match => [ "[EstimatedCharges][Timestamp]", "ISO8601" ]
    timezone => "Etc/UCT"
  }
}

output {
  statsd {
    host => "192.168.50.2"
    gauge => { "aws.estimatedcharges" => "%{[EstimatedCharges][Sum]}" }
  }
}
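The ruby filter above expects the raw get-metric-statistics JSON, i.e. a Datapoints array it can sort by Timestamp. A quick way to eyeball that shape from any shell with the AWS CLI configured (a sketch using GNU date; the output values are illustrative, not real billing data):

# Same call the exec input makes, minus the PowerShell date handling
aws cloudwatch get-metric-statistics \
  --metric-name EstimatedCharges \
  --namespace AWS/Billing \
  --statistics Sum \
  --period 1440 \
  --dimensions Name=Currency,Value=USD \
  --start-time "$(date -u +%Y-%m-%dT00:00:00Z)" \
  --end-time "$(date -u -d tomorrow +%Y-%m-%dT00:00:00Z)"
# Expected shape (values made up):
# {
#   "Datapoints": [
#     { "Timestamp": "2015-02-10T00:00:00Z", "Sum": 12.34, "Unit": "None" }
#   ],
#   "Label": "EstimatedCharges"
# }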
# Install Elasticsearch 1.4 from the official yum repository
sudo rpm --import https://packages.elasticsearch.org/GPG-KEY-elasticsearch
sudo tee /etc/yum.repos.d/elasticsearch.repo > /dev/null <<CONFIG
[elasticsearch-1.4]
name=Elasticsearch repository for 1.4.x packages
baseurl=http://packages.elasticsearch.org/elasticsearch/1.4/centos
gpgcheck=1
gpgkey=http://packages.elasticsearch.org/GPG-KEY-elasticsearch
enabled=1
CONFIG
sudo yum -y update
sudo yum -y upgrade
sudo yum install -y java-1.7.0-openjdk elasticsearch
sudo chkconfig --add elasticsearch
sudo /etc/init.d/elasticsearch start
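Once the service is up, a quick check from the same box (Elasticsearch listens on port 9200 by default):

# Should return a JSON banner with the node name and a 1.4.x version number
curl -s http://localhost:9200/
# Single-node cluster health is normally yellow (no replicas), which is fine here
curl -s 'http://localhost:9200/_cluster/health?pretty'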
# Install OpenJDK
sudo yum install -y java-1.7.0-openjdk
# Install InfluxDB
sudo yum -y install wget
wget https://s3.amazonaws.com/influxdb/influxdb-latest-1.x86_64.rpm
sudo rpm -ivh influxdb-latest-1.x86_64.rpm
sudo /etc/init.d/influxdb start
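# Optional: give InfluxDB a moment to open its HTTP API (port 8086) before
# anything below talks to it
for i in $(seq 1 30); do
  curl -s -o /dev/null http://localhost:8086 && break
  sleep 1
done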
# Install StatsD and InfluxDB Backend
sudo yum -y install epel-release
sudo yum -y install git nodejs python python-setuptools
cd /opt
sudo git clone https://github.com/etsy/statsd.git
sudo yum -y install npm
sudo npm install statsd-influxdb-backend -d
# Install Apache
sudo yum -y install httpd
sudo chkconfig --level 235 httpd on
sudo systemctl start httpd.service
cd ~
wget http://grafanarel.s3.amazonaws.com/grafana-1.9.1.tar.gz
tar xvf grafana-1.9.1.tar.gz
sudo mv grafana-1.9.1/ /var/www/html/grafana
sudo wget https://raw.githubusercontent.com/curtismitchell/vagrant-statsd-grafana/master/config.grafana.js -O /var/www/html/grafana/config.js
sudo restorecon -r /var/www/html
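# Optional: confirm Apache is serving the Grafana UI before moving on
curl -sI http://localhost/grafana/index.html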
# Create two databases: demo and dash
curl -X POST 'http://localhost:8086/db?u=root&p=root' -d '{"name": "demo"}'
curl -X POST 'http://localhost:8086/db?u=root&p=root' -d '{"name": "dash"}'
# Start StatsD
sudo wget https://raw.githubusercontent.com/curtismitchell/vagrant-statsd-grafana/master/statsd -O /etc/init.d/statsd
sudo chmod +x /etc/init.d/statsd
sudo chkconfig --add statsd
sudo systemctl start statsd
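With everything running, a throwaway counter confirms the StatsD-to-InfluxDB path end to end. This sketch assumes the statsd-influxdb-backend is writing to the demo database created above and that nc is installed (yum install -y nc); adjust the database name to match the StatsD config pulled in by the init script.

# Send a test counter to StatsD over UDP
echo "deploy.test:1|c" | nc -u -w1 localhost 8125
# StatsD flushes every 10 seconds by default; then list what InfluxDB 0.8 has stored
sleep 12
curl -sG 'http://localhost:8086/db/demo/series' \
  --data-urlencode "u=root" --data-urlencode "p=root" \
  --data-urlencode "q=list series"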
# credit: http://improveandrepeat.com/2014/11/using-logstash-to-analyse-iis-log-files-with-kibana/
input {
  file {
    type => "iis"
    path => "C:/inetpub/logs/LogFiles/W3SVC*/*.log"
    start_position => "beginning"
    tags => ["web", "system"]
  }
  eventlog {
    type => "Win32-EventLog"
    logfile => "Application"
    tags => ["app"]
  }
  eventlog {
    type => "Win32-EventLog"
    logfile => "System"
    tags => ["system", "security"]
  }
}

filter {
  if [type] == "iis" {
    # ignore log comments
    if [message] =~ "^#" {
      drop {}
    }
    mutate {
      add_field => [ "hostip", "%{host}" ]
    }
    dns {
      reverse => [ "host" ]
      action => "replace"
    }
    grok {
      # check that fields match your IIS log settings
      type => "iis"
      pattern => [ "%{TIMESTAMP_ISO8601:log_timestamp} %{NOTSPACE:site} %{IPORHOST:computername} %{IPORHOST:ip} %{WORD:method} %{URIPATH:page} %{NOTSPACE:querystring} %{NUMBER:port} %{NOTSPACE:username} %{IPORHOST:clienthost} %{NOTSPACE:httpversion} %{NOTSPACE:useragent} %{NOTSPACE:cookie} %{NOTSPACE:referrer} %{IPORHOST:hostname} %{NUMBER:status} %{NUMBER:substatus} %{NUMBER:scstatus} %{NUMBER:bytesreceived} %{NUMBER:bytessent} %{NUMBER:ms_time_taken}" ]
    }
    # Set the event timestamp from the log
    date {
      match => [ "log_timestamp", "YYYY-MM-dd HH:mm:ss" ]
      timezone => "Etc/UCT"
    }
    useragent {
      source => "useragent"
      prefix => "browser"
    }
    mutate {
      remove_field => [ "log_timestamp" ]
    }
  }
}

# See documentation for different protocols:
# http://logstash.net/docs/1.4.2/outputs/elasticsearch
output {
  elasticsearch {
    host => "192.168.50.2"
    protocol => "http"
  }
}
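After Logstash has shipped a minute or two of IIS traffic, the parsed events can be checked against the Elasticsearch box (index names assume the default logstash-YYYY.MM.dd pattern):

# Pull one parsed IIS event and confirm the grok fields (status, page, browser*, ...) are populated
curl -s 'http://192.168.50.2:9200/logstash-*/_search?q=type:iis&size=1&pretty'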
input {
  wmi {
    query => "select PercentProcessorTime from Win32_PerfFormattedData_PerfOS_Processor where name = '_Total'"
    interval => 10
    type => "cpu"
  }
  wmi {
    query => "select FreePhysicalMemory, TotalVisibleMemorySize from Win32_OperatingSystem"
    interval => 10
    type => "memory"
  }
  wmi {
    query => "select BytesReceivedPersec, BytesSentPersec, BytesTotalPersec from Win32_PerfRawData_Tcpip_NetworkInterface where BytesSentPersec > 0"
    interval => 10
    type => "network"
  }
}

filter {
  mutate {
    convert => ["PercentProcessorTime", "float"]
    convert => ["FreePhysicalMemory", "float"]
    convert => ["TotalVisibleMemorySize", "float"]
    convert => ["BytesReceivedPersec", "float"]
    convert => ["BytesSentPersec", "float"]
    convert => ["BytesTotalPersec", "float"]
  }
}
output {
  statsd {
    host => "192.168.50.2"
    count => {
      "cpu" => "%{[PercentProcessorTime]}"
      "totalmemory" => "%{[TotalVisibleMemorySize]}"
      "freememory" => "%{[FreePhysicalMemory]}"
    }
  }
  statsd {
    host => "192.168.50.2"
    count => {
      "bytesreceivedpersec" => "%{[BytesReceivedPersec]}"
      "bytessentpersec" => "%{[BytesSentPersec]}"
      "bytestotalpersec" => "%{[BytesTotalPersec]}"
    }
  }
}
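Once this config is running on the Windows host, StatsD's management interface on the monitoring box (TCP 8126 by default) is the quickest way to confirm the counters are arriving:

# Run on the monitoring box; the cpu, freememory, totalmemory and bytes*persec
# keys should show up after the first few polls
echo "counters" | nc -w1 localhost 8126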