Skip to content

Instantly share code, notes, and snippets.

@seriald
Last active March 19, 2016 11:30
Show Gist options
  • Save seriald/6d0d75a052ac0678f747 to your computer and use it in GitHub Desktop.
Configure Elasticsearch Logstash Kibana

Confirm and update Hostname

# Edit the hosts file and hostname so the machine resolves its own name
# correctly before installing the ELK stack.
sudo nano /etc/hosts
sudo nano /etc/hostname

Install Java 8

# Add the WebUpd8 Oracle Java PPA and install Java 8.
sudo add-apt-repository -y ppa:webupd8team/java
# 'sudo' is required on the install step too — the original ran a plain
# 'apt-get install' after the '&&', which fails for non-root users.
sudo apt-get update && sudo apt-get -y install oracle-java8-installer

Test and Configure java

# Verify Java installed correctly, then make Java 8 the system default.
java -version
# '-y' keeps this consistent with the other install steps in this guide
# so the whole procedure can run unattended.
sudo apt-get -y install oracle-java8-set-default

Add package repos

# Import the Elastic signing key and register the three package repos.
wget -qO - https://packages.elastic.co/GPG-KEY-elasticsearch | sudo apt-key add -
# Each repo gets its own file under sources.list.d (the original appended
# the logstash/kibana lines to /etc/apt/sources.list with 'tee -a', which
# is inconsistent with the elasticsearch line and duplicates the entries
# every time the guide is re-run).
echo "deb http://packages.elastic.co/elasticsearch/2.x/debian stable main" | sudo tee /etc/apt/sources.list.d/elasticsearch-2.x.list
echo "deb http://packages.elastic.co/logstash/2.2/debian stable main" | sudo tee /etc/apt/sources.list.d/logstash-2.2.list
echo "deb http://packages.elastic.co/kibana/4.4/debian stable main" | sudo tee /etc/apt/sources.list.d/kibana-4.4.list

Installing Elasticsearch Logstash Kibana

sudo apt-get update && sudo apt-get -y install elasticsearch logstash kibana4

Setup ELK services

# Register SysV init scripts so all three services start on boot
# (start priority 95, stop priority 10).
sudo update-rc.d elasticsearch defaults 95 10
sudo update-rc.d logstash defaults 95 10
sudo update-rc.d kibana defaults 95 10

Configure SSL

# Create the certificate/key directories.  '-p' on both mkdir calls makes
# this section idempotent — the original second mkdir failed when the
# directory already existed.
sudo mkdir -p /etc/pki/tls/certs
sudo mkdir -p /etc/pki/tls/private
cd /etc/pki/tls
# Self-signed cert valid 10 years; the CN ('elk') must match the hostname
# clients use to reach this server.
sudo openssl req -x509 -nodes -newkey rsa:2048 -days 3650 -keyout private/logstash-forwarder.key -out certs/logstash-forwarder.crt -subj /CN=elk

Configure Logstash

nano /etc/logstash/conf.d/01-inputs.conf

Paste the following

#logstash-forwarder [Not utilized by pfSense by default]
#input {
#  lumberjack {
#    port => 5000
#    type => "logs"
#    ssl_certificate => "/etc/pki/tls/certs/logstash-forwarder.crt"
#    ssl_key => "/etc/pki/tls/private/logstash-forwarder.key"
#  }
#}

# TCP syslog stream via 5140.  Logstash runs as an unprivileged user and
# cannot bind ports below 1024, so the original port 514 would fail; 5140
# also matches the pfSense remote-syslog target documented later in this
# guide ("send logs through Syslog to port 5140").
input {
  tcp {
    type => "syslog"
    port => 5140
  }
}
# UDP syslog stream via 5140
input {
  udp {
    type => "syslog"
    port => 5140
  }
}
nano /etc/logstash/conf.d/10-syslog.conf
# Tag incoming syslog events: messages from the pfSense host get the
# "PFSense" and "Ready" tags; everything else gets a plain "syslog" tag.
filter {  
  if [type] == "syslog" {

    #change to pfSense ip address
    if [host] =~ /10\.1\.0\.1/ {
      mutate {
        add_tag => ["PFSense", "Ready"]
      }
    }

    # Anything not matched above is treated as generic syslog.
    if "Ready" not in [tags] {
      mutate {
        add_tag => [ "syslog" ]
      }
    }
  }
}

# "Ready" is only a routing marker used by the condition above; strip it
# so it does not end up on the stored event.
filter {  
  if [type] == "syslog" {
    mutate {
      remove_tag => "Ready"
    }
  }
}

# Parse generic syslog lines: extract timestamp, host, program, pid and
# message, then replace the event @timestamp with the syslog timestamp.
filter {  
  if "syslog" in [tags] {
    grok {
      match => { "message" => "%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{DATA:syslog_program}(?:\[%{POSINT:syslog_pid}\])?: %{GREEDYDATA:syslog_message}" }
      add_field => [ "received_at", "%{@timestamp}" ]
      add_field => [ "received_from", "%{host}" ]
    }
    # Decode the syslog priority field into facility/severity fields.
    syslog_pri { }
    date {
      # Two patterns: syslog pads single-digit days with an extra space.
      match => [ "syslog_timestamp", "MMM  d HH:mm:ss", "MMM  dd HH:mm:ss" ]
      locale => "en"
    }

    # Only rewrite source/message when grok actually matched.
    if !("_grokparsefailure" in [tags]) {
      mutate {
        replace => [ "@source_host", "%{syslog_hostname}" ]
        replace => [ "@message", "%{syslog_message}" ]
      }
    }

    # Drop the intermediate fields now that they have been copied.
    mutate {
      remove_field => [ "syslog_hostname", "syslog_message", "syslog_timestamp" ]
    }
# Uncomment to silently discard lines that failed to parse.
#    if "_grokparsefailure" in [tags] {
#      drop { }
#    }
  }
}
sudo nano /etc/logstash/conf.d/30-outputs.conf
# Ship parsed events to the local Elasticsearch node (daily indices) and
# echo them to stdout for debugging.
output {
  # The host list must be a quoted array — the original bareword
  # 'hosts => localhost' is not valid Logstash config syntax.
  elasticsearch { hosts => ["localhost:9200"] index => "logstash-%{+YYYY.MM.dd}" }
  stdout { codec => rubydebug }
}
sudo nano /etc/logstash/conf.d/11-pfsense.conf
# Parse pfSense-tagged events: split the raw syslog frame into
# evtid / datetime / prog / msg, then normalize the timestamp, and run
# firewall (filterlog) lines through the dedicated pfSense grok patterns.
filter {  
  if "PFSense" in [tags] {
    grok {
      add_tag => [ "firewall" ]
      # Matches "<evtid>Mon DD HH:MM:SS prog: msg" as emitted by pfSense.
      match => [ "message", "<(?<evtid>.*)>(?<datetime>(?:Jan(?:uary)?|Feb(?:ruary)?|Mar(?:ch)?|Apr(?:il)?|May|Jun(?:e)?|Jul(?:y)?|Aug(?:ust)?|Sep(?:tember)?|Oct(?:ober)?|Nov(?:ember)?|Dec(?:ember)?)\s+(?:(?:0[1-9])|(?:[12][0-9])|(?:3[01])|[1-9]) (?:2[0123]|[01]?[0-9]):(?:[0-5][0-9]):(?:[0-5][0-9])) (?<prog>.*?): (?<msg>.*)" ]
    }
    # Collapse the double space before single-digit days so the date
    # pattern below ("MMM dd") matches.
    mutate {
      gsub => ["datetime","  "," "]
    }
    date {
      match => [ "datetime", "MMM dd HH:mm:ss" ]
      timezone => "UTC"
    }
    # Replace the full frame with just the message payload, then drop
    # the intermediate capture fields.
    mutate {
      replace => [ "message", "%{msg}" ]
    }
    mutate {
      remove_field => [ "msg", "datetime" ]
    }
}
# Firewall log lines get the detailed pfSense grok patterns (downloaded
# below into patterns_dir) plus GeoIP enrichment on the source address.
if [prog] =~ /^filterlog$/ {  
    mutate {
      remove_field => [ "msg", "datetime" ]
    }
    grok {
      patterns_dir => "/etc/logstash/conf.d/patterns"
      match => [ "message", "%{PFSENSE_LOG_DATA}%{PFSENSE_IP_SPECIFIC_DATA}%{PFSENSE_IP_DATA}%{PFSENSE_PROTOCOL_DATA}",
         "message", "%{PFSENSE_LOG_DATA}%{PFSENSE_IPv4_SPECIFIC_DATA_ECN}%{PFSENSE_IP_DATA}%{PFSENSE_PROTOCOL_DATA}" ]
    }
    mutate {
      lowercase => [ 'proto' ]
    }
    geoip {
      add_tag => [ "GeoIP" ]
      source => "src_ip"
      # Optional GeoIP database
      database => "/etc/logstash/GeoLiteCity.dat"
    }
  }
}
# Download the pfSense grok patterns and the GeoLite city database.
# 'sudo' is required on mkdir (the directory lives under root-owned
# /etc/logstash, consistent with every other command in this section);
# '-p' makes the step idempotent on re-runs.
sudo mkdir -p /etc/logstash/conf.d/patterns
cd /etc/logstash/conf.d/patterns
sudo wget https://gist.githubusercontent.com/elijahpaul/3d80030ac3e8138848b5/raw/abba6aa8398ba601389457284f7c34bbdbbef4c7/pfsense2-2.grok
cd /etc/logstash
sudo curl -O "http://geolite.maxmind.com/download/geoip/database/GeoLiteCity.dat.gz"
sudo gunzip GeoLiteCity.dat.gz

Testing Elasticsearch

curl -X GET http://localhost:9200

The output should look similar to:
{
  "name" : "Volcana",
  "cluster_name" : "elasticsearch",
  "version" : {
    "number" : "2.2.1",
    "build_hash" : "d045fc29d1932bce18b2e65ab8b297fbf6cd41a1",
    "build_timestamp" : "2016-03-09T09:38:54Z",
    "build_snapshot" : false,
    "lucene_version" : "5.4.1"
  },
  "tagline" : "You Know, for Search"
}

Configure pfSense to send logs through Syslog to port 5140

Testing Logstash

Logstash server logs are stored in the following file,

cat /var/log/logstash/logstash.log

Logs from pfSense can be viewed by,

tail -f /var/log/logstash/logstash.stdout
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment