@chrisdlangton
Last active August 22, 2017 06:51
Network Tap monitoring
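The scripts below fit together as a small pipeline: tshark writes short CSV captures of a network tap, the archives land on a NAS share, scheduled jobs push the compressed archives to S3, and a pandas script pulls them back down for analysis. The hour-based timestamps suggest the capture and upload scripts are meant to be driven by cron or similar. A minimal crontab sketch, assuming the capture script is saved as /opt/tap/capture.sh and the upload script as /opt/tap/upload.sh (hypothetical paths, not part of this gist):

* * * * * /opt/tap/capture.sh 60 eth0     # back-to-back 60-second capture windows
5 * * * * /opt/tap/upload.sh              # ship the previous hour's archives to S3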
#!/usr/bin/env bash
# Sync every compressed capture on the NAS share to S3 via a temporary staging directory.
AWS="${HOME}/.local/bin/aws"
DATE=$(date "+%Y%m%d-%H")
BUCKET="<change me>"
AWS_PROFILE="<change me>"
BACKUPDIR=/mnt/share/backup/tshark/
TMPDIR="/tmp/${DATE}/"
LOG_DIR="$(pwd)/"
LOGNAME="capture.log"

echo "Creating ${TMPDIR}"
mkdir -p "${TMPDIR}" && \
  echo "ok"
echo "Copy from NAS ${BACKUPDIR}"
cp "${BACKUPDIR}"*.tar.gz "${TMPDIR}" && \
  echo "ok"
echo "AWS S3 sync starting"
# Only the .tar.gz archives are synced; everything else is excluded.
"$AWS" s3 sync \
  "${TMPDIR}" \
  "s3://${BUCKET}" \
  --profile "${AWS_PROFILE}" \
  --exclude "*" \
  --include "*.tar.gz" \
  >> "${LOG_DIR}${LOGNAME}" && \
  echo "ok"
echo "Cleanup ${TMPDIR}"
rm -rf "${TMPDIR}" && \
  echo "done"
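Because s3 sync only transfers archives that are not already in the bucket, it is safe to preview by hand before trusting the scheduled job. A manual dry-run sketch, reusing the same placeholder bucket and profile values and assuming aws is on the PATH (the --dryrun flag prints what would be copied without copying anything):

aws s3 sync "/tmp/$(date '+%Y%m%d-%H')/" "s3://<change me>" --profile "<change me>" --exclude "*" --include "*.tar.gz" --dryrun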
#!/usr/bin/env bash
# Upload the previous hour's compressed captures to S3, then clean up the staging directory.
AWS="${HOME}/.local/bin/aws"
NOW=$(date "+%Y%m%d-%H")
BUCKET="<change me>"
AWS_PROFILE="<change me>"
BACKUPDIR="<change me>"
TMPDIR="/tmp/${NOW}/"
LOG_DIR="$(pwd)/"
LOGNAME="capture.log"

echo "Creating ${TMPDIR}"
mkdir -p "${TMPDIR}" && \
  echo "ok"
echo "Copy from NAS ${BACKUPDIR}"
# Stage only the archives stamped with the previous hour.
cp "${BACKUPDIR}$(date -d "1 hour ago" "+%Y%m%d-%H")"*.tar.gz "${TMPDIR}" && \
  echo "ok"
echo "AWS S3 upload starting"
"$AWS" s3 cp \
  "${TMPDIR}" \
  "s3://${BUCKET}" \
  --profile "${AWS_PROFILE}" \
  --exclude "*" \
  --include "*.tar.gz" \
  --recursive \
  >> "${LOG_DIR}${LOGNAME}" && \
  echo "ok"
echo "Cleanup ${TMPDIR}"
rm -rf "${TMPDIR}" && \
  echo "done"
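After an upload run, the previous hour's archives should be listed in the bucket. A quick spot check, again with the placeholder bucket and profile values and assuming aws is on the PATH:

aws s3 ls "s3://<change me>/" --profile "<change me>" | grep "$(date -d '1 hour ago' '+%Y%m%d-%H')"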
import sys
import threading
import pandas as pd
import boto3
import ntpath
from datetime import datetime
from pprint import pprint

BUCKET = '< change me >'
PROFILE = '< change me >'


class ProgressPercentage(object):
    """boto3 transfer callback that prints cumulative bytes transferred."""

    def __init__(self, filename):
        self._filename = filename
        self._seen_so_far = 0
        self._lock = threading.Lock()

    def __call__(self, bytes_amount):
        # To simplify we'll assume this is hooked up
        # to a single filename.
        with self._lock:
            self._seen_so_far += bytes_amount
            sys.stdout.write(
                "\r%s --> %s bytes transferred" % (
                    self._filename, self._seen_so_far))
            sys.stdout.flush()


def process(df):
    """Yield one normalised tuple per captured packet row."""
    for row in df.values:
        (epoch, src_ip, dst_ip, http_host, http_req_uri, dns_qry_name,
         tcp_src_port, tcp_dst_port, udp_src_port, udp_dst_port,
         protocol, info, src_mac, dst_mac, user_agent,
         eth_src, eth_dst, eth_src_resolved, eth_dst_resolved) = row
        # Prefer the TCP ports, falling back to UDP.
        src_port = tcp_src_port or udp_src_port
        dst_port = tcp_dst_port or udp_dst_port
        # Prefer the ARP hardware addresses, falling back to the Ethernet header.
        if not src_mac:
            src_mac = eth_src
        if not dst_mac:
            dst_mac = eth_dst
        dt = datetime.utcfromtimestamp(float(epoch))
        # Classify direction relative to the 10.0.0.0/8 LAN.
        egress = src_ip.startswith("10.") and not dst_ip.startswith("10.")
        ingress = dst_ip.startswith("10.") and not src_ip.startswith("10.")
        lan = src_ip.startswith("10.") and dst_ip.startswith("10.")
        yield (dt.isoformat(), src_ip, dst_ip, http_host, lan, egress, ingress,
               http_req_uri, dns_qry_name, src_port, dst_port, protocol, info,
               src_mac, dst_mac, user_agent, eth_src_resolved, eth_dst_resolved)


def download(s3_object):
    """Download a single S3 object to the working directory and return its filename."""
    fp = s3_object.get('Key')
    filename = ntpath.basename(fp)
    client.download_file(BUCKET, fp, filename,
                         Callback=ProgressPercentage(filename))
    print('Done')
    return filename


dev = boto3.session.Session(profile_name=PROFILE)
client = dev.client('s3')
chunksize = 10 ** 8
objects = client.list_objects(Bucket=BUCKET)
li = []
for s3_object in objects.get('Contents'):
    filename = download(s3_object)
    # The tshark CSV has no header row, so every line is data.
    for df in pd.read_csv(filename, header=None, na_values=['nan'],
                          keep_default_na=False, compression='gzip',
                          error_bad_lines=False, chunksize=chunksize):
        li.extend(process(df))
    break  # only the first object is processed here
pprint(li)
#!/usr/bin/env bash
# Install the capture and upload dependencies if they are not already present.
tshark --version || apt-get install -y tshark
pip --version || apt-get install -y python python-pip
pip install --upgrade pip
aws --version || pip install awscli
pip install --upgrade awscli
# Create a named AWS profile matching the current user.
aws configure --profile "$(whoami)"
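tshark needs privileges to open an interface, so the capture script below will fail for an ordinary user out of the box. On Debian/Ubuntu (an assumption about the target distribution; the gist does not say), the usual fix is to let dumpcap capture for members of the wireshark group:

sudo dpkg-reconfigure wireshark-common    # answer Yes to allow non-superusers to capture
sudo usermod -aG wireshark "$(whoami)"    # takes effect after logging back in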
#!/usr/bin/env bash
# Capture traffic with tshark for a fixed duration and archive the CSV output to the NAS.
DATE=$(date "+%Y%m%d-%H%M")
OUTPUT_DIR="<change me>"
LOG_DIR="$(pwd)/"
WORKDIR="$(pwd)/"
FILENAME="${DATE}_suffix"   # adjust the suffix per sensor/host as needed
EXT=".csv"
C_EXT=".tar.gz"
LOGNAME="capture.log"
DURATION=$1
INTERFACE=$2
# Defaults: a 60-second capture on all interfaces.
if [[ -z ${DURATION} ]]; then
  DURATION=60
fi
if [[ -z ${INTERFACE} ]]; then
  INTERFACE=any
fi

echo "Capturing interface [${INTERFACE}] for [${DURATION}] seconds"
echo "Output: ${OUTPUT_DIR}${FILENAME}${EXT}"
echo "Log: ${LOG_DIR}${LOGNAME}"
mkdir -p "${OUTPUT_DIR}"
# Write the selected fields as quoted CSV; stdout is the capture, stderr goes to the log.
tshark -a duration:${DURATION} \
  -i ${INTERFACE} \
  -u s \
  -E separator=, \
  -E quote=d \
  -E occurrence=f \
  -T fields \
  -e frame.time_epoch \
  -e ip.src \
  -e ip.dst \
  -e http.host \
  -e http.request.uri \
  -e dns.qry.name \
  -e tcp.srcport \
  -e tcp.dstport \
  -e udp.srcport \
  -e udp.dstport \
  -e _ws.col.Protocol \
  -e _ws.col.Info \
  -e arp.src.hw_mac \
  -e arp.dst.hw_mac \
  -e http.user_agent \
  -e eth.src \
  -e eth.dst \
  -e eth.src_resolved \
  -e eth.dst_resolved \
  1>>"${WORKDIR}${FILENAME}${EXT}" \
  2>>"${LOG_DIR}${LOGNAME}"
# Compress, copy to the NAS, then remove the local working copies.
env GZIP=-9 tar cvzf "${WORKDIR}${FILENAME}${C_EXT}" -C "${WORKDIR}" "${FILENAME}${EXT}" && \
  cp "${WORKDIR}${FILENAME}${C_EXT}" "${OUTPUT_DIR}${FILENAME}${C_EXT}" && \
  rm "${WORKDIR}${FILENAME}${EXT}" && \
  rm "${WORKDIR}${FILENAME}${C_EXT}"
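The capture script takes the duration in seconds and the interface name as optional positional arguments, defaulting to 60 seconds on all interfaces. Example invocations, assuming it is saved as capture.sh:

./capture.sh              # 60 seconds, all interfaces
./capture.sh 300 eth0     # 5 minutes on eth0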