@jeremywhelchel, last active October 14, 2023
Simple bash scripts to collect connection statistics for a remote host (ping latency and wifi signal strength) and generate graphs over time. I couldn't find a simple solution for this task: only heavyweight frameworks such as smokeping or vaping, Prometheus, or other cloud-centric monitoring stacks.
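A rough sketch of how the pieces fit together, assuming a Debian-like host; the package names, log paths, and the connection_grapher.sh filename below are placeholders rather than part of the gist, and the gcloud CLI is installed separately from the Google Cloud SDK:

# Assumed dependencies; adjust package names for your distro.
sudo apt-get install rrdtool fping iw graphicsmagick
# Create the RRD once (see the rrdtool create command at the end of connection_monitor.sh),
# then keep both loops running, e.g. under nohup or a systemd service for log rotation:
nohup ./connection_monitor.sh >> /home/u/connection_monitor.log 2>&1 &
nohup ./connection_grapher.sh >> /home/u/connection_grapher.log 2>&1 &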
#!/bin/bash
# Monitor connection statistics (latency + wifi signal strength) continuously.
# Stores timeseries data in RRDTool
set -e
WIFI_DEV="wlan0"
MAC='aa:bb:cc:dd:ee:ff'
PING_HOST='192.168.1.10'
DB='/home/u/connection_stats.rrd'
UPDATE_INTERVAL=5 # Loop every 5 seconds
get_station_info() {
  unset signal
  # station_missing must be reset on every call; otherwise a single miss would
  # report the station as missing forever.
  local station_missing=false
  iw dev ${WIFI_DEV} station dump | grep -q "Station ${MAC}" || station_missing=true
  if [[ "$station_missing" = true ]]; then
    echo "Wifi station missing: ${MAC}"
    return
  fi
  # TODO: can extract other metrics here: failed/retries/etc.
  signal=$(iw dev ${WIFI_DEV} station get ${MAC} | grep 'signal:' | awk '{print $2}')
}
get_ping_info() {
  # fping -C1 prints "<host> : <ms>" (or "-" on packet loss); strip everything up to ": ".
  ping_time=$(fping -u -C1 -p1000 -e ${PING_HOST} 2>&1 | sed -e 's/.*: //')
}
while :; do
  # This is rather verbose, especially 'updatev' below (rather than 'update').
  # Assumes output is streamed to a log system with rotation (e.g. systemd).
  echo '*****'
  echo "Time: $(date +%H:%M:%S)"
  get_station_info
  echo "Signal: ${signal}"
  get_ping_info
  echo "Ping result: ${ping_time}"
  if [ "$ping_time" = "-" ]; then
    # U = no value for this time point
    ping_time='U'
  fi
  if [ -z "$signal" ]; then
    # Record an unknown value when the station is missing; an empty field would
    # make rrdtool reject the update (and set -e would then kill the loop).
    signal='U'
  fi
  # N = use the current time
  echo rrdtool updatev ${DB} N:${signal}:${ping_time}
  rrdtool updatev ${DB} N:${signal}:${ping_time}
  # Sleep until the next UPDATE_INTERVAL boundary so samples stay aligned.
  perl -e "sleep ${UPDATE_INTERVAL} - time % ${UPDATE_INTERVAL}"
done
exit 0;
# The following command creates the RRD file initially and defines
# the timeseries collection parameters.
# Store a sample every 5 seconds.
# Keep every 5-second sample (5s*1) for the last 6 hours (4320 rows).
# Keep 5-minute averages (5s*60) for roughly 30 days (8760 rows * 5 min = 730 hours).
# Total file size: ~200K
rrdtool create connection_stats.rrd \
  --step 5 \
  DS:signal:GAUGE:10:-100:-10 \
  DS:latency:GAUGE:10:0:U \
  RRA:AVERAGE:0.5:1:4320 \
  RRA:AVERAGE:0.5:60:8760
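To sanity-check that samples are actually landing in the database, rrdtool's own inspection subcommands can be used; a minimal sketch (the path matches DB above):

# Confirm the data sources and archives were created as intended.
rrdtool info /home/u/connection_stats.rrd | grep -E 'step|^ds|rows'
# Print the most recent sample and the last 10 minutes of averaged data.
rrdtool lastupdate /home/u/connection_stats.rrd
rrdtool fetch /home/u/connection_stats.rrd AVERAGE --start -600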
#!/bin/bash
# Periodically generate graphs from data collected from connection_monitor.sh.
# Push them to a GCS bucket for public serving.
set -e
DB='/home/u/connection_stats.rrd'
DIR='/home/u/graphs'
GCS_FILE='gs://mybucket/connection_stats.png'
UPDATE_INTERVAL=300 # Every 5 minutes
make_graph() {
  period=$1
  start=$(date +%s -d "-${period}")
  echo $(date -d "-${period}")
  rrdtool graphv ${DIR}/connection-latency-${period}.png \
    --title="Latency\n(${period}) lower=better" \
    --start=${start} \
    DEF:latency=${DB}:latency:AVERAGE \
    LINE1:latency#6699CC:'latency (ms)'
  rrdtool graphv ${DIR}/connection-signal-${period}.png \
    --title="Wifi Signal\n(${period}) higher=better" \
    --start=${start} \
    DEF:signal=${DB}:signal:AVERAGE \
    LINE1:signal#ff0000:'signal (dBm)'
  # Stack latency over signal vertically into one image per period.
  gm convert \
    ${DIR}/connection-latency-${period}.png \
    ${DIR}/connection-signal-${period}.png \
    -append \
    ${DIR}/connection-${period}.png
}
make_graphs() {
  make_graph 10min
  make_graph 2hour
  make_graph 24hour
  make_graph 168hour
  # Join the four time ranges side by side (+append appends horizontally).
  gm convert \
    ${DIR}/connection-10min.png \
    ${DIR}/connection-2hour.png \
    ${DIR}/connection-24hour.png \
    ${DIR}/connection-168hour.png \
    +append \
    ${DIR}/connection.png
}
upload_to_cloud() {
  # Pricing:
  # 2 operations * 12/hr * 24 hr/day * 30 day/mo = 17k op/mo
  # 17 kop/mo * 0.005 $/kop = 0.085 $/mo (cheap :P)
  echo "Uploading to GCS"
  gcloud storage cp ${DIR}/connection.png ${GCS_FILE}
  echo "Updating GCS access + cache parameters"
  gcloud storage objects update ${GCS_FILE} \
    --add-acl-grant=entity=AllUsers,role=READER \
    --cache-control=no-cache
}
while :; do
  echo '*****'
  echo "Time: $(date +%H:%M:%S)"
  make_graphs
  upload_to_cloud
  # Sleep until the next UPDATE_INTERVAL boundary so uploads stay aligned.
  perl -e "sleep ${UPDATE_INTERVAL} - time % ${UPDATE_INTERVAL}"
done
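Since the object is made world-readable with caching disabled, the combined graph can be viewed directly at the bucket's public HTTPS endpoint (mybucket is the gist's placeholder name); a quick way to confirm uploads are working:

# Expect a 200 response with "cache-control: no-cache" once the first upload completes.
curl -I https://storage.googleapis.com/mybucket/connection_stats.png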