Skip to content

Instantly share code, notes, and snippets.

@caelor
Created October 15, 2016 17:12
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save caelor/f4cea6352351a460ecdb5715fd484f25 to your computer and use it in GitHub Desktop.
#!/usr/bin/env python
# Script to read DOCSIS stats from a VM SuperHub 2ac in Modem mode,
# and insert them into an InfluxDB database (e.g. for graphing)
#
# Intended to be run every minute by cron job.
# Based on https://github.com/edent/SuperHub/blob/master/SuperHubStats.py
# Adapted for InfluxDB and Modem mode
from bs4 import BeautifulSoup
import urllib2
import csv
import datetime
import time
from influxdb import InfluxDBClient
import re
# This script reads statistics from a VirginMedia SuperHub2 / 2ac
# Requires http://www.crummy.com/software/BeautifulSoup/ and InfluxDB python libs
# Timestamp in RFC3339/ISO8601 UTC form ("...T...Z"), the format InfluxDB
# expects for the point "time" field.  One timestamp is shared by every
# point written in this run so all channels align on the same sample.
timestamp = datetime.datetime.utcnow().isoformat("T") + "Z"
# The default SuperHub IP address (its management address in Modem mode).
SuperHubIP = "http://192.168.100.1/"
# InfluxDB connection settings -- edit these placeholders before use.
InfluxIP = "<InfluxDB Server>"
InfluxPort = 8086  # <InfluxDB Port> -- must be an int; 8086 is the InfluxDB default
InfluxDB = "<InfluxDB Database>"
InfluxUser = "<InfluxDB User>"
InfluxPass = "<InfluxDB Password>"
# Accumulates one point dict per channel; written to InfluxDB in one batch.
postdata = []
# Fetch the upstream and downstream DOCSIS status pages from the hub.
# Close each response as soon as it is read so the HTTP connection to the
# hub is not left open (the original leaked both handles).
_up_resp = urllib2.urlopen(SuperHubIP + "cgi-bin/VmRouterStatusUpstreamCfgCgi")
try:
    _up_html = _up_resp.read()
finally:
    _up_resp.close()
_down_resp = urllib2.urlopen(SuperHubIP + "cgi-bin/VmRouterStatusDownstreamCfgCgi")
try:
    _down_html = _down_resp.read()
finally:
    _down_resp.close()
# Parse them (lxml parser, as the original specified).
up_soup = BeautifulSoup(_up_html, 'lxml')
down_soup = BeautifulSoup(_down_html, 'lxml')
# Find the Upstream stats.  Each statistic lives in a table row shaped like:
'''
<tr>
<td class="title">Power Level (dBmV)</td>
<td>41.00</td>
<td>N/A</td>
<td>N/A</td>
<td>45.00</td>
</tr>
'''
def _us_row(label):
    # Return every <td> of the upstream-page row whose title text equals
    # `label`.  Cell 0 is the title itself; cells 1..n are per-channel values.
    return up_soup.find(text=label).parent.parent.findAll('td')

us_power = _us_row("Power Level (dBmV)")
us_chanid = _us_row("Channel ID")
us_freq = _us_row("Frequency (Hz)")
us_t1 = _us_row("T1 Timeouts")
us_t2 = _us_row("T2 Timeouts")
us_t3 = _us_row("T3 Timeouts")
us_t4 = _us_row("T4 Timeouts")
# Prepare the Upstream measurements: one InfluxDB point per channel column.
# Cell 0 of each row is the row title, so the data cells start at index 1.
for i in range(1, len(us_power)):
    fields = {}
    tag = "us%d" % (i)
    # The hub reports "N/A" for unused channels / missing readings --
    # omit those fields rather than writing bogus values.
    if us_power[i].text != "N/A":
        fields["power_dbmv"] = float(us_power[i].text)
    if us_chanid[i].text != "N/A":
        fields["channel_id"] = int(us_chanid[i].text)
    if us_freq[i].text != "N/A":
        fields["frequency_hz"] = int(us_freq[i].text)
    if us_t1[i].text != "N/A":
        fields["t1_timeouts"] = int(us_t1[i].text)
    if us_t2[i].text != "N/A":
        fields["t2_timeouts"] = int(us_t2[i].text)
    if us_t3[i].text != "N/A":
        fields["t3_timeouts"] = int(us_t3[i].text)
    if us_t4[i].text != "N/A":
        fields["t4_timeouts"] = int(us_t4[i].text)
    postdata.append({
        "measurement": "upstream",
        "tags": {
            "channel": tag
        },
        "time": timestamp,
        "fields": fields
    })
# Find the Downstream stats (same table layout as the upstream page).
def _ds_row(matcher):
    # Return every <td> of the downstream-page row whose title matches
    # `matcher` -- either a plain string for an exact match, or a compiled
    # regex for rows whose titles carry extra surrounding text.
    return down_soup.find(text=matcher).parent.parent.findAll('td')

ds_power = _ds_row("Power Level (dBmV)")
ds_rx = _ds_row("RxMER (dB)")
ds_chanid = _ds_row("Channel ID")
ds_freq = _ds_row("Frequency (Hz)")
ds_preerr = _ds_row(re.compile("Pre RS Errors"))
ds_posterr = _ds_row(re.compile("Post RS Errors"))
ds_lock = _ds_row(re.compile("Lock Status"))
# Prepare the Downstream measurements: one InfluxDB point per channel column.
# Cell 0 of each row is the row title, so the data cells start at index 1.
for i in range(1, len(ds_power)):
    fields = {}
    tag = "ds%d" % (i)
    # Skip any value the hub reports as "N/A" (unused channel / no reading).
    if ds_power[i].text != "N/A":
        fields["power_dbmv"] = float(ds_power[i].text)
    if ds_chanid[i].text != "N/A":
        fields["channel_id"] = int(ds_chanid[i].text)
    if ds_freq[i].text != "N/A":
        fields["frequency_hz"] = int(ds_freq[i].text)
    if ds_preerr[i].text != "N/A":
        fields["pre_rs_errors"] = int(ds_preerr[i].text)
    if ds_posterr[i].text != "N/A":
        fields["post_rs_errors"] = int(ds_posterr[i].text)
    if ds_rx[i].text != "N/A":
        fields["rxmer_db"] = float(ds_rx[i].text)
    # Lock status is a string field (e.g. "Locked"), not numeric.
    if ds_lock[i].text != "N/A":
        fields["lock_status"] = ds_lock[i].text
    postdata.append({
        "measurement": "downstream",
        "tags": {
            "channel": tag
        },
        "time": timestamp,
        "fields": fields
    })
# Push every collected point to InfluxDB in a single batch write.
client = InfluxDBClient(InfluxIP, InfluxPort, InfluxUser, InfluxPass, InfluxDB)
client.write_points(postdata)
# All done. Bye-bye!  (No explicit exit() call: falling off the end of the
# script already exits with status 0, and exit() is the site.py interactive
# helper rather than a proper program-termination API.)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment