@jonathanballs
Created March 7, 2017 18:38
Get a list of free PCs at UoN (University of Nottingham)
#!/usr/bin/python2
import requests
import datetime
import json
from bs4 import BeautifulSoup

PCFINDER_URL = "http://pcfinder.nottingham.ac.uk/"


# Recursively searches the Nottingham PC Finder to count the free PCs in each room
def get_free_pcs(url):
    page = BeautifulSoup(requests.get(url).text, 'html.parser')

    # If this page is a group of buildings, follow each menu link and recurse
    if "building" not in url:
        subgroups = {}
        for subgroup in page('a', {'class': 'menu'}):
            subgroup_url = PCFINDER_URL + subgroup.get('href')
            subgroups[subgroup.string] = get_free_pcs(subgroup_url)
        return subgroups
    else:
        # Collect each room in the building, skipping the final "total" row
        rooms = {}
        for room in page.find('tbody')('tr')[:-1]:
            rooms[room.td.string] = {
                'free': int(room('td')[1].string),
                'used': int(room('td')[2].string)
            }
        return rooms


log_entry = {
    'timestamp': datetime.datetime.now().isoformat(),
    'pcs': get_free_pcs(PCFINDER_URL)
}

print json.dumps(log_entry)
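
The script prints one JSON object per run, so its output can simply be appended to a log file and parsed back later. A minimal sketch of reading such a log, assuming one JSON object per line and a hypothetical file name pcfinder.log (neither is part of the original gist):

#!/usr/bin/python2
# Sketch: read back entries produced by the script above.
# "pcfinder.log" is an assumed file name, not part of the original gist.
import json

with open("pcfinder.log") as f:
    for line in f:
        entry = json.loads(line)
        # entry['pcs'] mirrors get_free_pcs(): group names map to nested dicts
        # of buildings/rooms, and each room is {'free': <int>, 'used': <int>}
        print entry['timestamp'], entry['pcs']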