@nickjevershed
Created February 26, 2020 02:35
Demo of API paging: fetch the first page of a CKAN datastore_search query, use the reported total to work out how many pages exist, then step through the remaining offsets and save each batch of records.
import math

import requests
import scraperwiki

# Function to save a batch of records into a SQLite database, using the
# API's _id field as the unique key so re-runs don't create duplicate rows
def saveData(records):
    scraperwiki.sqlite.save(unique_keys=["_id"], data=records)

# Get the initial page of API results and parse it as JSON
result = requests.get("https://data.gov.au/data/api/3/action/datastore_search?offset=0&resource_id=88399d53-d55c-466c-8f4a-6cb965d24d6d").json()
print(result['success'])

# If the API call was successful, save the first page and fetch the rest
if result['success']:
    saveData(result['result']['records'])

    # Work out how many pages of results there are in total
    totalRecords = result['result']['total']
    pageSize = 100  # datastore_search returns 100 records per request by default
    pageTimes = math.ceil(totalRecords / pageSize)

    # Page through the remaining results by stepping the offset parameter
    for pageNo in range(pageSize, pageTimes * pageSize, pageSize):
        print(pageNo)
        newUrl = "https://data.gov.au/data/api/3/action/datastore_search?offset={pageNo}&resource_id=88399d53-d55c-466c-8f4a-6cb965d24d6d".format(pageNo=pageNo)
        print(newUrl)
        newResult = requests.get(newUrl).json()
        if newResult['success']:
            saveData(newResult['result']['records'])
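
If the total count isn't needed up front, a common alternative is to keep stepping the offset until the API returns an empty page. This is a minimal sketch under that assumption, reusing the same endpoint and the same scraperwiki save call; the empty-page stopping rule is an assumption about the API's behaviour rather than something the gist above relies on.

import requests
import scraperwiki

URL = ("https://data.gov.au/data/api/3/action/datastore_search"
       "?offset={offset}&resource_id=88399d53-d55c-466c-8f4a-6cb965d24d6d")

offset = 0
while True:
    result = requests.get(URL.format(offset=offset)).json()
    if not result['success']:
        break
    records = result['result']['records']
    if not records:
        break  # an empty page means every record has been fetched
    scraperwiki.sqlite.save(unique_keys=["_id"], data=records)
    offset += len(records)  # advance by however many records came back

Stepping by len(records) rather than a hard-coded page size also keeps the loop correct if the API's default limit ever changes.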