# Import required packages
import json
import requests
import pandas as pd
import urllib.request
import time
from google.colab import files
import io
# Define URL
url = 'https://www.example.co.uk'

# API request url
result = urllib.request.urlopen('https://www.googleapis.com/pagespeedonline/v5/runPagespeed?url={}/&strategy=mobile'\
    .format(url)).read().decode('UTF-8')
print(result)
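
# Note (assumption): unauthenticated requests to the PageSpeed Insights API are
# rate limited. The API also accepts a 'key' query parameter for a Google Cloud
# API key; 'api_key' below is a placeholder added here for illustration and is
# not part of the original script.
api_key = ''  # paste a PageSpeed Insights API key here if you have one
if api_key:
    result = urllib.request.urlopen(
        'https://www.googleapis.com/pagespeedonline/v5/runPagespeed?url={}/&strategy=mobile&key={}'
        .format(url, api_key)).read().decode('UTF-8')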
# Convert to json format
result_json = json.loads(result)
print(result_json)

with open('result.json', 'w') as outfile:
    json.dump(result_json, outfile)
files.download('result.json')
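
# Note (assumption): a quick way to sanity-check the response structure is to pull
# a single lab metric out of the Lighthouse audits. The 'largest-contentful-paint'
# audit id and its 'displayValue' field follow the Lighthouse result format.
lcp_display = result_json['lighthouseResult']['audits']['largest-contentful-paint']['displayValue']
print('Largest Contentful Paint:', lcp_display)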
# Upload the CSV of URLs to test
uploaded = files.upload()

# If your column header is something other than 'url', define it here
column_header = 'url'
# Get the filename from the upload so we can read it into a dataframe
for key in uploaded.keys():
    filename = key
# Read the selected file into a Pandas Dataframe
df = pd.read_csv(io.BytesIO(uploaded[filename]))
df.head()
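
# Note (assumption): the uploaded file is expected to be a CSV with a column of
# URLs named 'url' (or whatever column_header is set to above). A minimal file
# with that structure could be written locally for testing; 'example_urls.csv'
# is a made-up filename used only for illustration.
pd.DataFrame({'url': ['https://www.example.co.uk']}).to_csv('example_urls.csv', index=False)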
response_object = {}
# Iterate through the df
for x in range(0, len(df)):
    # Define request parameter
    url = df.iloc[x][column_header]
    # Make request and store the json response against the URL
    pagespeed_results = urllib.request.urlopen(
        'https://www.googleapis.com/pagespeedonline/v5/runPagespeed?url={}/&strategy=mobile'
        .format(url)).read().decode('UTF-8')
    response_object[url] = json.loads(pagespeed_results)
    # Pause between requests; the 30 second wait is arbitrary, adjust to your quota
    time.sleep(30)

# Create dataframe to store field data responses
df_pagespeed_results = pd.DataFrame(columns=
    ['url',
     'Overall_Category',
     'Largest_Contentful_Paint',
     'First_Input_Delay',
     'Cumulative_Layout_Shift',
     'First_Contentful_Paint',
     'Time_to_Interactive',
     'Total_Blocking_Time'])

for (url, x) in zip(
    response_object.keys(),
    range(0, len(response_object))
):
    # URLs
    df_pagespeed_results.loc[x, 'url'] =\
        response_object[url]['lighthouseResult']['finalUrl']
    # Overall Category (from the loadingExperience field data)
    df_pagespeed_results.loc[x, 'Overall_Category'] =\
        response_object[url]['loadingExperience']['overall_category']
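
# Note (assumption): the remaining columns map to CrUX field data and Lighthouse
# lab audits in the v5 response. The key names used below ('metrics',
# 'LARGEST_CONTENTFUL_PAINT_MS', the 'interactive' and 'total-blocking-time'
# audit ids, etc.) follow the PageSpeed Insights response format, but this loop
# is a sketch of how those columns could be filled rather than part of the
# original script; adjust the keys if your responses differ.
for (url, x) in zip(response_object.keys(), range(0, len(response_object))):
    metrics = response_object[url]['loadingExperience']['metrics']
    audits = response_object[url]['lighthouseResult']['audits']
    df_pagespeed_results.loc[x, 'Largest_Contentful_Paint'] = metrics['LARGEST_CONTENTFUL_PAINT_MS']['percentile']
    df_pagespeed_results.loc[x, 'First_Input_Delay'] = metrics['FIRST_INPUT_DELAY_MS']['percentile']
    df_pagespeed_results.loc[x, 'Cumulative_Layout_Shift'] = metrics['CUMULATIVE_LAYOUT_SHIFT_SCORE']['percentile']
    df_pagespeed_results.loc[x, 'First_Contentful_Paint'] = metrics['FIRST_CONTENTFUL_PAINT_MS']['percentile']
    df_pagespeed_results.loc[x, 'Time_to_Interactive'] = audits['interactive']['displayValue']
    df_pagespeed_results.loc[x, 'Total_Blocking_Time'] = audits['total-blocking-time']['displayValue']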
summary = df_pagespeed_results
df_pagespeed_results.head()
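
# Note (assumption): to keep the results outside the Colab session, the summary
# dataframe can be exported and downloaded in the same way as result.json above;
# 'pagespeed_results.csv' is just an example filename.
summary.to_csv('pagespeed_results.csv', index=False)
files.download('pagespeed_results.csv')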