-
-
Save chutten/feb43ea722a7a0dcacfe399ba03a1ffe to your computer and use it in GitHub Desktop.
slow_script_saturdays
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# coding: utf-8

# ### Slow Script Saturdays

# This is a small analysis of the `SLOW_SCRIPT_NOTICE_COUNT` histogram on two
# nightly builds: 20160903 and 20160910 (both Saturdays).

# In[1]:

import ujson as json
import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
import plotly.plotly as py
from plotly.graph_objs import *
from moztelemetry import (get_pings, get_pings_properties,
                          get_one_ping_per_client, get_clients_history,
                          get_records)

get_ipython().magic(u'pylab inline')
# In[2]:

# Sanity check: how many partitions Spark will use by default on this cluster.
sc.defaultParallelism

# ### 20160910

# In[11]:

# Fetch all nightly Firefox pings from build 20160910 (a Saturday).
# fraction=1 means the full (unsampled) population.
pings = get_pings(sc,
                  app="Firefox",
                  channel="nightly",
                  build_id=("20160910000000", "20160910999999"),
                  fraction=1)
# ... and extract only the attributes we need from the Telemetry submissions:

# In[35]:

subset = get_pings_properties(pings, ["clientId",
                                      "environment/system/os/name",
                                      "payload/info/subsessionLength",
                                      "payload/histograms/SLOW_SCRIPT_NOTICE_COUNT"])
# This RDD is traversed several times below (counts, sums, per-OS breakdown),
# so keep it in memory rather than recomputing the extraction each pass.
subset.cache()
# In[36]:

# Pings that actually recorded at least one slow-script notice. Filtered once
# and cached -- the original re-filtered the full subset for every metric.
reporting = subset.filter(
    lambda p: p["payload/histograms/SLOW_SCRIPT_NOTICE_COUNT"] is not None).cache()

# Per-OS breakdown of reporting pings.
reporting.map(lambda p: (p["environment/system/os/name"], 1)).countByKey()

# In[17]:

subset.count()

# In[19]:

# Total usage hours across all pings, each ping's contribution clamped to
# [0, 25] hours. NOTE: 3600.0 forces true division -- under Python 2 the bare
# `/ 3600` floored away partial hours, which inflated notices-per-usage-hour.
# `or 0` guards pings with a missing subsessionLength.
usage = subset.map(
    lambda p: max(0, min(25, (p["payload/info/subsessionLength"] or 0) / 3600.0))
).reduce(lambda a, b: a + b)
usage

# In[21]:

# Total slow-script notices: sum of the histogram over all reporting pings.
notices = reporting.map(
    lambda p: p["payload/histograms/SLOW_SCRIPT_NOTICE_COUNT"]
).reduce(lambda a, b: a + b)
notices

# In[38]:

reporting_pings = reporting.count()
reporting_pings

# In[39]:

# Distinct clients that reported at least one notice.
reporting_clients = reporting.map(lambda p: p["clientId"]).distinct().count()
reporting_clients

# In[41]:

notices_per_ping = 1.0 * notices / subset.count()
notices_per_ping

# In[52]:

notices_per_reporting_client = 1.0 * notices / reporting_clients
notices_per_reporting_client

# In[53]:

notices_per_usage_hour = 1.0 * notices / usage
notices_per_usage_hour
# ### 20160903

# In[25]:

# Same extraction for the previous Saturday's nightly build, for comparison.
last_weeks_pings = get_pings(sc,
                             app="Firefox",
                             channel="nightly",
                             build_id=("20160903000000", "20160903999999"),
                             fraction=1)

# In[33]:

last_weeks_subset = get_pings_properties(last_weeks_pings, ["clientId",
                                                            "environment/system/os/name",
                                                            "payload/info/subsessionLength",
                                                            "payload/histograms/SLOW_SCRIPT_NOTICE_COUNT"])
# Traversed several times below; cache to avoid recomputing the extraction.
last_weeks_subset.cache()
# In[34]:

# Pings from the 20160903 build that recorded a slow-script notice, filtered
# once and cached (the original re-filtered the subset for every metric).
last_weeks_reporting = last_weeks_subset.filter(
    lambda p: p["payload/histograms/SLOW_SCRIPT_NOTICE_COUNT"] is not None).cache()

# Per-OS breakdown of reporting pings.
last_weeks_reporting.map(lambda p: (p["environment/system/os/name"], 1)).countByKey()

# In[27]:

last_weeks_subset.count()

# In[28]:

# Total usage hours, clamped to [0, 25] hours per ping. 3600.0 forces true
# division (the Python 2 `/ 3600` floored away partial hours); `or 0` guards
# pings with a missing subsessionLength.
last_weeks_usage = last_weeks_subset.map(
    lambda p: max(0, min(25, (p["payload/info/subsessionLength"] or 0) / 3600.0))
).reduce(lambda a, b: a + b)
last_weeks_usage

# In[29]:

# Total slow-script notices: sum of the histogram over all reporting pings.
last_weeks_notices = last_weeks_reporting.map(
    lambda p: p["payload/histograms/SLOW_SCRIPT_NOTICE_COUNT"]
).reduce(lambda a, b: a + b)
last_weeks_notices

# In[42]:

last_weeks_reporting_pings = last_weeks_reporting.count()
last_weeks_reporting_pings

# In[43]:

# Distinct clients that reported at least one notice.
last_weeks_reporting_clients = last_weeks_reporting.map(
    lambda p: p["clientId"]).distinct().count()
last_weeks_reporting_clients

# In[46]:

last_weeks_notices_per_ping = 1.0 * last_weeks_notices / last_weeks_subset.count()
last_weeks_notices_per_ping

# In[51]:

last_weeks_notices_per_reporting_client = (
    1.0 * last_weeks_notices / last_weeks_reporting_clients)
last_weeks_notices_per_reporting_client

# In[50]:

last_weeks_notices_per_usage_hour = 1.0 * last_weeks_notices / last_weeks_usage
last_weeks_notices_per_usage_hour
# ### Observations
#
# The notices-per-usage-hour metric remained relatively stable between 0903 and 0910 (0.46 to 0.48), but every other measure changed.
#
# Most alarming is the notices-per-reporting-client measure, which jumped from 10.7 to 31.1. This is consistent with moving from a population where "many are having a poor time" to one where "few are having a terrible time".
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment