{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"---\n",
"title: \"Chrome Input Latency\"\n",
"authors:\n",
"- bsmedberg\n",
"created_at: 2017-04-13\n",
"---"
]
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {
"collapsed": false,
"scrolled": true
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Unable to parse whitelist (/mnt/anaconda2/lib/python2.7/site-packages/moztelemetry/histogram-whitelists.json). Assuming all histograms are acceptable.\n"
]
}
],
"source": [
"import ujson as json\n",
"import matplotlib.pyplot as plt\n",
"import pandas as pd\n",
"import numpy as np\n",
"import plotly.plotly as py\n",
"\n",
"from plotly.graph_objs import *\n",
"from moztelemetry import get_pings_properties, get_one_ping_per_client\n",
"from moztelemetry.dataset import Dataset\n",
"from moztelemetry.histogram import Histogram\n",
"from operator import add\n",
"\n",
"from datetime import date, timedelta\n",
"\n",
"%matplotlib inline"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {
"collapsed": false
},
"outputs": [
{
"data": {
"text/plain": [
"480"
]
},
"execution_count": 2,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"sc.defaultParallelism"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"def ping_filter(p):\n",
" if p.get(\"environment/system/os/name\", None) != \"Windows_NT\":\n",
" return False\n",
" if p.get(\"payload/info/subsessionLength\", 0) <= 0:\n",
" return False\n",
" if p.get(\"environment/settings/e10sEnabled\", False) != True:\n",
" return False\n",
" addons = p.get(\"environment/addons/activeAddons\", {}) or {}\n",
" for a in addons.itervalues():\n",
" if a.get(\"isSystem\", False) != True:\n",
" return False\n",
" return True\n",
"\n",
"def print_histogram(histogram, processtype, hours):\n",
" print \"{} process:\".format(processtype)\n",
" stotal = 0\n",
" for cutoff, count in histogram.buckets.sort_index(ascending=False).iteritems():\n",
" if cutoff < 150:\n",
" break\n",
" stotal = stotal + count\n",
" print \" {:5}ms: {:0.3f} hours\".format(cutoff, float(hours) / stotal) \n",
"\n",
"def input_latency_mtbf(startdate, days=7, channel=\"nightly\"):\n",
" sdstring = startdate.strftime(\"%Y%m%d\")\n",
" edstring = (startdate + timedelta(days=days)).strftime(\"%Y%m%d\")\n",
" \n",
" pings = Dataset.from_source(\"telemetry\") \\\n",
" .where(docType='main') \\\n",
" .where(submissionDate=lambda d: sdstring <= d <= edstring) \\\n",
" .where(appUpdateChannel=channel) \\\n",
" .records(sc)\n",
"\n",
" data = get_pings_properties(pings, [\n",
" \"environment/system/os/name\",\n",
" \"environment/settings/e10sEnabled\",\n",
" \"environment/addons/activeAddons\",\n",
" \"payload/histograms/INPUT_EVENT_RESPONSE_MS\",\n",
" \"payload/info/subsessionLength\"], with_processes=True) \\\n",
" .filter(ping_filter).cache()\n",
"\n",
" total_session_length_hours = data \\\n",
" .map(lambda p: p[\"payload/info/subsessionLength\"]) \\\n",
" .reduce(add) / 60 / 60\n",
"\n",
" print \"Input jank MTBF for {} channel, {} for {} days:\".format(channel, startdate.strftime(\"%Y-%m-%d\"), days)\n",
" print \"Usage hours total: {}\".format(total_session_length_hours)\n",
"\n",
" aggregated_input_latency_chrome = data \\\n",
" .map(lambda p: p.get(\"payload/histograms/INPUT_EVENT_RESPONSE_MS_parent\", None)) \\\n",
" .filter(lambda v: v is not None) \\\n",
" .reduce(add)\n",
" chrome_histogram = Histogram(\"INPUT_EVENT_RESPONSE_MS\", aggregated_input_latency_chrome)\n",
" print_histogram(chrome_histogram, \"chrome\", total_session_length_hours)\n",
"\n",
" aggregated_input_latency_content = data \\\n",
" .map(lambda p: p.get(\"payload/histograms/INPUT_EVENT_RESPONSE_MS_children\", None)) \\\n",
" .filter(lambda v: v is not None) \\\n",
" .reduce(add)\n",
" content_histogram = Histogram(\"INPUT_EVENT_RESPONSE_MS\", aggregated_input_latency_content)\n",
" print_histogram(content_histogram, \"content\", total_session_length_hours)\n",
" \n",
" print\n"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Caculate mean time between failure (MTBF) of \"hangs at least as long as X ms\". Cut off at 150ms because for now that's not a failure case."
]
},
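{
"cell_type": "markdown",
"metadata": {},
"source": [
"For reference, a minimal sketch of the MTBF arithmetic used in `print_histogram` above, on a hypothetical toy histogram. The names `toy_buckets` and `toy_usage_hours` and their values are made up for illustration, not real telemetry data: walking the buckets from largest to smallest, the cumulative count of events at or above each cutoff divides the total usage hours to give the hours between hangs of at least that length."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"# Hypothetical bucket counts: lower cutoff (ms) -> number of events that landed in that bucket.\n",
"toy_buckets = pd.Series({150: 40, 268: 25, 533: 10, 1062: 4})\n",
"toy_usage_hours = 100.0\n",
"\n",
"running_total = 0\n",
"for cutoff, count in toy_buckets.sort_index(ascending=False).iteritems():\n",
"    running_total += count\n",
"    # MTBF for hangs at least this long: total usage hours / cumulative event count.\n",
"    print \"{:5}ms: {:0.3f} hours between hangs\".format(cutoff, toy_usage_hours / running_total)"
]
},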
{
"cell_type": "code",
"execution_count": 8,
"metadata": {
"collapsed": false
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Input jank MTBF for nightly channel, 2017-04-02 for 7 days:\n",
"Usage hours total: 680474\n",
"chrome process:\n",
" 10000ms: 5.953 hours\n",
" 8416ms: 5.324 hours\n",
" 7083ms: 4.778 hours\n",
" 5961ms: 4.276 hours\n",
" 5017ms: 3.736 hours\n",
" 4222ms: 3.315 hours\n",
" 3553ms: 2.941 hours\n",
" 2990ms: 2.594 hours\n",
" 2516ms: 2.270 hours\n",
" 2117ms: 1.977 hours\n",
" 1782ms: 1.694 hours\n",
" 1500ms: 1.442 hours\n",
" 1262ms: 1.230 hours\n",
" 1062ms: 1.056 hours\n",
" 894ms: 0.904 hours\n",
" 752ms: 0.774 hours\n",
" 633ms: 0.659 hours\n",
" 533ms: 0.562 hours\n",
" 449ms: 0.477 hours\n",
" 378ms: 0.409 hours\n",
" 318ms: 0.349 hours\n",
" 268ms: 0.293 hours\n",
" 226ms: 0.233 hours\n",
" 190ms: 0.189 hours\n",
" 160ms: 0.161 hours\n",
"content process:\n",
" 10000ms: 0.397 hours\n",
" 8416ms: 0.357 hours\n",
" 7083ms: 0.319 hours\n",
" 5961ms: 0.284 hours\n",
" 5017ms: 0.250 hours\n",
" 4222ms: 0.218 hours\n",
" 3553ms: 0.189 hours\n",
" 2990ms: 0.162 hours\n",
" 2516ms: 0.138 hours\n",
" 2117ms: 0.116 hours\n",
" 1782ms: 0.097 hours\n",
" 1500ms: 0.080 hours\n",
" 1262ms: 0.066 hours\n",
" 1062ms: 0.054 hours\n",
" 894ms: 0.045 hours\n",
" 752ms: 0.037 hours\n",
" 633ms: 0.030 hours\n",
" 533ms: 0.025 hours\n",
" 449ms: 0.020 hours\n",
" 378ms: 0.017 hours\n",
" 318ms: 0.014 hours\n",
" 268ms: 0.012 hours\n",
" 226ms: 0.010 hours\n",
" 190ms: 0.008 hours\n",
" 160ms: 0.007 hours\n",
"\n"
]
}
],
"source": [
"input_latency_mtbf(date(2017, 4, 2), channel=\"nightly\")"
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {
"collapsed": false
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Input jank MTBF for beta channel, 2017-04-02 for 7 days:\n",
"Usage hours total: 37145041\n",
"chrome process:\n",
" 10000ms: 3.260 hours\n",
" 8416ms: 2.947 hours\n",
" 7083ms: 2.665 hours\n",
" 5961ms: 2.386 hours\n",
" 5017ms: 2.095 hours\n",
" 4222ms: 1.874 hours\n",
" 3553ms: 1.679 hours\n",
" 2990ms: 1.503 hours\n",
" 2516ms: 1.339 hours\n",
" 2117ms: 1.189 hours\n",
" 1782ms: 1.047 hours\n",
" 1500ms: 0.919 hours\n",
" 1262ms: 0.812 hours\n",
" 1062ms: 0.722 hours\n",
" 894ms: 0.641 hours\n",
" 752ms: 0.568 hours\n",
" 633ms: 0.495 hours\n",
" 533ms: 0.429 hours\n",
" 449ms: 0.370 hours\n",
" 378ms: 0.322 hours\n",
" 318ms: 0.279 hours\n",
" 268ms: 0.234 hours\n",
" 226ms: 0.194 hours\n",
" 190ms: 0.161 hours\n",
" 160ms: 0.141 hours\n",
"content process:\n",
" 10000ms: 0.564 hours\n",
" 8416ms: 0.481 hours\n",
" 7083ms: 0.408 hours\n",
" 5961ms: 0.344 hours\n",
" 5017ms: 0.289 hours\n",
" 4222ms: 0.241 hours\n",
" 3553ms: 0.200 hours\n",
" 2990ms: 0.165 hours\n",
" 2516ms: 0.136 hours\n",
" 2117ms: 0.112 hours\n",
" 1782ms: 0.092 hours\n",
" 1500ms: 0.075 hours\n",
" 1262ms: 0.062 hours\n",
" 1062ms: 0.051 hours\n",
" 894ms: 0.042 hours\n",
" 752ms: 0.034 hours\n",
" 633ms: 0.028 hours\n",
" 533ms: 0.023 hours\n",
" 449ms: 0.019 hours\n",
" 378ms: 0.016 hours\n",
" 318ms: 0.014 hours\n",
" 268ms: 0.012 hours\n",
" 226ms: 0.010 hours\n",
" 190ms: 0.008 hours\n",
" 160ms: 0.007 hours\n",
"\n"
]
}
],
"source": [
"input_latency_mtbf(date(2017, 4, 2), channel=\"beta\")"
]
}
],
"metadata": {
"anaconda-cloud": {},
"kernelspec": {
"display_name": "Python [default]",
"language": "python",
"name": "python2"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 2
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython2",
"version": "2.7.12"
}
},
"nbformat": 4,
"nbformat_minor": 0
}