Skip to content

Instantly share code, notes, and snippets.

@phobson
Created December 17, 2018 16:34
Show Gist options
  • Save phobson/2f54c0b8d299cd24193dc045990fe682 to your computer and use it in GitHub Desktop.
json-based dockside
Display the source blob
Display the rendered blob
Raw
{
"cells": [
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
"import json\n",
"import requests\n",
"import pandas"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
"def data_url(site, start, end, daily=False, fmt='json'):\n",
" \"\"\"Query the USGS NWIS web service for a site's time-series data.\n",
"\n",
" Parameters: site is the USGS site number (string); start/end are\n",
" anything pandas.Timestamp accepts; daily=True hits the daily-values\n",
" ('dv') endpoint instead of instantaneous ('iv'); fmt is the response\n",
" format requested from the service.\n",
"\n",
" Returns the requests.Response object. Raises requests.HTTPError on a\n",
" bad status instead of silently handing an error page to the caller.\n",
" \"\"\"\n",
" dtfmt = '%Y-%m-%d'\n",
" # 'dv' = daily values, 'iv' = instantaneous values\n",
" url_base = \"https://waterservices.usgs.gov/nwis/{}\".format('dv' if daily else 'iv')\n",
" url_params = {\n",
" \"format\": fmt,\n",
" \"sites\": site,\n",
" \"startDT\": pandas.Timestamp(start).strftime(dtfmt),\n",
" \"endDT\": pandas.Timestamp(end).strftime(dtfmt),\n",
" }\n",
"\n",
" r = requests.get(url_base, params=url_params)\n",
" r.raise_for_status() # fail loudly; downstream .json() would otherwise choke obscurely\n",
" return r"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [],
"source": [
"def expand_columns(df, names, sep='_'):\n",
" \"\"\"Split flat column labels on *sep* into a MultiIndex named *names*.\"\"\"\n",
" newcols = df.columns.str.split(sep, expand=True)\n",
" # NOTE: the 'inplace' kwarg was removed from set_axis in pandas 2.0;\n",
" # omitting it keeps the (always-default) copy-returning behavior and\n",
" # works on both old and new pandas.\n",
" return (\n",
" df.set_axis(newcols, axis='columns')\n",
" .rename_axis(names, axis='columns')\n",
" )\n",
"\n",
"\n",
"def add_column_level(df, levelvalue, levelname):\n",
" \"\"\"Prepend a constant outer level *levelvalue* (named *levelname*)\n",
" to a dataframe's flat column index.\n",
"\n",
" Raises ValueError if the columns are already a MultiIndex.\n",
" \"\"\"\n",
"\n",
" if isinstance(df.columns, pandas.MultiIndex):\n",
" raise ValueError('Dataframe already has MultiIndex on columns')\n",
"\n",
" origlevel = df.columns.names[0] or 'quantity'\n",
" # '_____' is an unlikely substring, so real column names survive the split\n",
" return (\n",
" df.add_prefix(levelvalue + '_____')\n",
" .pipe(expand_columns, [levelname, origlevel], sep='_____')\n",
" )"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [],
"source": [
"def parse_ts(ts):\n",
" \"\"\"Convert one NWIS JSON timeSeries record into a tidy dataframe.\n",
"\n",
" The result is indexed by datetime and carries a two-level column\n",
" index of (parameter name, 'qual'/'value').\n",
" \"\"\"\n",
" param = ts['variable']['variableName']\n",
" raw = pandas.DataFrame(ts['values'][0]['value'])\n",
" # prefix the raw columns so the derived ones can't collide, then drop them\n",
" derived = (\n",
" raw.rename(columns=lambda c: '_orig_' + c)\n",
" .assign(datetime=lambda df: pandas.to_datetime(df['_orig_dateTime']))\n",
" .assign(qual=lambda df: df['_orig_qualifiers'].map(lambda x: ','.join(x)))\n",
" .assign(value=lambda df: df['_orig_value'].astype(float))\n",
" )\n",
" keep = [c for c in derived.columns if not c.startswith('_orig')]\n",
" return (\n",
" derived[keep]\n",
" .set_index('datetime')\n",
" .rename_axis('var', axis='columns')\n",
" .pipe(add_column_level, param, 'parameter')\n",
" )"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {},
"outputs": [],
"source": [
"# USGS gaging station of interest\n",
"site = '08071280'\n",
"\n",
"response = data_url(site, '2018-10-01', '2018-12-01', daily=True)\n",
"frames = [parse_ts(ts) for ts in response.json()['value']['timeSeries']]\n",
"df = pandas.concat(frames, axis='columns')"
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead tr th {\n",
" text-align: left;\n",
" }\n",
"\n",
" .dataframe thead tr:last-of-type th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr>\n",
" <th>parameter</th>\n",
" <th colspan=\"2\" halign=\"left\">Streamflow, ft&amp;#179;/s</th>\n",
" <th colspan=\"2\" halign=\"left\">Gage height, ft</th>\n",
" </tr>\n",
" <tr>\n",
" <th>var</th>\n",
" <th>qual</th>\n",
" <th>value</th>\n",
" <th>qual</th>\n",
" <th>value</th>\n",
" </tr>\n",
" <tr>\n",
" <th>datetime</th>\n",
" <th></th>\n",
" <th></th>\n",
" <th></th>\n",
" <th></th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>2018-10-01 05:00:00</th>\n",
" <td>P</td>\n",
" <td>3.06</td>\n",
" <td>P</td>\n",
" <td>8.30</td>\n",
" </tr>\n",
" <tr>\n",
" <th>2018-10-01 05:15:00</th>\n",
" <td>P</td>\n",
" <td>3.06</td>\n",
" <td>P</td>\n",
" <td>8.30</td>\n",
" </tr>\n",
" <tr>\n",
" <th>2018-10-01 05:30:00</th>\n",
" <td>P</td>\n",
" <td>3.06</td>\n",
" <td>P</td>\n",
" <td>8.30</td>\n",
" </tr>\n",
" <tr>\n",
" <th>2018-10-01 05:45:00</th>\n",
" <td>P</td>\n",
" <td>3.06</td>\n",
" <td>P</td>\n",
" <td>8.30</td>\n",
" </tr>\n",
" <tr>\n",
" <th>2018-10-01 06:00:00</th>\n",
" <td>P</td>\n",
" <td>3.00</td>\n",
" <td>P</td>\n",
" <td>8.29</td>\n",
" </tr>\n",
" <tr>\n",
" <th>2018-10-01 06:15:00</th>\n",
" <td>P</td>\n",
" <td>3.00</td>\n",
" <td>P</td>\n",
" <td>8.29</td>\n",
" </tr>\n",
" <tr>\n",
" <th>2018-10-01 06:30:00</th>\n",
" <td>P</td>\n",
" <td>3.00</td>\n",
" <td>P</td>\n",
" <td>8.29</td>\n",
" </tr>\n",
" <tr>\n",
" <th>2018-10-01 06:45:00</th>\n",
" <td>P</td>\n",
" <td>3.00</td>\n",
" <td>P</td>\n",
" <td>8.29</td>\n",
" </tr>\n",
" <tr>\n",
" <th>2018-10-01 07:00:00</th>\n",
" <td>P</td>\n",
" <td>3.00</td>\n",
" <td>P</td>\n",
" <td>8.29</td>\n",
" </tr>\n",
" <tr>\n",
" <th>2018-10-01 07:15:00</th>\n",
" <td>P</td>\n",
" <td>2.93</td>\n",
" <td>P</td>\n",
" <td>8.28</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"</div>"
],
"text/plain": [
"parameter Streamflow, ft&#179;/s Gage height, ft \n",
"var qual value qual value\n",
"datetime \n",
"2018-10-01 05:00:00 P 3.06 P 8.30\n",
"2018-10-01 05:15:00 P 3.06 P 8.30\n",
"2018-10-01 05:30:00 P 3.06 P 8.30\n",
"2018-10-01 05:45:00 P 3.06 P 8.30\n",
"2018-10-01 06:00:00 P 3.00 P 8.29\n",
"2018-10-01 06:15:00 P 3.00 P 8.29\n",
"2018-10-01 06:30:00 P 3.00 P 8.29\n",
"2018-10-01 06:45:00 P 3.00 P 8.29\n",
"2018-10-01 07:00:00 P 3.00 P 8.29\n",
"2018-10-01 07:15:00 P 2.93 P 8.28"
]
},
"execution_count": 9,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"df.head(10)"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python [conda env:hydro]",
"language": "python",
"name": "conda-env-hydro-py"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.7"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment