Last active
March 17, 2020 19:58
-
-
Save ginward/50994388dfd5571dc557b97cc3f99bdf to your computer and use it in GitHub Desktop.
ckpt_xiu_monthly.ipynb
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
{ | |
"nbformat": 4, | |
"nbformat_minor": 0, | |
"metadata": { | |
"kernelspec": { | |
"name": "python3", | |
"display_name": "Python 3" | |
}, | |
"language_info": { | |
"codemirror_mode": { | |
"name": "ipython", | |
"version": 3 | |
}, | |
"file_extension": ".py", | |
"mimetype": "text/x-python", | |
"name": "python", | |
"nbconvert_exporter": "python", | |
"pygments_lexer": "ipython3", | |
"version": "3.7.1" | |
}, | |
"colab": { | |
"name": "ckpt_xiu_monthly.ipynb", | |
"provenance": [], | |
"collapsed_sections": [], | |
"toc_visible": true, | |
"machine_shape": "hm", | |
"include_colab_link": true | |
}, | |
"accelerator": "GPU" | |
}, | |
"cells": [ | |
{ | |
"cell_type": "markdown", | |
"metadata": { | |
"id": "view-in-github", | |
"colab_type": "text" | |
}, | |
"source": [ | |
"<a href=\"https://colab.research.google.com/gist/ginward/50994388dfd5571dc557b97cc3f99bdf/ckpt_xiu_monthly.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>" | |
] | |
}, | |
{ | |
"cell_type": "markdown", | |
"metadata": { | |
"id": "YX8cgPsuPUd6", | |
"colab_type": "text" | |
}, | |
"source": [ | |
"# 1. Introduction" | |
] | |
}, | |
{ | |
"cell_type": "markdown", | |
"metadata": { | |
"id": "QCPSfZqhPUd9", | |
"colab_type": "text" | |
}, | |
"source": [ | |
"This notebook is to replicate the main result of the paper: \n", | |
"Gu, Shihao, Bryan T. Kelly, and Dacheng Xiu. \"Autoencoder asset pricing models.\" Available at SSRN (2019).\n", | |
"\n", | |
"Please refer to the requirement.txt for environment configuration.\n", | |
"\n", | |
"This version also implements checkpointing." | |
] | |
}, | |
{ | |
"cell_type": "code", | |
"metadata": { | |
"id": "2ia_JKmlPUd-", | |
"colab_type": "code", | |
"colab": {} | |
}, | |
"source": [ | |
"import numpy as np\n", | |
"import pandas as pd\n", | |
"import h5py\n", | |
"from matplotlib import pyplot as plt\n", | |
"from scipy import stats\n", | |
"import torch as t\n", | |
"from torch import nn\n", | |
"import os\n", | |
"from torch import optim\n", | |
"import torch.utils.data as Data\n", | |
"import warnings\n", | |
"warnings.filterwarnings(\"ignore\")\n", | |
"t.manual_seed(1)\n", | |
"t.cuda.manual_seed(1)" | |
], | |
"execution_count": 0, | |
"outputs": [] | |
}, | |
{ | |
"cell_type": "code", | |
"metadata": { | |
"id": "7xd1MpSTPUeB", | |
"colab_type": "code", | |
"outputId": "aad6d94d-8b23-453a-a479-d2de56b05f0d", | |
"colab": { | |
"base_uri": "https://localhost:8080/", | |
"height": 34 | |
} | |
}, | |
"source": [ | |
"print(t.__version__)" | |
], | |
"execution_count": 0, | |
"outputs": [ | |
{ | |
"output_type": "stream", | |
"text": [ | |
"1.4.0\n" | |
], | |
"name": "stdout" | |
} | |
] | |
}, | |
{ | |
"cell_type": "code", | |
"metadata": { | |
"id": "6urp-yfyPUeF", | |
"colab_type": "code", | |
"outputId": "be912f96-9f7d-425e-9883-6819b09edb25", | |
"colab": { | |
"base_uri": "https://localhost:8080/", | |
"height": 34 | |
} | |
}, | |
"source": [ | |
"print(t.version.cuda)" | |
], | |
"execution_count": 0, | |
"outputs": [ | |
{ | |
"output_type": "stream", | |
"text": [ | |
"10.1\n" | |
], | |
"name": "stdout" | |
} | |
] | |
}, | |
{ | |
"cell_type": "code", | |
"metadata": { | |
"id": "6z1BCbEcMG1w", | |
"colab_type": "code", | |
"outputId": "15f939ad-8b2e-475d-bf2a-aa37ca899bb2", | |
"colab": { | |
"base_uri": "https://localhost:8080/", | |
"height": 306 | |
} | |
}, | |
"source": [ | |
"gpu_info = !nvidia-smi\n", | |
"gpu_info = '\\n'.join(gpu_info)\n", | |
"if gpu_info.find('failed') >= 0:\n", | |
" print('Select the Runtime → \"Change runtime type\" menu to enable a GPU accelerator, ')\n", | |
" print('and then re-execute this cell.')\n", | |
"else:\n", | |
" print(gpu_info)" | |
], | |
"execution_count": 0, | |
"outputs": [ | |
{ | |
"output_type": "stream", | |
"text": [ | |
"Tue Mar 17 17:11:15 2020 \n", | |
"+-----------------------------------------------------------------------------+\n", | |
"| NVIDIA-SMI 440.59 Driver Version: 418.67 CUDA Version: 10.1 |\n", | |
"|-------------------------------+----------------------+----------------------+\n", | |
"| GPU Name Persistence-M| Bus-Id Disp.A | Volatile Uncorr. ECC |\n", | |
"| Fan Temp Perf Pwr:Usage/Cap| Memory-Usage | GPU-Util Compute M. |\n", | |
"|===============================+======================+======================|\n", | |
"| 0 Tesla P100-PCIE... Off | 00000000:00:04.0 Off | 0 |\n", | |
"| N/A 52C P0 28W / 250W | 0MiB / 16280MiB | 0% Default |\n", | |
"+-------------------------------+----------------------+----------------------+\n", | |
" \n", | |
"+-----------------------------------------------------------------------------+\n", | |
"| Processes: GPU Memory |\n", | |
"| GPU PID Type Process name Usage |\n", | |
"|=============================================================================|\n", | |
"| No running processes found |\n", | |
"+-----------------------------------------------------------------------------+\n" | |
], | |
"name": "stdout" | |
} | |
] | |
}, | |
{ | |
"cell_type": "markdown", | |
"metadata": { | |
"id": "psJfSgLcPUeI", | |
"colab_type": "text" | |
}, | |
"source": [ | |
        "If you run this code locally, make sure `t.cuda.is_available() == True`; otherwise, install the PyTorch build that matches your CUDA version.\n", | |
        "If you want to run the code on CPU only, remove every `.cuda()` call from the code." | |
] | |
}, | |
{ | |
"cell_type": "code", | |
"metadata": { | |
"id": "RZbBFDRxPUeI", | |
"colab_type": "code", | |
"outputId": "c4c87845-a926-4dfd-9b8f-802ba76500d7", | |
"colab": { | |
"base_uri": "https://localhost:8080/", | |
"height": 34 | |
} | |
}, | |
"source": [ | |
"t.cuda.is_available()" | |
], | |
"execution_count": 0, | |
"outputs": [ | |
{ | |
"output_type": "execute_result", | |
"data": { | |
"text/plain": [ | |
"True" | |
] | |
}, | |
"metadata": { | |
"tags": [] | |
}, | |
"execution_count": 5 | |
} | |
] | |
}, | |
{ | |
"cell_type": "markdown", | |
"metadata": { | |
"id": "xma_fGFIPUeL", | |
"colab_type": "text" | |
}, | |
"source": [ | |
"# 2. Data " | |
] | |
}, | |
{ | |
"cell_type": "markdown", | |
"metadata": { | |
"id": "RZ6JEeHlPUeM", | |
"colab_type": "text" | |
}, | |
"source": [ | |
"## 2.1 Overview" | |
] | |
}, | |
{ | |
"cell_type": "markdown", | |
"metadata": { | |
"id": "MrCl5DjBPUeN", | |
"colab_type": "text" | |
}, | |
"source": [ | |
        "We use the `datashare.csv` dataset provided by the authors of the paper." | |
] | |
}, | |
{ | |
"cell_type": "code", | |
"metadata": { | |
"id": "mPkbG5XaUf-h", | |
"colab_type": "code", | |
"colab": {} | |
}, | |
"source": [ | |
"# Connect with Google Drive\n", | |
"from google.colab import drive\n", | |
"drive.mount('/content/drive', force_remount = True)" | |
], | |
"execution_count": 0, | |
"outputs": [] | |
}, | |
{ | |
"cell_type": "code", | |
"metadata": { | |
"id": "yOBynFNbWKb9", | |
"colab_type": "code", | |
"outputId": "10f30eb8-75bc-478b-d43c-776037fe60d6", | |
"colab": { | |
"base_uri": "https://localhost:8080/", | |
"height": 34 | |
} | |
}, | |
"source": [ | |
"cd drive/My Drive/Colab Notebooks" | |
], | |
"execution_count": 0, | |
"outputs": [ | |
{ | |
"output_type": "stream", | |
"text": [ | |
"/content/drive/My Drive/Colab Notebooks\n" | |
], | |
"name": "stdout" | |
} | |
] | |
}, | |
{ | |
"cell_type": "code", | |
"metadata": { | |
"id": "tzpgOXZuPUeO", | |
"colab_type": "code", | |
"colab": {} | |
}, | |
"source": [ | |
"data = pd.read_csv('./xiu_month_rf_hpr.csv')" | |
], | |
"execution_count": 0, | |
"outputs": [] | |
}, | |
{ | |
"cell_type": "code", | |
"metadata": { | |
"id": "FlMw8B41DpNN", | |
"colab_type": "code", | |
"colab": {} | |
}, | |
"source": [ | |
"#we don't want return to contain nan\n", | |
"data=data[data['return'].isnull()==False]" | |
], | |
"execution_count": 0, | |
"outputs": [] | |
}, | |
{ | |
"cell_type": "code", | |
"metadata": { | |
"id": "cT-3qO_MJtB0", | |
"colab_type": "code", | |
"colab": {} | |
}, | |
"source": [ | |
"#RF is in percentages\n", | |
"data['RF'] = data['RF']/100" | |
], | |
"execution_count": 0, | |
"outputs": [] | |
}, | |
{ | |
"cell_type": "code", | |
"metadata": { | |
"id": "vnrHUF45JSiG", | |
"colab_type": "code", | |
"colab": {} | |
}, | |
"source": [ | |
"data['return'] = data['return'] - data['RF']" | |
], | |
"execution_count": 0, | |
"outputs": [] | |
}, | |
{ | |
"cell_type": "code", | |
"metadata": { | |
"id": "PHeNtc0EJdzl", | |
"colab_type": "code", | |
"colab": {} | |
}, | |
"source": [ | |
"#drop the unnecessary columns\n", | |
"data = data.drop(columns = [\"MONTH\", \"RF\"])" | |
], | |
"execution_count": 0, | |
"outputs": [] | |
}, | |
{ | |
"cell_type": "code", | |
"metadata": { | |
"id": "CkBau5sjPUeR", | |
"colab_type": "code", | |
"colab": {} | |
}, | |
"source": [ | |
        "#shift the return to future return\n", | |
        "#NOTE(review): shift(periods=1) gives each row the PREVIOUS row's return and is\n", | |
        "#applied to the whole frame without grouping by permno, so values cross stock\n", | |
        "#boundaries. If the goal is next-period return per stock on date-ascending data,\n", | |
        "#groupby('permno')['return'].shift(-1) would be expected -- confirm the intended\n", | |
        "#alignment and the sort order of the input file.\n", | |
        "data['return']=data['return'].shift(periods=1)" | |
], | |
"execution_count": 0, | |
"outputs": [] | |
}, | |
{ | |
"cell_type": "code", | |
"metadata": { | |
"id": "V5C8-W_SMzN5", | |
"colab_type": "code", | |
"colab": {} | |
}, | |
"source": [ | |
"#we don't want return to contain nan\n", | |
"data=data[data['return'].isnull()==False]" | |
], | |
"execution_count": 0, | |
"outputs": [] | |
}, | |
{ | |
"cell_type": "code", | |
"metadata": { | |
"id": "XdzhwhSRQ9Z5", | |
"colab_type": "code", | |
"outputId": "510b5a42-5929-4d62-eb3a-c7350da05bbb", | |
"colab": { | |
"base_uri": "https://localhost:8080/", | |
"height": 289 | |
} | |
}, | |
"source": [ | |
"data.columns" | |
], | |
"execution_count": 0, | |
"outputs": [ | |
{ | |
"output_type": "execute_result", | |
"data": { | |
"text/plain": [ | |
"Index(['permno', 'DATE', 'mvel1', 'beta', 'betasq', 'chmom', 'dolvol',\n", | |
" 'idiovol', 'indmom', 'mom1m', 'mom6m', 'mom12m', 'mom36m', 'pricedelay',\n", | |
" 'turn', 'absacc', 'acc', 'age', 'agr', 'bm', 'bm_ia', 'cashdebt',\n", | |
" 'cashpr', 'cfp', 'cfp_ia', 'chatoia', 'chcsho', 'chempia', 'chinv',\n", | |
" 'chpmia', 'convind', 'currat', 'depr', 'divi', 'divo', 'dy', 'egr',\n", | |
" 'ep', 'gma', 'grcapx', 'grltnoa', 'herf', 'hire', 'invest', 'lev',\n", | |
" 'lgr', 'mve_ia', 'operprof', 'orgcap', 'pchcapx_ia', 'pchcurrat',\n", | |
" 'pchdepr', 'pchgm_pchsale', 'pchquick', 'pchsale_pchinvt',\n", | |
" 'pchsale_pchrect', 'pchsale_pchxsga', 'pchsaleinv', 'pctacc', 'ps',\n", | |
" 'quick', 'rd', 'rd_mve', 'rd_sale', 'realestate', 'roic', 'salecash',\n", | |
" 'saleinv', 'salerec', 'secured', 'securedind', 'sgr', 'sin', 'sp',\n", | |
" 'tang', 'tb', 'aeavol', 'cash', 'chtx', 'cinvest', 'ear', 'nincr',\n", | |
" 'roaq', 'roavol', 'roeq', 'rsup', 'stdacc', 'stdcf', 'ms', 'baspread',\n", | |
" 'ill', 'maxret', 'retvol', 'std_dolvol', 'std_turn', 'zerotrade',\n", | |
" 'sic2', 'PERMNO', 'date', 'return'],\n", | |
" dtype='object')" | |
] | |
}, | |
"metadata": { | |
"tags": [] | |
}, | |
"execution_count": 15 | |
} | |
] | |
}, | |
{ | |
"cell_type": "code", | |
"metadata": { | |
"id": "ZFfYUw7jPUeT", | |
"colab_type": "code", | |
"outputId": "4b2c9394-7fc7-4705-a7c5-604855e8f89f", | |
"colab": { | |
"base_uri": "https://localhost:8080/", | |
"height": 34 | |
} | |
}, | |
"source": [ | |
"data.shape" | |
], | |
"execution_count": 0, | |
"outputs": [ | |
{ | |
"output_type": "execute_result", | |
"data": { | |
"text/plain": [ | |
"(3739448, 100)" | |
] | |
}, | |
"metadata": { | |
"tags": [] | |
}, | |
"execution_count": 16 | |
} | |
] | |
}, | |
{ | |
"cell_type": "code", | |
"metadata": { | |
"id": "0KhP6wl_y5Pu", | |
"colab_type": "code", | |
"colab": {} | |
}, | |
"source": [ | |
"" | |
], | |
"execution_count": 0, | |
"outputs": [] | |
}, | |
{ | |
"cell_type": "code", | |
"metadata": { | |
"id": "5VLqDjQqPUeZ", | |
"colab_type": "code", | |
"colab": {} | |
}, | |
"source": [ | |
"summary = data.describe()" | |
], | |
"execution_count": 0, | |
"outputs": [] | |
}, | |
{ | |
"cell_type": "code", | |
"metadata": { | |
"id": "w1hc8hFaPUeb", | |
"colab_type": "code", | |
"outputId": "0b16b58c-88f6-4c81-c198-eb5a92b85ea5", | |
"colab": { | |
"base_uri": "https://localhost:8080/", | |
"height": 397 | |
} | |
}, | |
"source": [ | |
"summary" | |
], | |
"execution_count": 0, | |
"outputs": [ | |
{ | |
"output_type": "execute_result", | |
"data": { | |
"text/html": [ | |
"<div>\n", | |
"<style scoped>\n", | |
" .dataframe tbody tr th:only-of-type {\n", | |
" vertical-align: middle;\n", | |
" }\n", | |
"\n", | |
" .dataframe tbody tr th {\n", | |
" vertical-align: top;\n", | |
" }\n", | |
"\n", | |
" .dataframe thead th {\n", | |
" text-align: right;\n", | |
" }\n", | |
"</style>\n", | |
"<table border=\"1\" class=\"dataframe\">\n", | |
" <thead>\n", | |
" <tr style=\"text-align: right;\">\n", | |
" <th></th>\n", | |
" <th>permno</th>\n", | |
" <th>DATE</th>\n", | |
" <th>mvel1</th>\n", | |
" <th>beta</th>\n", | |
" <th>betasq</th>\n", | |
" <th>chmom</th>\n", | |
" <th>dolvol</th>\n", | |
" <th>idiovol</th>\n", | |
" <th>indmom</th>\n", | |
" <th>mom1m</th>\n", | |
" <th>mom6m</th>\n", | |
" <th>mom12m</th>\n", | |
" <th>mom36m</th>\n", | |
" <th>pricedelay</th>\n", | |
" <th>turn</th>\n", | |
" <th>absacc</th>\n", | |
" <th>acc</th>\n", | |
" <th>age</th>\n", | |
" <th>agr</th>\n", | |
" <th>bm</th>\n", | |
" <th>bm_ia</th>\n", | |
" <th>cashdebt</th>\n", | |
" <th>cashpr</th>\n", | |
" <th>cfp</th>\n", | |
" <th>cfp_ia</th>\n", | |
" <th>chatoia</th>\n", | |
" <th>chcsho</th>\n", | |
" <th>chempia</th>\n", | |
" <th>chinv</th>\n", | |
" <th>chpmia</th>\n", | |
" <th>convind</th>\n", | |
" <th>currat</th>\n", | |
" <th>depr</th>\n", | |
" <th>divi</th>\n", | |
" <th>divo</th>\n", | |
" <th>dy</th>\n", | |
" <th>egr</th>\n", | |
" <th>ep</th>\n", | |
" <th>gma</th>\n", | |
" <th>grcapx</th>\n", | |
" <th>...</th>\n", | |
" <th>ps</th>\n", | |
" <th>quick</th>\n", | |
" <th>rd</th>\n", | |
" <th>rd_mve</th>\n", | |
" <th>rd_sale</th>\n", | |
" <th>realestate</th>\n", | |
" <th>roic</th>\n", | |
" <th>salecash</th>\n", | |
" <th>saleinv</th>\n", | |
" <th>salerec</th>\n", | |
" <th>secured</th>\n", | |
" <th>securedind</th>\n", | |
" <th>sgr</th>\n", | |
" <th>sin</th>\n", | |
" <th>sp</th>\n", | |
" <th>tang</th>\n", | |
" <th>tb</th>\n", | |
" <th>aeavol</th>\n", | |
" <th>cash</th>\n", | |
" <th>chtx</th>\n", | |
" <th>cinvest</th>\n", | |
" <th>ear</th>\n", | |
" <th>nincr</th>\n", | |
" <th>roaq</th>\n", | |
" <th>roavol</th>\n", | |
" <th>roeq</th>\n", | |
" <th>rsup</th>\n", | |
" <th>stdacc</th>\n", | |
" <th>stdcf</th>\n", | |
" <th>ms</th>\n", | |
" <th>baspread</th>\n", | |
" <th>ill</th>\n", | |
" <th>maxret</th>\n", | |
" <th>retvol</th>\n", | |
" <th>std_dolvol</th>\n", | |
" <th>std_turn</th>\n", | |
" <th>zerotrade</th>\n", | |
" <th>sic2</th>\n", | |
" <th>PERMNO</th>\n", | |
" <th>return</th>\n", | |
" </tr>\n", | |
" </thead>\n", | |
" <tbody>\n", | |
" <tr>\n", | |
" <th>count</th>\n", | |
" <td>3.739448e+06</td>\n", | |
" <td>3.739448e+06</td>\n", | |
" <td>3.736495e+06</td>\n", | |
" <td>3.371274e+06</td>\n", | |
" <td>3.371274e+06</td>\n", | |
" <td>3.428516e+06</td>\n", | |
" <td>3.393674e+06</td>\n", | |
" <td>3.371274e+06</td>\n", | |
" <td>3.739335e+06</td>\n", | |
" <td>3.710613e+06</td>\n", | |
" <td>3.596218e+06</td>\n", | |
" <td>3.428516e+06</td>\n", | |
" <td>2.836227e+06</td>\n", | |
" <td>3.371202e+06</td>\n", | |
" <td>3.395522e+06</td>\n", | |
" <td>2.315500e+06</td>\n", | |
" <td>2.315500e+06</td>\n", | |
" <td>2.819051e+06</td>\n", | |
" <td>2.584590e+06</td>\n", | |
" <td>2.770186e+06</td>\n", | |
" <td>2.770186e+06</td>\n", | |
" <td>2.675175e+06</td>\n", | |
" <td>2.773472e+06</td>\n", | |
" <td>2.498440e+06</td>\n", | |
" <td>2.498440e+06</td>\n", | |
" <td>2.344492e+06</td>\n", | |
" <td>2.580992e+06</td>\n", | |
" <td>2.551149e+06</td>\n", | |
" <td>2.501191e+06</td>\n", | |
" <td>2.544197e+06</td>\n", | |
" <td>2.819051e+06</td>\n", | |
" <td>2.697751e+06</td>\n", | |
" <td>2.623932e+06</td>\n", | |
" <td>2.584633e+06</td>\n", | |
" <td>2.584633e+06</td>\n", | |
" <td>2.805990e+06</td>\n", | |
" <td>2.557353e+06</td>\n", | |
" <td>2.816012e+06</td>\n", | |
" <td>2.578034e+06</td>\n", | |
" <td>2.223305e+06</td>\n", | |
" <td>...</td>\n", | |
" <td>2.584633e+06</td>\n", | |
" <td>2.670802e+06</td>\n", | |
" <td>2.584633e+06</td>\n", | |
" <td>1.322812e+06</td>\n", | |
" <td>1.305316e+06</td>\n", | |
" <td>945405.000000</td>\n", | |
" <td>2.675117e+06</td>\n", | |
" <td>2.781557e+06</td>\n", | |
" <td>2.185371e+06</td>\n", | |
" <td>2.694111e+06</td>\n", | |
" <td>1.428519e+06</td>\n", | |
" <td>2.819051e+06</td>\n", | |
" <td>2.549270e+06</td>\n", | |
" <td>2.819051e+06</td>\n", | |
" <td>2.808559e+06</td>\n", | |
" <td>2.636985e+06</td>\n", | |
" <td>2.485428e+06</td>\n", | |
" <td>2.213614e+06</td>\n", | |
" <td>2.138232e+06</td>\n", | |
" <td>2.050588e+06</td>\n", | |
" <td>2.028068e+06</td>\n", | |
" <td>2.241263e+06</td>\n", | |
" <td>2.243545e+06</td>\n", | |
" <td>2.159743e+06</td>\n", | |
" <td>1.842151e+06</td>\n", | |
" <td>2.220430e+06</td>\n", | |
" <td>2.180953e+06</td>\n", | |
" <td>1.427430e+06</td>\n", | |
" <td>1.427430e+06</td>\n", | |
" <td>2.180630e+06</td>\n", | |
" <td>3.738812e+06</td>\n", | |
" <td>3.432781e+06</td>\n", | |
" <td>3.738908e+06</td>\n", | |
" <td>3.736341e+06</td>\n", | |
" <td>3.425130e+06</td>\n", | |
" <td>3.435372e+06</td>\n", | |
" <td>3.431336e+06</td>\n", | |
" <td>3.485191e+06</td>\n", | |
" <td>3.739448e+06</td>\n", | |
" <td>3.739448e+06</td>\n", | |
" </tr>\n", | |
" <tr>\n", | |
" <th>mean</th>\n", | |
" <td>5.640640e+04</td>\n", | |
" <td>1.992516e+07</td>\n", | |
" <td>1.079295e+06</td>\n", | |
" <td>1.009825e+00</td>\n", | |
" <td>1.443811e+00</td>\n", | |
" <td>1.390837e-03</td>\n", | |
" <td>1.079274e+01</td>\n", | |
" <td>6.043689e-02</td>\n", | |
" <td>1.245873e-01</td>\n", | |
" <td>9.164804e-03</td>\n", | |
" <td>4.885550e-02</td>\n", | |
" <td>1.176537e-01</td>\n", | |
" <td>3.139641e-01</td>\n", | |
" <td>1.566428e-01</td>\n", | |
" <td>1.004903e+00</td>\n", | |
" <td>9.028293e-02</td>\n", | |
" <td>-2.009567e-02</td>\n", | |
" <td>1.126533e+01</td>\n", | |
" <td>-1.570820e-01</td>\n", | |
" <td>2.492919e+00</td>\n", | |
" <td>-5.026388e-01</td>\n", | |
" <td>2.821344e-02</td>\n", | |
" <td>-5.203027e-01</td>\n", | |
" <td>7.714399e-02</td>\n", | |
" <td>-1.520239e-01</td>\n", | |
" <td>-4.727783e-04</td>\n", | |
" <td>1.137399e-01</td>\n", | |
" <td>-9.211914e-02</td>\n", | |
" <td>1.274625e-02</td>\n", | |
" <td>1.734495e-01</td>\n", | |
" <td>1.403582e-01</td>\n", | |
" <td>3.738560e+00</td>\n", | |
" <td>2.546599e-01</td>\n", | |
" <td>3.110654e-02</td>\n", | |
" <td>3.034048e-02</td>\n", | |
" <td>2.094585e-02</td>\n", | |
" <td>1.408368e-01</td>\n", | |
" <td>-1.709670e-02</td>\n", | |
" <td>3.574233e-01</td>\n", | |
" <td>9.058863e-01</td>\n", | |
" <td>...</td>\n", | |
" <td>4.167191e+00</td>\n", | |
" <td>3.047020e+00</td>\n", | |
" <td>1.267689e-01</td>\n", | |
" <td>5.891470e-02</td>\n", | |
" <td>4.931038e-01</td>\n", | |
" <td>0.261367</td>\n", | |
" <td>-6.433974e-02</td>\n", | |
" <td>4.996550e+01</td>\n", | |
" <td>2.695078e+01</td>\n", | |
" <td>1.158898e+01</td>\n", | |
" <td>5.621686e-01</td>\n", | |
" <td>4.031154e-01</td>\n", | |
" <td>1.914831e-01</td>\n", | |
" <td>8.879584e-03</td>\n", | |
" <td>2.215269e+00</td>\n", | |
" <td>5.380046e-01</td>\n", | |
" <td>-1.630156e-01</td>\n", | |
" <td>8.094319e-01</td>\n", | |
" <td>1.569021e-01</td>\n", | |
" <td>1.008276e-03</td>\n", | |
" <td>2.018008e-01</td>\n", | |
" <td>3.227083e-03</td>\n", | |
" <td>1.015102e+00</td>\n", | |
" <td>3.200990e-04</td>\n", | |
" <td>2.630615e-02</td>\n", | |
" <td>7.392625e-03</td>\n", | |
" <td>1.921570e-02</td>\n", | |
" <td>6.254564e+00</td>\n", | |
" <td>1.277835e+01</td>\n", | |
" <td>3.625508e+00</td>\n", | |
" <td>5.400234e-02</td>\n", | |
" <td>5.082129e-06</td>\n", | |
" <td>6.982922e-02</td>\n", | |
" <td>3.062196e-02</td>\n", | |
" <td>8.671846e-01</td>\n", | |
" <td>4.074175e+00</td>\n", | |
" <td>1.509971e+00</td>\n", | |
" <td>4.741589e+01</td>\n", | |
" <td>5.640640e+04</td>\n", | |
" <td>7.267635e-03</td>\n", | |
" </tr>\n", | |
" <tr>\n", | |
" <th>std</th>\n", | |
" <td>2.732097e+04</td>\n", | |
" <td>1.420988e+05</td>\n", | |
" <td>4.825887e+06</td>\n", | |
" <td>6.480901e-01</td>\n", | |
" <td>1.723233e+00</td>\n", | |
" <td>5.388213e-01</td>\n", | |
" <td>2.922874e+00</td>\n", | |
" <td>3.660089e-02</td>\n", | |
" <td>2.861385e-01</td>\n", | |
" <td>1.473740e-01</td>\n", | |
" <td>3.501918e-01</td>\n", | |
" <td>5.601429e-01</td>\n", | |
" <td>9.156545e-01</td>\n", | |
" <td>1.353978e+00</td>\n", | |
" <td>1.047787e+01</td>\n", | |
" <td>9.846507e-02</td>\n", | |
" <td>1.272371e-01</td>\n", | |
" <td>1.009857e+01</td>\n", | |
" <td>4.204604e-01</td>\n", | |
" <td>2.676182e+01</td>\n", | |
" <td>2.446237e+01</td>\n", | |
" <td>1.543410e+01</td>\n", | |
" <td>8.348633e+01</td>\n", | |
" <td>1.332607e+00</td>\n", | |
" <td>6.283583e+00</td>\n", | |
" <td>2.192646e-01</td>\n", | |
" <td>3.242773e-01</td>\n", | |
" <td>9.348005e-01</td>\n", | |
" <td>5.621459e-02</td>\n", | |
" <td>5.779889e+00</td>\n", | |
" <td>3.473584e-01</td>\n", | |
" <td>1.230473e+01</td>\n", | |
" <td>3.934959e-01</td>\n", | |
" <td>1.736057e-01</td>\n", | |
" <td>1.715224e-01</td>\n", | |
" <td>3.718645e-02</td>\n", | |
" <td>6.511498e-01</td>\n", | |
" <td>3.566570e-01</td>\n", | |
" <td>3.356517e-01</td>\n", | |
" <td>4.528311e+00</td>\n", | |
" <td>...</td>\n", | |
" <td>1.715702e+00</td>\n", | |
" <td>1.230901e+01</td>\n", | |
" <td>3.327139e-01</td>\n", | |
" <td>1.040411e-01</td>\n", | |
" <td>3.791595e+00</td>\n", | |
" <td>0.281477</td>\n", | |
" <td>8.379559e-01</td>\n", | |
" <td>1.559034e+02</td>\n", | |
" <td>7.878391e+01</td>\n", | |
" <td>5.241357e+01</td>\n", | |
" <td>5.224417e-01</td>\n", | |
" <td>4.905237e-01</td>\n", | |
" <td>5.803090e-01</td>\n", | |
" <td>9.381226e-02</td>\n", | |
" <td>3.611889e+00</td>\n", | |
" <td>1.533588e-01</td>\n", | |
" <td>1.809665e+00</td>\n", | |
" <td>2.065480e+00</td>\n", | |
" <td>2.028530e-01</td>\n", | |
" <td>1.150926e-02</td>\n", | |
" <td>2.492995e+01</td>\n", | |
" <td>7.901209e-02</td>\n", | |
" <td>1.370279e+00</td>\n", | |
" <td>5.178176e-02</td>\n", | |
" <td>4.758519e-02</td>\n", | |
" <td>2.096277e-01</td>\n", | |
" <td>2.142203e-01</td>\n", | |
" <td>7.498682e+01</td>\n", | |
" <td>1.355331e+02</td>\n", | |
" <td>1.674303e+00</td>\n", | |
" <td>7.253443e-02</td>\n", | |
" <td>2.663448e-05</td>\n", | |
" <td>7.038525e-02</td>\n", | |
" <td>2.528540e-02</td>\n", | |
" <td>4.007274e-01</td>\n", | |
" <td>9.178549e+00</td>\n", | |
" <td>3.536545e+00</td>\n", | |
" <td>1.970392e+01</td>\n", | |
" <td>2.732097e+04</td>\n", | |
" <td>1.728344e-01</td>\n", | |
" </tr>\n", | |
" <tr>\n", | |
" <th>min</th>\n", | |
" <td>1.000000e+04</td>\n", | |
" <td>1.957033e+07</td>\n", | |
" <td>0.000000e+00</td>\n", | |
" <td>-1.933279e+00</td>\n", | |
" <td>0.000000e+00</td>\n", | |
" <td>-9.062534e+00</td>\n", | |
" <td>-3.060271e+00</td>\n", | |
" <td>0.000000e+00</td>\n", | |
" <td>-7.794856e-01</td>\n", | |
" <td>-6.981132e-01</td>\n", | |
" <td>-1.000000e+00</td>\n", | |
" <td>-1.000000e+00</td>\n", | |
" <td>-1.000000e+00</td>\n", | |
" <td>-8.229368e+02</td>\n", | |
" <td>0.000000e+00</td>\n", | |
" <td>0.000000e+00</td>\n", | |
" <td>-1.257607e+00</td>\n", | |
" <td>1.000000e+00</td>\n", | |
" <td>-6.033304e+00</td>\n", | |
" <td>-2.439120e+01</td>\n", | |
" <td>-7.759533e+02</td>\n", | |
" <td>-1.024800e+04</td>\n", | |
" <td>-8.364906e+03</td>\n", | |
" <td>-2.057420e+02</td>\n", | |
" <td>-1.912332e+02</td>\n", | |
" <td>-1.314998e+00</td>\n", | |
" <td>-8.998305e-01</td>\n", | |
" <td>-5.622415e+01</td>\n", | |
" <td>-3.003145e-01</td>\n", | |
" <td>-1.574528e+02</td>\n", | |
" <td>0.000000e+00</td>\n", | |
" <td>1.418063e-03</td>\n", | |
" <td>-9.838288e-01</td>\n", | |
" <td>0.000000e+00</td>\n", | |
" <td>0.000000e+00</td>\n", | |
" <td>-3.313476e+00</td>\n", | |
" <td>-6.199143e+00</td>\n", | |
" <td>-1.821396e+01</td>\n", | |
" <td>-9.219229e-01</td>\n", | |
" <td>-4.047032e+02</td>\n", | |
" <td>...</td>\n", | |
" <td>0.000000e+00</td>\n", | |
" <td>1.418063e-03</td>\n", | |
" <td>0.000000e+00</td>\n", | |
" <td>-3.411239e-02</td>\n", | |
" <td>-9.038462e+01</td>\n", | |
" <td>0.000000</td>\n", | |
" <td>-1.717197e+01</td>\n", | |
" <td>-1.591636e+03</td>\n", | |
" <td>-1.066224e+02</td>\n", | |
" <td>-2.179600e+04</td>\n", | |
" <td>0.000000e+00</td>\n", | |
" <td>0.000000e+00</td>\n", | |
" <td>-1.000000e+00</td>\n", | |
" <td>0.000000e+00</td>\n", | |
" <td>-3.594196e+01</td>\n", | |
" <td>0.000000e+00</td>\n", | |
" <td>-4.219112e+01</td>\n", | |
" <td>-1.000000e+00</td>\n", | |
" <td>-1.431838e-01</td>\n", | |
" <td>-1.607357e-01</td>\n", | |
" <td>-2.833333e+03</td>\n", | |
" <td>-4.754902e-01</td>\n", | |
" <td>0.000000e+00</td>\n", | |
" <td>-5.334442e-01</td>\n", | |
" <td>4.202608e-06</td>\n", | |
" <td>-1.720769e+02</td>\n", | |
" <td>-3.343933e+01</td>\n", | |
" <td>0.000000e+00</td>\n", | |
" <td>5.354650e-06</td>\n", | |
" <td>0.000000e+00</td>\n", | |
" <td>-1.979275e+00</td>\n", | |
" <td>0.000000e+00</td>\n", | |
" <td>-7.335907e-02</td>\n", | |
" <td>0.000000e+00</td>\n", | |
" <td>0.000000e+00</td>\n", | |
" <td>0.000000e+00</td>\n", | |
" <td>1.954854e-12</td>\n", | |
" <td>1.000000e+00</td>\n", | |
" <td>1.000000e+04</td>\n", | |
" <td>-9.912950e-01</td>\n", | |
" </tr>\n", | |
" <tr>\n", | |
" <th>25%</th>\n", | |
" <td>3.052500e+04</td>\n", | |
" <td>1.982093e+07</td>\n", | |
" <td>2.032800e+04</td>\n", | |
" <td>5.400761e-01</td>\n", | |
" <td>2.975018e-01</td>\n", | |
" <td>-2.380378e-01</td>\n", | |
" <td>8.760727e+00</td>\n", | |
" <td>3.428336e-02</td>\n", | |
" <td>-4.939807e-02</td>\n", | |
" <td>-6.144062e-02</td>\n", | |
" <td>-1.355932e-01</td>\n", | |
" <td>-1.919505e-01</td>\n", | |
" <td>-2.126540e-01</td>\n", | |
" <td>-5.269740e-02</td>\n", | |
" <td>1.900418e-01</td>\n", | |
" <td>2.969798e-02</td>\n", | |
" <td>-7.354222e-02</td>\n", | |
" <td>4.000000e+00</td>\n", | |
" <td>-2.030585e-01</td>\n", | |
" <td>3.376279e-01</td>\n", | |
" <td>-4.110030e-01</td>\n", | |
" <td>1.410281e-02</td>\n", | |
" <td>-8.480907e+00</td>\n", | |
" <td>-4.018772e-02</td>\n", | |
" <td>-1.441469e-01</td>\n", | |
" <td>-7.099654e-02</td>\n", | |
" <td>0.000000e+00</td>\n", | |
" <td>-1.804556e-01</td>\n", | |
" <td>-1.534082e-03</td>\n", | |
" <td>-1.304695e-01</td>\n", | |
" <td>0.000000e+00</td>\n", | |
" <td>1.228087e+00</td>\n", | |
" <td>9.547563e-02</td>\n", | |
" <td>0.000000e+00</td>\n", | |
" <td>0.000000e+00</td>\n", | |
" <td>0.000000e+00</td>\n", | |
" <td>-2.729350e-02</td>\n", | |
" <td>-2.553201e-03</td>\n", | |
" <td>1.143995e-01</td>\n", | |
" <td>-3.721840e-01</td>\n", | |
" <td>...</td>\n", | |
" <td>3.000000e+00</td>\n", | |
" <td>8.793050e-01</td>\n", | |
" <td>0.000000e+00</td>\n", | |
" <td>3.133341e-03</td>\n", | |
" <td>2.423276e-03</td>\n", | |
" <td>0.107750</td>\n", | |
" <td>6.581906e-03</td>\n", | |
" <td>2.635117e+00</td>\n", | |
" <td>4.606254e+00</td>\n", | |
" <td>3.735176e+00</td>\n", | |
" <td>1.497460e-02</td>\n", | |
" <td>0.000000e+00</td>\n", | |
" <td>-1.065269e-02</td>\n", | |
" <td>0.000000e+00</td>\n", | |
" <td>4.256670e-01</td>\n", | |
" <td>4.697502e-01</td>\n", | |
" <td>-7.153101e-01</td>\n", | |
" <td>-2.646674e-01</td>\n", | |
" <td>2.256222e-02</td>\n", | |
" <td>-1.279341e-03</td>\n", | |
" <td>-2.838249e-02</td>\n", | |
" <td>-3.225806e-02</td>\n", | |
" <td>0.000000e+00</td>\n", | |
" <td>1.322650e-05</td>\n", | |
" <td>5.141335e-03</td>\n", | |
" <td>8.120633e-04</td>\n", | |
" <td>-4.400480e-03</td>\n", | |
" <td>7.941321e-02</td>\n", | |
" <td>8.458948e-02</td>\n", | |
" <td>2.000000e+00</td>\n", | |
" <td>1.931990e-02</td>\n", | |
" <td>1.070679e-08</td>\n", | |
" <td>2.777778e-02</td>\n", | |
" <td>1.444932e-02</td>\n", | |
" <td>5.540967e-01</td>\n", | |
" <td>7.449102e-01</td>\n", | |
" <td>2.009551e-08</td>\n", | |
" <td>3.400000e+01</td>\n", | |
" <td>3.052500e+04</td>\n", | |
" <td>-6.532400e-02</td>\n", | |
" </tr>\n", | |
" <tr>\n", | |
" <th>50%</th>\n", | |
" <td>6.177800e+04</td>\n", | |
" <td>1.994063e+07</td>\n", | |
" <td>8.572500e+04</td>\n", | |
" <td>9.384522e-01</td>\n", | |
" <td>8.842564e-01</td>\n", | |
" <td>-4.718135e-03</td>\n", | |
" <td>1.066154e+01</td>\n", | |
" <td>5.120070e-02</td>\n", | |
" <td>1.042282e-01</td>\n", | |
" <td>0.000000e+00</td>\n", | |
" <td>2.208386e-02</td>\n", | |
" <td>5.331414e-02</td>\n", | |
" <td>1.520912e-01</td>\n", | |
" <td>6.873652e-02</td>\n", | |
" <td>4.587454e-01</td>\n", | |
" <td>6.230405e-02</td>\n", | |
" <td>-1.916127e-02</td>\n", | |
" <td>8.000000e+00</td>\n", | |
" <td>-7.571500e-02</td>\n", | |
" <td>6.494110e-01</td>\n", | |
" <td>-1.083135e-01</td>\n", | |
" <td>1.287089e-01</td>\n", | |
" <td>-6.661708e-01</td>\n", | |
" <td>4.781421e-02</td>\n", | |
" <td>-6.330727e-03</td>\n", | |
" <td>1.589537e-03</td>\n", | |
" <td>6.847698e-03</td>\n", | |
" <td>-5.982460e-02</td>\n", | |
" <td>2.894090e-04</td>\n", | |
" <td>-3.168992e-03</td>\n", | |
" <td>0.000000e+00</td>\n", | |
" <td>1.978973e+00</td>\n", | |
" <td>1.490439e-01</td>\n", | |
" <td>0.000000e+00</td>\n", | |
" <td>0.000000e+00</td>\n", | |
" <td>4.161961e-03</td>\n", | |
" <td>7.802550e-02</td>\n", | |
" <td>5.058908e-02</td>\n", | |
" <td>3.006515e-01</td>\n", | |
" <td>1.338296e-01</td>\n", | |
" <td>...</td>\n", | |
" <td>4.000000e+00</td>\n", | |
" <td>1.296124e+00</td>\n", | |
" <td>0.000000e+00</td>\n", | |
" <td>2.496483e-02</td>\n", | |
" <td>2.425175e-02</td>\n", | |
" <td>0.227216</td>\n", | |
" <td>6.736905e-02</td>\n", | |
" <td>1.005086e+01</td>\n", | |
" <td>7.609959e+00</td>\n", | |
" <td>5.895310e+00</td>\n", | |
" <td>5.376000e-01</td>\n", | |
" <td>0.000000e+00</td>\n", | |
" <td>9.561810e-02</td>\n", | |
" <td>0.000000e+00</td>\n", | |
" <td>1.033256e+00</td>\n", | |
" <td>5.477468e-01</td>\n", | |
" <td>-1.265953e-01</td>\n", | |
" <td>2.550494e-01</td>\n", | |
" <td>6.937483e-02</td>\n", | |
" <td>6.297230e-05</td>\n", | |
" <td>-1.184269e-03</td>\n", | |
" <td>5.297698e-04</td>\n", | |
" <td>1.000000e+00</td>\n", | |
" <td>7.819408e-03</td>\n", | |
" <td>1.134994e-02</td>\n", | |
" <td>2.398843e-02</td>\n", | |
" <td>1.446924e-02</td>\n", | |
" <td>1.317511e-01</td>\n", | |
" <td>1.436099e-01</td>\n", | |
" <td>4.000000e+00</td>\n", | |
" <td>3.276041e-02</td>\n", | |
" <td>1.291728e-07</td>\n", | |
" <td>4.878049e-02</td>\n", | |
" <td>2.349544e-02</td>\n", | |
" <td>8.029089e-01</td>\n", | |
" <td>1.695987e+00</td>\n", | |
" <td>5.636672e-08</td>\n", | |
" <td>4.800000e+01</td>\n", | |
" <td>6.177800e+04</td>\n", | |
" <td>-3.337000e-03</td>\n", | |
" </tr>\n", | |
" <tr>\n", | |
" <th>75%</th>\n", | |
" <td>8.068000e+04</td>\n", | |
" <td>2.004023e+07</td>\n", | |
" <td>4.118188e+05</td>\n", | |
" <td>1.388240e+00</td>\n", | |
" <td>1.930812e+00</td>\n", | |
" <td>2.335132e-01</td>\n", | |
" <td>1.278733e+01</td>\n", | |
" <td>7.686123e-02</td>\n", | |
" <td>2.611684e-01</td>\n", | |
" <td>6.666667e-02</td>\n", | |
" <td>1.813771e-01</td>\n", | |
" <td>3.071035e-01</td>\n", | |
" <td>5.797258e-01</td>\n", | |
" <td>3.222820e-01</td>\n", | |
" <td>1.087716e+00</td>\n", | |
" <td>1.141448e-01</td>\n", | |
" <td>4.696611e-02</td>\n", | |
" <td>1.600000e+01</td>\n", | |
" <td>1.586623e-02</td>\n", | |
" <td>1.124785e+00</td>\n", | |
" <td>2.505711e-01</td>\n", | |
" <td>2.820772e-01</td>\n", | |
" <td>4.930041e+00</td>\n", | |
" <td>1.235594e-01</td>\n", | |
" <td>8.755702e-02</td>\n", | |
" <td>7.816332e-02</td>\n", | |
" <td>6.497040e-02</td>\n", | |
" <td>2.976349e-02</td>\n", | |
" <td>2.251888e-02</td>\n", | |
" <td>5.664668e-02</td>\n", | |
" <td>0.000000e+00</td>\n", | |
" <td>3.209117e+00</td>\n", | |
" <td>2.630835e-01</td>\n", | |
" <td>0.000000e+00</td>\n", | |
" <td>0.000000e+00</td>\n", | |
" <td>3.052534e-02</td>\n", | |
" <td>1.954101e-01</td>\n", | |
" <td>8.943522e-02</td>\n", | |
" <td>5.261533e-01</td>\n", | |
" <td>9.173273e-01</td>\n", | |
" <td>...</td>\n", | |
" <td>5.000000e+00</td>\n", | |
" <td>2.282214e+00</td>\n", | |
" <td>0.000000e+00</td>\n", | |
" <td>6.950250e-02</td>\n", | |
" <td>9.843776e-02</td>\n", | |
" <td>0.375574</td>\n", | |
" <td>1.337349e-01</td>\n", | |
" <td>3.565057e+01</td>\n", | |
" <td>1.717171e+01</td>\n", | |
" <td>8.917961e+00</td>\n", | |
" <td>1.000000e+00</td>\n", | |
" <td>1.000000e+00</td>\n", | |
" <td>2.371986e-01</td>\n", | |
" <td>0.000000e+00</td>\n", | |
" <td>2.435432e+00</td>\n", | |
" <td>6.116974e-01</td>\n", | |
" <td>3.853083e-01</td>\n", | |
" <td>1.105991e+00</td>\n", | |
" <td>2.069173e-01</td>\n", | |
" <td>3.174626e-03</td>\n", | |
" <td>2.111255e-02</td>\n", | |
" <td>3.675214e-02</td>\n", | |
" <td>2.000000e+00</td>\n", | |
" <td>1.978208e-02</td>\n", | |
" <td>2.711450e-02</td>\n", | |
" <td>4.376447e-02</td>\n", | |
" <td>5.448232e-02</td>\n", | |
" <td>2.518106e-01</td>\n", | |
" <td>2.943518e-01</td>\n", | |
" <td>5.000000e+00</td>\n", | |
" <td>5.894172e-02</td>\n", | |
" <td>1.264674e-06</td>\n", | |
" <td>8.571429e-02</td>\n", | |
" <td>3.830652e-02</td>\n", | |
" <td>1.118145e+00</td>\n", | |
" <td>3.927887e+00</td>\n", | |
" <td>9.545456e-01</td>\n", | |
" <td>6.300000e+01</td>\n", | |
" <td>8.068000e+04</td>\n", | |
" <td>6.276325e-02</td>\n", | |
" </tr>\n", | |
" <tr>\n", | |
" <th>max</th>\n", | |
" <td>9.343600e+04</td>\n", | |
" <td>2.016123e+07</td>\n", | |
" <td>1.221146e+08</td>\n", | |
" <td>3.987207e+00</td>\n", | |
" <td>1.589782e+01</td>\n", | |
" <td>8.083012e+00</td>\n", | |
" <td>1.900568e+01</td>\n", | |
" <td>2.720346e-01</td>\n", | |
" <td>7.682736e+00</td>\n", | |
" <td>2.172414e+00</td>\n", | |
" <td>7.844445e+00</td>\n", | |
" <td>1.168096e+01</td>\n", | |
" <td>1.685246e+01</td>\n", | |
" <td>9.918375e+02</td>\n", | |
" <td>1.734793e+04</td>\n", | |
" <td>1.257607e+00</td>\n", | |
" <td>1.155264e+00</td>\n", | |
" <td>5.400000e+01</td>\n", | |
" <td>8.265002e-01</td>\n", | |
" <td>2.460433e+03</td>\n", | |
" <td>1.564199e+03</td>\n", | |
" <td>4.256410e+01</td>\n", | |
" <td>1.139067e+04</td>\n", | |
" <td>1.477503e+02</td>\n", | |
" <td>3.657082e+02</td>\n", | |
" <td>2.663142e+00</td>\n", | |
" <td>7.208766e+00</td>\n", | |
" <td>6.629468e+01</td>\n", | |
" <td>3.991813e-01</td>\n", | |
" <td>1.530145e+02</td>\n", | |
" <td>1.000000e+00</td>\n", | |
" <td>8.330556e+02</td>\n", | |
" <td>1.509737e+01</td>\n", | |
" <td>1.000000e+00</td>\n", | |
" <td>1.000000e+00</td>\n", | |
" <td>1.159430e+00</td>\n", | |
" <td>1.385515e+01</td>\n", | |
" <td>8.404797e-01</td>\n", | |
" <td>6.138484e+00</td>\n", | |
" <td>4.866364e+02</td>\n", | |
" <td>...</td>\n", | |
" <td>9.000000e+00</td>\n", | |
" <td>8.330556e+02</td>\n", | |
" <td>1.000000e+00</td>\n", | |
" <td>2.370752e+00</td>\n", | |
" <td>1.584300e+02</td>\n", | |
" <td>57.709343</td>\n", | |
" <td>1.047405e+01</td>\n", | |
" <td>5.906600e+03</td>\n", | |
" <td>3.024533e+03</td>\n", | |
" <td>6.870769e+02</td>\n", | |
" <td>5.000000e+00</td>\n", | |
" <td>1.000000e+00</td>\n", | |
" <td>1.791538e+01</td>\n", | |
" <td>1.000000e+00</td>\n", | |
" <td>5.493151e+01</td>\n", | |
" <td>1.000000e+00</td>\n", | |
" <td>2.924935e+01</td>\n", | |
" <td>5.121570e+02</td>\n", | |
" <td>9.786834e-01</td>\n", | |
" <td>1.765701e-01</td>\n", | |
" <td>6.463467e+03</td>\n", | |
" <td>5.578913e-01</td>\n", | |
" <td>8.000000e+00</td>\n", | |
" <td>1.460327e+00</td>\n", | |
" <td>8.201621e-01</td>\n", | |
" <td>7.039832e+00</td>\n", | |
" <td>5.670456e+00</td>\n", | |
" <td>7.496989e+03</td>\n", | |
" <td>8.751301e+03</td>\n", | |
" <td>8.000000e+00</td>\n", | |
" <td>1.078788e+00</td>\n", | |
" <td>2.479339e-03</td>\n", | |
" <td>1.701520e+00</td>\n", | |
" <td>4.379252e-01</td>\n", | |
" <td>2.860123e+00</td>\n", | |
" <td>6.517474e+02</td>\n", | |
" <td>2.008697e+01</td>\n", | |
" <td>9.900000e+01</td>\n", | |
" <td>9.343600e+04</td>\n", | |
" <td>2.399660e+01</td>\n", | |
" </tr>\n", | |
" </tbody>\n", | |
"</table>\n", | |
"<p>8 rows × 99 columns</p>\n", | |
"</div>" | |
], | |
"text/plain": [ | |
" permno DATE ... PERMNO return\n", | |
"count 3.739448e+06 3.739448e+06 ... 3.739448e+06 3.739448e+06\n", | |
"mean 5.640640e+04 1.992516e+07 ... 5.640640e+04 7.267635e-03\n", | |
"std 2.732097e+04 1.420988e+05 ... 2.732097e+04 1.728344e-01\n", | |
"min 1.000000e+04 1.957033e+07 ... 1.000000e+04 -9.912950e-01\n", | |
"25% 3.052500e+04 1.982093e+07 ... 3.052500e+04 -6.532400e-02\n", | |
"50% 6.177800e+04 1.994063e+07 ... 6.177800e+04 -3.337000e-03\n", | |
"75% 8.068000e+04 2.004023e+07 ... 8.068000e+04 6.276325e-02\n", | |
"max 9.343600e+04 2.016123e+07 ... 9.343600e+04 2.399660e+01\n", | |
"\n", | |
"[8 rows x 99 columns]" | |
] | |
}, | |
"metadata": { | |
"tags": [] | |
}, | |
"execution_count": 18 | |
} | |
] | |
}, | |
{ | |
"cell_type": "markdown", | |
"metadata": { | |
"id": "mPHNtLRMPUed", | |
"colab_type": "text" | |
}, | |
"source": [ | |
        "Each stock (`permno`) has a different number of records." | |
] | |
}, | |
{ | |
"cell_type": "code", | |
"metadata": { | |
"id": "EJTPsQh3PUee", | |
"colab_type": "code", | |
"outputId": "0ddcc628-a8a7-488a-f837-bd8f7b2a487d", | |
"colab": { | |
"base_uri": "https://localhost:8080/", | |
"height": 470 | |
} | |
}, | |
"source": [ | |
"data.groupby('permno').count()" | |
], | |
"execution_count": 0, | |
"outputs": [ | |
{ | |
"output_type": "execute_result", | |
"data": { | |
"text/html": [ | |
"<div>\n", | |
"<style scoped>\n", | |
" .dataframe tbody tr th:only-of-type {\n", | |
" vertical-align: middle;\n", | |
" }\n", | |
"\n", | |
" .dataframe tbody tr th {\n", | |
" vertical-align: top;\n", | |
" }\n", | |
"\n", | |
" .dataframe thead th {\n", | |
" text-align: right;\n", | |
" }\n", | |
"</style>\n", | |
"<table border=\"1\" class=\"dataframe\">\n", | |
" <thead>\n", | |
" <tr style=\"text-align: right;\">\n", | |
" <th></th>\n", | |
" <th>DATE</th>\n", | |
" <th>mvel1</th>\n", | |
" <th>beta</th>\n", | |
" <th>betasq</th>\n", | |
" <th>chmom</th>\n", | |
" <th>dolvol</th>\n", | |
" <th>idiovol</th>\n", | |
" <th>indmom</th>\n", | |
" <th>mom1m</th>\n", | |
" <th>mom6m</th>\n", | |
" <th>mom12m</th>\n", | |
" <th>mom36m</th>\n", | |
" <th>pricedelay</th>\n", | |
" <th>turn</th>\n", | |
" <th>absacc</th>\n", | |
" <th>acc</th>\n", | |
" <th>age</th>\n", | |
" <th>agr</th>\n", | |
" <th>bm</th>\n", | |
" <th>bm_ia</th>\n", | |
" <th>cashdebt</th>\n", | |
" <th>cashpr</th>\n", | |
" <th>cfp</th>\n", | |
" <th>cfp_ia</th>\n", | |
" <th>chatoia</th>\n", | |
" <th>chcsho</th>\n", | |
" <th>chempia</th>\n", | |
" <th>chinv</th>\n", | |
" <th>chpmia</th>\n", | |
" <th>convind</th>\n", | |
" <th>currat</th>\n", | |
" <th>depr</th>\n", | |
" <th>divi</th>\n", | |
" <th>divo</th>\n", | |
" <th>dy</th>\n", | |
" <th>egr</th>\n", | |
" <th>ep</th>\n", | |
" <th>gma</th>\n", | |
" <th>grcapx</th>\n", | |
" <th>grltnoa</th>\n", | |
" <th>...</th>\n", | |
" <th>quick</th>\n", | |
" <th>rd</th>\n", | |
" <th>rd_mve</th>\n", | |
" <th>rd_sale</th>\n", | |
" <th>realestate</th>\n", | |
" <th>roic</th>\n", | |
" <th>salecash</th>\n", | |
" <th>saleinv</th>\n", | |
" <th>salerec</th>\n", | |
" <th>secured</th>\n", | |
" <th>securedind</th>\n", | |
" <th>sgr</th>\n", | |
" <th>sin</th>\n", | |
" <th>sp</th>\n", | |
" <th>tang</th>\n", | |
" <th>tb</th>\n", | |
" <th>aeavol</th>\n", | |
" <th>cash</th>\n", | |
" <th>chtx</th>\n", | |
" <th>cinvest</th>\n", | |
" <th>ear</th>\n", | |
" <th>nincr</th>\n", | |
" <th>roaq</th>\n", | |
" <th>roavol</th>\n", | |
" <th>roeq</th>\n", | |
" <th>rsup</th>\n", | |
" <th>stdacc</th>\n", | |
" <th>stdcf</th>\n", | |
" <th>ms</th>\n", | |
" <th>baspread</th>\n", | |
" <th>ill</th>\n", | |
" <th>maxret</th>\n", | |
" <th>retvol</th>\n", | |
" <th>std_dolvol</th>\n", | |
" <th>std_turn</th>\n", | |
" <th>zerotrade</th>\n", | |
" <th>sic2</th>\n", | |
" <th>PERMNO</th>\n", | |
" <th>date</th>\n", | |
" <th>return</th>\n", | |
" </tr>\n", | |
" <tr>\n", | |
" <th>permno</th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" <th></th>\n", | |
" </tr>\n", | |
" </thead>\n", | |
" <tbody>\n", | |
" <tr>\n", | |
" <th>10000</th>\n", | |
" <td>15</td>\n", | |
" <td>15</td>\n", | |
" <td>3</td>\n", | |
" <td>3</td>\n", | |
" <td>4</td>\n", | |
" <td>14</td>\n", | |
" <td>3</td>\n", | |
" <td>15</td>\n", | |
" <td>15</td>\n", | |
" <td>10</td>\n", | |
" <td>4</td>\n", | |
" <td>0</td>\n", | |
" <td>3</td>\n", | |
" <td>14</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>1</td>\n", | |
" <td>0</td>\n", | |
" <td>1</td>\n", | |
" <td>1</td>\n", | |
" <td>1</td>\n", | |
" <td>1</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>1</td>\n", | |
" <td>1</td>\n", | |
" <td>1</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>1</td>\n", | |
" <td>0</td>\n", | |
" <td>1</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>...</td>\n", | |
" <td>1</td>\n", | |
" <td>0</td>\n", | |
" <td>1</td>\n", | |
" <td>1</td>\n", | |
" <td>0</td>\n", | |
" <td>1</td>\n", | |
" <td>1</td>\n", | |
" <td>1</td>\n", | |
" <td>1</td>\n", | |
" <td>0</td>\n", | |
" <td>1</td>\n", | |
" <td>0</td>\n", | |
" <td>1</td>\n", | |
" <td>1</td>\n", | |
" <td>1</td>\n", | |
" <td>1</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>15</td>\n", | |
" <td>15</td>\n", | |
" <td>15</td>\n", | |
" <td>15</td>\n", | |
" <td>15</td>\n", | |
" <td>15</td>\n", | |
" <td>15</td>\n", | |
" <td>15</td>\n", | |
" <td>15</td>\n", | |
" <td>15</td>\n", | |
" <td>15</td>\n", | |
" </tr>\n", | |
" <tr>\n", | |
" <th>10001</th>\n", | |
" <td>371</td>\n", | |
" <td>371</td>\n", | |
" <td>358</td>\n", | |
" <td>358</td>\n", | |
" <td>360</td>\n", | |
" <td>370</td>\n", | |
" <td>358</td>\n", | |
" <td>371</td>\n", | |
" <td>370</td>\n", | |
" <td>366</td>\n", | |
" <td>360</td>\n", | |
" <td>336</td>\n", | |
" <td>358</td>\n", | |
" <td>369</td>\n", | |
" <td>343</td>\n", | |
" <td>343</td>\n", | |
" <td>355</td>\n", | |
" <td>343</td>\n", | |
" <td>355</td>\n", | |
" <td>355</td>\n", | |
" <td>355</td>\n", | |
" <td>355</td>\n", | |
" <td>355</td>\n", | |
" <td>355</td>\n", | |
" <td>331</td>\n", | |
" <td>343</td>\n", | |
" <td>343</td>\n", | |
" <td>343</td>\n", | |
" <td>343</td>\n", | |
" <td>355</td>\n", | |
" <td>355</td>\n", | |
" <td>355</td>\n", | |
" <td>343</td>\n", | |
" <td>343</td>\n", | |
" <td>343</td>\n", | |
" <td>343</td>\n", | |
" <td>355</td>\n", | |
" <td>343</td>\n", | |
" <td>331</td>\n", | |
" <td>343</td>\n", | |
" <td>...</td>\n", | |
" <td>355</td>\n", | |
" <td>343</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>355</td>\n", | |
" <td>355</td>\n", | |
" <td>355</td>\n", | |
" <td>355</td>\n", | |
" <td>0</td>\n", | |
" <td>355</td>\n", | |
" <td>343</td>\n", | |
" <td>355</td>\n", | |
" <td>355</td>\n", | |
" <td>355</td>\n", | |
" <td>313</td>\n", | |
" <td>296</td>\n", | |
" <td>296</td>\n", | |
" <td>296</td>\n", | |
" <td>296</td>\n", | |
" <td>296</td>\n", | |
" <td>296</td>\n", | |
" <td>296</td>\n", | |
" <td>269</td>\n", | |
" <td>296</td>\n", | |
" <td>296</td>\n", | |
" <td>269</td>\n", | |
" <td>269</td>\n", | |
" <td>293</td>\n", | |
" <td>371</td>\n", | |
" <td>371</td>\n", | |
" <td>371</td>\n", | |
" <td>371</td>\n", | |
" <td>371</td>\n", | |
" <td>371</td>\n", | |
" <td>371</td>\n", | |
" <td>371</td>\n", | |
" <td>371</td>\n", | |
" <td>371</td>\n", | |
" <td>371</td>\n", | |
" </tr>\n", | |
" <tr>\n", | |
" <th>10002</th>\n", | |
" <td>324</td>\n", | |
" <td>324</td>\n", | |
" <td>311</td>\n", | |
" <td>311</td>\n", | |
" <td>313</td>\n", | |
" <td>323</td>\n", | |
" <td>311</td>\n", | |
" <td>324</td>\n", | |
" <td>323</td>\n", | |
" <td>319</td>\n", | |
" <td>313</td>\n", | |
" <td>289</td>\n", | |
" <td>311</td>\n", | |
" <td>322</td>\n", | |
" <td>91</td>\n", | |
" <td>91</td>\n", | |
" <td>223</td>\n", | |
" <td>211</td>\n", | |
" <td>223</td>\n", | |
" <td>223</td>\n", | |
" <td>223</td>\n", | |
" <td>223</td>\n", | |
" <td>91</td>\n", | |
" <td>91</td>\n", | |
" <td>199</td>\n", | |
" <td>211</td>\n", | |
" <td>211</td>\n", | |
" <td>211</td>\n", | |
" <td>211</td>\n", | |
" <td>223</td>\n", | |
" <td>223</td>\n", | |
" <td>223</td>\n", | |
" <td>211</td>\n", | |
" <td>211</td>\n", | |
" <td>223</td>\n", | |
" <td>211</td>\n", | |
" <td>223</td>\n", | |
" <td>211</td>\n", | |
" <td>187</td>\n", | |
" <td>0</td>\n", | |
" <td>...</td>\n", | |
" <td>223</td>\n", | |
" <td>211</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>223</td>\n", | |
" <td>223</td>\n", | |
" <td>211</td>\n", | |
" <td>223</td>\n", | |
" <td>0</td>\n", | |
" <td>223</td>\n", | |
" <td>211</td>\n", | |
" <td>223</td>\n", | |
" <td>223</td>\n", | |
" <td>223</td>\n", | |
" <td>19</td>\n", | |
" <td>217</td>\n", | |
" <td>217</td>\n", | |
" <td>210</td>\n", | |
" <td>210</td>\n", | |
" <td>217</td>\n", | |
" <td>217</td>\n", | |
" <td>217</td>\n", | |
" <td>175</td>\n", | |
" <td>217</td>\n", | |
" <td>217</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>217</td>\n", | |
" <td>324</td>\n", | |
" <td>324</td>\n", | |
" <td>324</td>\n", | |
" <td>324</td>\n", | |
" <td>323</td>\n", | |
" <td>324</td>\n", | |
" <td>324</td>\n", | |
" <td>324</td>\n", | |
" <td>324</td>\n", | |
" <td>324</td>\n", | |
" <td>324</td>\n", | |
" </tr>\n", | |
" <tr>\n", | |
" <th>10003</th>\n", | |
" <td>118</td>\n", | |
" <td>118</td>\n", | |
" <td>105</td>\n", | |
" <td>105</td>\n", | |
" <td>107</td>\n", | |
" <td>116</td>\n", | |
" <td>105</td>\n", | |
" <td>118</td>\n", | |
" <td>117</td>\n", | |
" <td>113</td>\n", | |
" <td>107</td>\n", | |
" <td>83</td>\n", | |
" <td>105</td>\n", | |
" <td>116</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>5</td>\n", | |
" <td>0</td>\n", | |
" <td>5</td>\n", | |
" <td>5</td>\n", | |
" <td>5</td>\n", | |
" <td>5</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>5</td>\n", | |
" <td>5</td>\n", | |
" <td>5</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>5</td>\n", | |
" <td>0</td>\n", | |
" <td>5</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>...</td>\n", | |
" <td>5</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>5</td>\n", | |
" <td>5</td>\n", | |
" <td>5</td>\n", | |
" <td>0</td>\n", | |
" <td>5</td>\n", | |
" <td>0</td>\n", | |
" <td>5</td>\n", | |
" <td>5</td>\n", | |
" <td>5</td>\n", | |
" <td>5</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>118</td>\n", | |
" <td>118</td>\n", | |
" <td>118</td>\n", | |
" <td>118</td>\n", | |
" <td>118</td>\n", | |
" <td>118</td>\n", | |
" <td>118</td>\n", | |
" <td>118</td>\n", | |
" <td>118</td>\n", | |
" <td>118</td>\n", | |
" <td>118</td>\n", | |
" </tr>\n", | |
" <tr>\n", | |
" <th>10005</th>\n", | |
" <td>65</td>\n", | |
" <td>65</td>\n", | |
" <td>52</td>\n", | |
" <td>52</td>\n", | |
" <td>54</td>\n", | |
" <td>56</td>\n", | |
" <td>52</td>\n", | |
" <td>65</td>\n", | |
" <td>64</td>\n", | |
" <td>60</td>\n", | |
" <td>54</td>\n", | |
" <td>30</td>\n", | |
" <td>52</td>\n", | |
" <td>63</td>\n", | |
" <td>36</td>\n", | |
" <td>36</td>\n", | |
" <td>48</td>\n", | |
" <td>36</td>\n", | |
" <td>48</td>\n", | |
" <td>48</td>\n", | |
" <td>48</td>\n", | |
" <td>48</td>\n", | |
" <td>48</td>\n", | |
" <td>48</td>\n", | |
" <td>24</td>\n", | |
" <td>36</td>\n", | |
" <td>36</td>\n", | |
" <td>36</td>\n", | |
" <td>36</td>\n", | |
" <td>48</td>\n", | |
" <td>48</td>\n", | |
" <td>48</td>\n", | |
" <td>36</td>\n", | |
" <td>36</td>\n", | |
" <td>48</td>\n", | |
" <td>36</td>\n", | |
" <td>48</td>\n", | |
" <td>36</td>\n", | |
" <td>24</td>\n", | |
" <td>36</td>\n", | |
" <td>...</td>\n", | |
" <td>48</td>\n", | |
" <td>36</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>48</td>\n", | |
" <td>48</td>\n", | |
" <td>0</td>\n", | |
" <td>48</td>\n", | |
" <td>48</td>\n", | |
" <td>48</td>\n", | |
" <td>36</td>\n", | |
" <td>48</td>\n", | |
" <td>48</td>\n", | |
" <td>48</td>\n", | |
" <td>48</td>\n", | |
" <td>3</td>\n", | |
" <td>3</td>\n", | |
" <td>3</td>\n", | |
" <td>3</td>\n", | |
" <td>3</td>\n", | |
" <td>3</td>\n", | |
" <td>3</td>\n", | |
" <td>0</td>\n", | |
" <td>3</td>\n", | |
" <td>3</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>3</td>\n", | |
" <td>65</td>\n", | |
" <td>58</td>\n", | |
" <td>65</td>\n", | |
" <td>65</td>\n", | |
" <td>46</td>\n", | |
" <td>65</td>\n", | |
" <td>58</td>\n", | |
" <td>65</td>\n", | |
" <td>65</td>\n", | |
" <td>65</td>\n", | |
" <td>65</td>\n", | |
" </tr>\n", | |
" <tr>\n", | |
" <th>...</th>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" <td>...</td>\n", | |
" </tr>\n", | |
" <tr>\n", | |
" <th>93432</th>\n", | |
" <td>11</td>\n", | |
" <td>11</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>9</td>\n", | |
" <td>0</td>\n", | |
" <td>11</td>\n", | |
" <td>10</td>\n", | |
" <td>6</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>9</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>...</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>2</td>\n", | |
" <td>2</td>\n", | |
" <td>2</td>\n", | |
" <td>2</td>\n", | |
" <td>2</td>\n", | |
" <td>2</td>\n", | |
" <td>2</td>\n", | |
" <td>0</td>\n", | |
" <td>2</td>\n", | |
" <td>2</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>0</td>\n", | |
" <td>11</td>\n", | |
" <td>11</td>\n", | |
" <td>11</td>\n", | |
" <td>11</td>\n", | |
" <td>11</td>\n", | |
" <td>11</td>\n", | |
" <td>11</td>\n", | |
" <td>11</td>\n", | |
" <td>11</td>\n", | |
" <td>11</td>\n", | |
" <td>11</td>\n", | |
" </tr>\n", | |
" <tr>\n", | |
" <th>93433</th>\n", | |
" <td>77</td>\n", | |
" <td>77</td>\n", | |
" <td>64</td>\n", | |
" <td>64</td>\n", | |
" <td>66</td>\n", | |
" <td>76</td>\n", | |
" <td>64</td>\n", | |
" <td>77</td>\n", | |
" <td>76</td>\n", | |
" <td>72</td>\n", | |
" <td>66</td>\n", | |
" <td>42</td>\n", | |
" <td>64</td>\n", | |
" <td>75</td>\n", | |
" <td>53</td>\n", | |
" <td>53</td>\n", | |
" <td>65</td>\n", | |
" <td>53</td>\n", | |
" <td>65</td>\n", | |
" <td>65</td>\n", | |
" <td>65</td>\n", | |
" <td>65</td>\n", | |
" <td>65</td>\n", | |
" <td>65</td>\n", | |
" <td>41</td>\n", | |
" <td>53</td>\n", | |
" <td>53</td>\n", | |
" <td>53</td>\n", | |
" <td>53</td>\n", | |
" <td>65</td>\n", | |
" <td>65</td>\n", | |
" <td>65</td>\n", | |
" <td>53</td>\n", | |
" <td>53</td>\n", | |
" <td>65</td>\n", | |
" <td>53</td>\n", | |
" <td>65</td>\n", | |
" <td>53</td>\n", | |
" <td>41</td>\n", | |
" <td>53</td>\n", | |
" <td>...</td>\n", | |
" <td>65</td>\n", | |
" <td>53</td>\n", | |
" <td>60</td>\n", | |
" <td>60</td>\n", | |
" <td>60</td>\n", | |
" <td>65</td>\n", | |
" <td>65</td>\n", | |
" <td>0</td>\n", | |
" <td>65</td>\n", | |
" <td>17</td>\n", | |
" <td>65</td>\n", | |
" <td>53</td>\n", | |
" <td>65</td>\n", | |
" <td>65</td>\n", | |
" <td>65</td>\n", | |
" <td>65</td>\n", | |
" <td>51</td>\n", | |
" <td>51</td>\n", | |
" <td>45</td>\n", | |
" <td>48</td>\n", | |
" <td>51</td>\n", | |
" <td>51</td>\n", | |
" <td>51</td>\n", | |
" <td>23</td>\n", | |
" <td>51</td>\n", | |
" <td>51</td>\n", | |
" <td>23</td>\n", | |
" <td>23</td>\n", | |
" <td>46</td>\n", | |
" <td>77</td>\n", | |
" <td>77</td>\n", | |
" <td>77</td>\n", | |
" <td>77</td>\n", | |
" <td>77</td>\n", | |
" <td>77</td>\n", | |
" <td>77</td>\n", | |
" <td>77</td>\n", | |
" <td>77</td>\n", | |
" <td>77</td>\n", | |
" <td>77</td>\n", | |
" </tr>\n", | |
" <tr>\n", | |
" <th>93434</th>\n", | |
" <td>78</td>\n", | |
" <td>78</td>\n", | |
" <td>65</td>\n", | |
" <td>65</td>\n", | |
" <td>67</td>\n", | |
" <td>76</td>\n", | |
" <td>65</td>\n", | |
" <td>78</td>\n", | |
" <td>77</td>\n", | |
" <td>73</td>\n", | |
" <td>67</td>\n", | |
" <td>43</td>\n", | |
" <td>65</td>\n", | |
" <td>76</td>\n", | |
" <td>60</td>\n", | |
" <td>60</td>\n", | |
" <td>72</td>\n", | |
" <td>60</td>\n", | |
" <td>72</td>\n", | |
" <td>72</td>\n", | |
" <td>72</td>\n", | |
" <td>72</td>\n", | |
" <td>72</td>\n", | |
" <td>72</td>\n", | |
" <td>48</td>\n", | |
" <td>60</td>\n", | |
" <td>60</td>\n", | |
" <td>60</td>\n", | |
" <td>60</td>\n", | |
" <td>72</td>\n", | |
" <td>72</td>\n", | |
" <td>72</td>\n", | |
" <td>60</td>\n", | |
" <td>60</td>\n", | |
" <td>72</td>\n", | |
" <td>60</td>\n", | |
" <td>72</td>\n", | |
" <td>60</td>\n", | |
" <td>48</td>\n", | |
" <td>60</td>\n", | |
" <td>...</td>\n", | |
" <td>72</td>\n", | |
" <td>60</td>\n", | |
" <td>72</td>\n", | |
" <td>72</td>\n", | |
" <td>72</td>\n", | |
" <td>72</td>\n", | |
" <td>72</td>\n", | |
" <td>72</td>\n", | |
" <td>72</td>\n", | |
" <td>36</td>\n", | |
" <td>72</td>\n", | |
" <td>60</td>\n", | |
" <td>72</td>\n", | |
" <td>72</td>\n", | |
" <td>72</td>\n", | |
" <td>72</td>\n", | |
" <td>65</td>\n", | |
" <td>65</td>\n", | |
" <td>62</td>\n", | |
" <td>65</td>\n", | |
" <td>65</td>\n", | |
" <td>65</td>\n", | |
" <td>65</td>\n", | |
" <td>38</td>\n", | |
" <td>65</td>\n", | |
" <td>65</td>\n", | |
" <td>38</td>\n", | |
" <td>38</td>\n", | |
" <td>65</td>\n", | |
" <td>78</td>\n", | |
" <td>78</td>\n", | |
" <td>78</td>\n", | |
" <td>78</td>\n", | |
" <td>78</td>\n", | |
" <td>78</td>\n", | |
" <td>78</td>\n", | |
" <td>78</td>\n", | |
" <td>78</td>\n", | |
" <td>78</td>\n", | |
" <td>78</td>\n", | |
" </tr>\n", | |
" <tr>\n", | |
" <th>93435</th>\n", | |
" <td>22</td>\n", | |
" <td>22</td>\n", | |
" <td>9</td>\n", | |
" <td>9</td>\n", | |
" <td>11</td>\n", | |
" <td>21</td>\n", | |
" <td>9</td>\n", | |
" <td>22</td>\n", | |
" <td>21</td>\n", | |
" <td>17</td>\n", | |
" <td>11</td>\n", | |
" <td>0</td>\n", | |
" <td>9</td>\n", | |
" <td>20</td>\n", | |
" <td>10</td>\n", | |
" <td>10</td>\n", | |
" <td>10</td>\n", | |
" <td>10</td>\n", | |
" <td>10</td>\n", | |
" <td>10</td>\n", | |
" <td>10</td>\n", | |
" <td>10</td>\n", | |
" <td>10</td>\n", | |
" <td>10</td>\n", | |
" <td>0</td>\n", | |
" <td>10</td>\n", | |
" <td>10</td>\n", | |
" <td>10</td>\n", | |
" <td>10</td>\n", | |
" <td>10</td>\n", | |
" <td>10</td>\n", | |
" <td>10</td>\n", | |
" <td>10</td>\n", | |
" <td>10</td>\n", | |
" <td>10</td>\n", | |
" <td>10</td>\n", | |
" <td>10</td>\n", | |
" <td>10</td>\n", | |
" <td>0</td>\n", | |
" <td>10</td>\n", | |
" <td>...</td>\n", | |
" <td>10</td>\n", | |
" <td>10</td>\n", | |
" <td>10</td>\n", | |
" <td>10</td>\n", | |
" <td>10</td>\n", | |
" <td>10</td>\n", | |
" <td>10</td>\n", | |
" <td>10</td>\n", | |
" <td>10</td>\n", | |
" <td>10</td>\n", | |
" <td>10</td>\n", | |
" <td>10</td>\n", | |
" <td>10</td>\n", | |
" <td>10</td>\n", | |
" <td>10</td>\n", | |
" <td>10</td>\n", | |
" <td>13</td>\n", | |
" <td>13</td>\n", | |
" <td>13</td>\n", | |
" <td>13</td>\n", | |
" <td>13</td>\n", | |
" <td>13</td>\n", | |
" <td>13</td>\n", | |
" <td>13</td>\n", | |
" <td>13</td>\n", | |
" <td>13</td>\n", | |
" <td>13</td>\n", | |
" <td>13</td>\n", | |
" <td>8</td>\n", | |
" <td>22</td>\n", | |
" <td>22</td>\n", | |
" <td>22</td>\n", | |
" <td>22</td>\n", | |
" <td>22</td>\n", | |
" <td>22</td>\n", | |
" <td>22</td>\n", | |
" <td>22</td>\n", | |
" <td>22</td>\n", | |
" <td>22</td>\n", | |
" <td>22</td>\n", | |
" </tr>\n", | |
" <tr>\n", | |
" <th>93436</th>\n", | |
" <td>78</td>\n", | |
" <td>78</td>\n", | |
" <td>65</td>\n", | |
" <td>65</td>\n", | |
" <td>67</td>\n", | |
" <td>76</td>\n", | |
" <td>65</td>\n", | |
" <td>78</td>\n", | |
" <td>77</td>\n", | |
" <td>73</td>\n", | |
" <td>67</td>\n", | |
" <td>43</td>\n", | |
" <td>65</td>\n", | |
" <td>76</td>\n", | |
" <td>54</td>\n", | |
" <td>54</td>\n", | |
" <td>66</td>\n", | |
" <td>54</td>\n", | |
" <td>66</td>\n", | |
" <td>66</td>\n", | |
" <td>66</td>\n", | |
" <td>66</td>\n", | |
" <td>66</td>\n", | |
" <td>66</td>\n", | |
" <td>42</td>\n", | |
" <td>54</td>\n", | |
" <td>54</td>\n", | |
" <td>54</td>\n", | |
" <td>54</td>\n", | |
" <td>66</td>\n", | |
" <td>66</td>\n", | |
" <td>66</td>\n", | |
" <td>54</td>\n", | |
" <td>54</td>\n", | |
" <td>66</td>\n", | |
" <td>54</td>\n", | |
" <td>66</td>\n", | |
" <td>54</td>\n", | |
" <td>42</td>\n", | |
" <td>36</td>\n", | |
" <td>...</td>\n", | |
" <td>66</td>\n", | |
" <td>54</td>\n", | |
" <td>66</td>\n", | |
" <td>66</td>\n", | |
" <td>66</td>\n", | |
" <td>66</td>\n", | |
" <td>66</td>\n", | |
" <td>66</td>\n", | |
" <td>66</td>\n", | |
" <td>66</td>\n", | |
" <td>66</td>\n", | |
" <td>54</td>\n", | |
" <td>66</td>\n", | |
" <td>66</td>\n", | |
" <td>66</td>\n", | |
" <td>66</td>\n", | |
" <td>71</td>\n", | |
" <td>71</td>\n", | |
" <td>65</td>\n", | |
" <td>71</td>\n", | |
" <td>71</td>\n", | |
" <td>71</td>\n", | |
" <td>71</td>\n", | |
" <td>41</td>\n", | |
" <td>71</td>\n", | |
" <td>71</td>\n", | |
" <td>41</td>\n", | |
" <td>41</td>\n", | |
" <td>66</td>\n", | |
" <td>78</td>\n", | |
" <td>78</td>\n", | |
" <td>78</td>\n", | |
" <td>77</td>\n", | |
" <td>78</td>\n", | |
" <td>78</td>\n", | |
" <td>78</td>\n", | |
" <td>78</td>\n", | |
" <td>78</td>\n", | |
" <td>78</td>\n", | |
" <td>78</td>\n", | |
" </tr>\n", | |
" </tbody>\n", | |
"</table>\n", | |
"<p>29853 rows × 99 columns</p>\n", | |
"</div>" | |
], | |
"text/plain": [ | |
" DATE mvel1 beta betasq chmom ... zerotrade sic2 PERMNO date return\n", | |
"permno ... \n", | |
"10000 15 15 3 3 4 ... 15 15 15 15 15\n", | |
"10001 371 371 358 358 360 ... 371 371 371 371 371\n", | |
"10002 324 324 311 311 313 ... 324 324 324 324 324\n", | |
"10003 118 118 105 105 107 ... 118 118 118 118 118\n", | |
"10005 65 65 52 52 54 ... 58 65 65 65 65\n", | |
"... ... ... ... ... ... ... ... ... ... ... ...\n", | |
"93432 11 11 0 0 0 ... 11 11 11 11 11\n", | |
"93433 77 77 64 64 66 ... 77 77 77 77 77\n", | |
"93434 78 78 65 65 67 ... 78 78 78 78 78\n", | |
"93435 22 22 9 9 11 ... 22 22 22 22 22\n", | |
"93436 78 78 65 65 67 ... 78 78 78 78 78\n", | |
"\n", | |
"[29853 rows x 99 columns]" | |
] | |
}, | |
"metadata": { | |
"tags": [] | |
}, | |
"execution_count": 19 | |
} | |
] | |
}, | |
{ | |
"cell_type": "markdown", | |
"metadata": { | |
"id": "IEtoLXgBPUeh", | |
"colab_type": "text" | |
}, | |
"source": [ | |
"## 2.2 Missing Values" | |
] | |
}, | |
{ | |
"cell_type": "markdown", | |
"metadata": { | |
"id": "F17S9eJPPUei", | |
"colab_type": "text" | |
}, | |
"source": [ | |
        "The mean percentage of missing values across columns is around 30%." | |
] | |
}, | |
{ | |
"cell_type": "code", | |
"metadata": { | |
"id": "d8CR4o-VPUej", | |
"colab_type": "code", | |
"outputId": "fe902cbf-47c1-4b5f-9616-26ed9d8e7e9d", | |
"colab": { | |
"base_uri": "https://localhost:8080/", | |
"height": 374 | |
} | |
}, | |
"source": [ | |
"(1-summary.loc['count'].sort_values()/3760208).plot.bar()\n", | |
"plt.title('Missing % for each column')" | |
], | |
"execution_count": 0, | |
"outputs": [ | |
{ | |
"output_type": "execute_result", | |
"data": { | |
"text/plain": [ | |
"Text(0.5, 1.0, 'Missing % for each column')" | |
] | |
}, | |
"metadata": { | |
"tags": [] | |
}, | |
"execution_count": 20 | |
}, | |
{ | |
"output_type": "display_data", | |
"data": { | |
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAXcAAAFUCAYAAADf+HxmAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjAsIGh0\ndHA6Ly9tYXRwbG90bGliLm9yZy8GearUAAAgAElEQVR4nOydebhe0/XHPyuJxKyGmCIkCG2UGmKm\npmrNWrNqi1LVVmlLiRpLW0PR0tLyQ6h5KG1KiLlmTSKSSCJEEhJjIogEibB+f3zXyTn3dW/um7gR\nXuvzPOd5z7DPPntce+21h9fcnSRJkqSxaDe/A5AkSZK0PSnckyRJGpAU7kmSJA1ICvckSZIGJIV7\nkiRJA5LCPUmSpAFJ4f4Fx8z+bmYnfYL3f2Nml7ZlmNoCM1vOzB40s3fM7Nz5HZ6WMDM3s9Xn8Tce\nMLND5+U3ks8eHeZ3AJJ5g5mNA1YEVnT3SZX7g4F1ge7uPs7dD/8k33H3P3yigLaAmXUArgZ2AB4H\n9nH3KfHsN8D77n7ebLw4DJgELO65mCP5ApKae2MzFti/uDCztYGF519w5og9AAeWAd5Gwhoz6w7s\nBlzQyvurACPmRrBHw5Ikn2tSuDc2VwE/qFwfCPyj6sDMrjCz38X5MmZ2m5m9ZWaTzewhM2sXz44z\ns5fCzDHKzLaL+6ea2dVx3i3MDAea2YtmNsnMTqh8ayEzu9LM3jSzkWZ2rJlNaCHs3YEH3H0mcD+w\naty/ADg67jeLmV0RcT3WzKaa2TfMrJOZ/dnMXo7jz2bWKdxvbWYTIo6vAn1a8PeHEe43zay/ma1S\neXa+mY03sylmNsjMtqw8ax/mq+cj/QaZWdeK198ws+ci3S80M2vh+y36Y2abmdkAM3s7fjdrwY9Z\n+RXXRZ51iOsHzOx3ZvZopN1/zGxpM7sm4jbAzLpV3nczO7ye8CefLincG5vHgcXN7Ctm1h7YD5k6\nWuJoYALQGVgO+A3gZrYmcASwobsvBnwLGDcbf7YA1gS2A042s6/E/VOAbkhQbw98bzZ+PA1sGwJ4\nG2C4mX0HmOTuj8zmPdz9IOAa4Gx3X9Td7wFOADZBJqmvARsBJ1ZeWx5YCmn8h9X6aWa7o/TYA6XP\nQ8B1FScDwu+lgGuBm8xswXj2K9SD2glYHPgh8G7l3V2ADYF1gH1Q+jZHs/6Y2VLA7ajhWxo4D7jd\nzJZuwZ/W2A/4PtAFWA14DDV4SwEjUT5WqTf8yadICvfGp9Det0cV86XZuP0AWAFYxd0/cPeHwqzx\nIdAJ6GlmC4St/vnZ+PNbd3/P3YcAQ5AwBVX8P7j7m+4+gdmbVvohs9IAZJa5HgmVY83s9zFYepGZ\ndWwl/gUHAKe5++vuPhH4LRJgBR8Bp7j7dHd/r5n3DwfOcPeR0Wv4A7Buob27+9Xu/oa7z3T3c1F6\nrRnvHgqc6O6jXAxx9zcqfp/p7m+5+4uol7JuC3FoyZ+dgefc/ar4/nXAM8CudaZNLX3c/Xl3fxu4\nA3je3e+JeN8ErFfjvt7wJ58iKdwbn6uA7wIHUWOSaYY/AqOBu8xsjJn1BnD30cAvgFOB183sejNb\ncTb+vFo5fxdYNM5XBMZXnlXPmxDCq7e7r+PuhwG9gb8jDbEXsBXQEWmv9bAi8ELl+oW4VzDR3d+f\nzfurAOeH6eEtYDJgSLvFzI4Jk83b8XwJNF4A0BWYXWPYUnrV0pI/tXEjrrvM5puz47XK+XvNXNeG\nr97wJ58iKdwbHHd/AWnAOwG3tOL2HXc/2t1XRYOWvyps6+5+rbtvgYScA2fNRXBeAVaqXHdtyWGV\nGAjeDLgEWBsYFD2KAcgUUA8vo7AXrBz3ClobeB0P/Njdv1Q5FnL3R8O+fizqmSzp7l9CvQ2rvLta\nneFsLQzN+VMbN1D8muulTaPpoPrybRCu5DNI
CvcvBocA27r7tNk5MrNdzGz1GBB7G5ljPjKzNc2s\nsH+/j7S3j+YiHDcCx5vZkmbWBdnxZ0uE5a/Ake7+EWqotghzzFbAmDq/fR1wopl1NrNlgJOZ/fhD\nLX+PsK8V4VrCzPaOZ4sBM4GJQAczOxnZxAsuBU43sx4m1plLe3hL/vQD1jCz75pZBzPbF+gJ3NaM\nH08BXzezlc1sCeD4uQhH8jkghfsXgLCfDqzDaQ/gHmAqGkS7yN3vR/bjM9G88VeBZZk7oXAaGrAd\nG9+5GZjeyjsHA0+7+6C4vgVpqhPR4OEldX77d8BAYCgwDHgy7tWFu9+KeivXm9kUNOC7YzzuD9wJ\nPIvMIe/T1OR0HmrY7gKmAJcBC9X77db8Cbv7LmhA/A3Ui9ilur6hEo+7gRtQOgyi+QYgaQAs13ck\n8wsz+wmwn7tvNb/DkiSNRmruyaeGma1gZpubWbuYXnk0cOv8DleSNCK5Ei/5NOkIXIwWKL2Fpjde\nNF9DlCQNSpplkiRJGpA0yyRJkjQgKdyTJEkakPlmc19mmWW8W7du8+vzSZIkn0sGDRo0yd07t+Zu\nvgn3bt26MXBgPVOvkyRJkgIzq91qolnSLJMkSdKApHBPkiRpQFK4J0mSNCAp3JMkSRqQFO5JkiQN\nSAr3JEmSBiSFe5IkSQOSwj1JkqQBma/CvVvv2+nW+/b5GYQkSZKGJDX3JEmSBiSFe5IkSQOSwj1J\nkqQBSeGeJEnSgKRwT5IkaUBSuCdJkjQgKdyTJEkakBTuSZIkDUgK9yRJkgakLuFuZjuY2SgzG21m\nvZt5/iczeyqOZ83srbYPapIkSVIvrf6Hqpm1By4EtgcmAAPMrK+7jyjcuPsvK+5/Dqw3D8KaJEmS\n1Ek9mvtGwGh3H+PuM4Drgd1n435/4Lq2CFySJEkyd9Qj3LsA4yvXE+LexzCzVYDuwH2fPGhJkiTJ\n3NLWA6r7ATe7+4fNPTSzw8xsoJkNnDhxYht/OkmSJCmoR7i/BHStXK8U95pjP2ZjknH3S9y9l7v3\n6ty5c/2hTJIkSeaIeoT7AKCHmXU3s45IgPetdWRmXwaWBB5r2yAmSZIkc0qrwt3dZwJHAP2BkcCN\n7j7czE4zs90qTvcDrnd3nzdBTZIkSeql1amQAO7eD+hXc+/kmutT2y5YSZIkySchV6gmSZI0ICnc\nkyRJGpAU7kmSJA1ICvckSZIGJIV7kiRJA5LCPUmSpAFJ4Z4kSdKApHBPkiRpQD4zwr1b79vp1vv2\n+R2MJEmShuAzI9xrSUGfJEky93xmhXuSJEky96RwT5IkaUBSuCdJkjQgKdyTJEkakBTuSZIkDUgK\n9yRJkgYkhXuSJEkD8rkQ7tUFTrWLnXLxU5Ikycf5XAj3JEmSZM6oS7ib2Q5mNsrMRptZ7xbc7GNm\nI8xsuJld27bBTJIkSeaEVv8g28zaAxcC2wMTgAFm1tfdR1Tc9ACOBzZ39zfNbNl5FeAkSZKkderR\n3DcCRrv7GHefAVwP7F7j5kfAhe7+JoC7v962wUySJEnmhHqEexdgfOV6Qtyrsgawhpk9YmaPm9kO\nbRXAJEmSZM5p1SwzB/70ALYGVgIeNLO13f2tqiMzOww4DGDllVfG2ujjSZIkSVPq0dxfArpWrleK\ne1UmAH3d/QN3Hws8i4R9E9z9Enfv5e69OnfuPLdhTpIkSVqhHuE+AOhhZt3NrCOwH9C3xs2/kNaO\nmS2DzDRj2jCcSZIkyRzQqnB395nAEUB/YCRwo7sPN7PTzGy3cNYfeMPMRgD3A7929zfmVaCTJEmS\n2VOXzd3d+wH9au6dXDl34FdxJEmSJPOZXKGaJEnSgKRwT5IkaUBSuCdJkjQgKdyTJEkakBTuSZIk\nDUgK9yRJkgYkhXuSJEkDksI9SZKkAUnhniRJ0oCkcE+SJGlAUrgnSZI0ICnckyRJGpAU7kmSJA1I\nCvckSZIG
JIV7kiRJA5LCPUmSpAFJ4Z4kSdKApHBPkiRpQOoS7ma2g5mNMrPRZta7mecHmdlEM3sq\njkPbPqhJkiRJvbT6H6pm1h64ENgemAAMMLO+7j6ixukN7n7EPAhj3XTrfTsA487ceX4GI0mSZL5T\nj+a+ETDa3ce4+wzgemD3eRusJEmS5JNQj3DvAoyvXE+Ie7XsaWZDzexmM+vaJqFLkiRJ5oq2GlD9\nD9DN3dcB7gaubM6RmR1mZgPNbODEiRPb6NNJkiRJLfUI95eAqia+Utybhbu/4e7T4/JSYIPmPHL3\nS9y9l7v36ty589yEN0mSJKmDeoT7AKCHmXU3s47AfkDfqgMzW6FyuRswsu2COPcUA6xJkiRfNFqd\nLePuM83sCKA/0B643N2Hm9lpwEB37wscaWa7ATOBycBB8zDMSZIkSSu0KtwB3L0f0K/m3smV8+OB\n49s2aEmSJMnckitUkyRJGpAU7kmSJA3IF0a4d+t9ew6wJknyheELI9yrpKBPkqTR+UIK9yRJkkYn\nhTs5Hz5JksYjhXsNabJJkqQRSOGeJEnSgKRwb4XU4pMk+TySwj1JkqQBSeGeJEnSgKRwnwNysDVJ\nks8LKdyTJEkakBTuSZIkDUgK97kkTTRJknyWSeGeJEnSgKRwbyNSi0+S5LNECvd5QNVkk+abJEnm\nByncP2VS0CdJ8mlQl3A3sx3MbJSZjTaz3rNxt6eZuZn1arsgJkmSJHNKq8LdzNoDFwI7Aj2B/c2s\nZzPuFgOOAp5o60A2KrUmm9rz1PKTJJlb6tHcNwJGu/sYd58BXA/s3oy704GzgPfbMHxJkiTJXFCP\ncO8CjK9cT4h7szCz9YGu7p6q5jxgdhp+kiRJc3ziAVUzawecBxxdh9vDzGygmQ2cOHHiJ/10kiRJ\n0gL1CPeXgK6V65XiXsFiwFeBB8xsHLAJ0Le5QVV3v8Tde7l7r86dO899qJMkSZLZUo9wHwD0MLPu\nZtYR2A/oWzx097fdfRl37+bu3YDHgd3cfeA8CXGSJEnSKq0Kd3efCRwB9AdGAje6+3AzO83MdpvX\nAUySJEnmnA71OHL3fkC/mnsnt+B2608erKReisHVcWfu3OS89lmSJF8scoVqkiRJA5LC/QtCLopK\nki8WKdyTJEkakBTuSZIkDUgK9yRJkgYkhfsXlLS/J0ljk8I9SZKkAUnhniRJ0oCkcE9ymmSSNCAp\n3JMm5B+IJEljkMI9mWtS0CfJZ5cU7kmbkFp9kny2SOGetDkp6JNk/pPCPZnntGS3z0YgSeYdKdyT\nJEkakBTuSZIkDUgK9+Qzw+xMNmm+SZI5I4V78rkjG4EkaZ0U7knDkgO2yReZuoS7me1gZqPMbLSZ\n9W7m+eFmNszMnjKzh82sZ9sHNUnmnuY0/Hq0/+wlJJ9XWhXuZtYeuBDYEegJ7N+M8L7W3dd293WB\ns4Hz2jykSZIkSd3Uo7lvBIx29zHuPgO4Hti96sDdp1QuFwG87YKYJEmSzCkd6nDTBRhfuZ4AbFzr\nyMx+BvwK6Ahs2yahS5LPCYWJZtyZO8/nkCSJaLMBVXe/0N1XA44DTmzOjZkdZmYDzWzgxIkT2+rT\nSfKZI+3xyfymHuH+EtC1cr1S3GuJ64FvN/fA3S9x917u3qtz5871hzJJkiSZI+oR7gOAHmbW3cw6\nAvsBfasOzKxH5XJn4Lm2C2KSfL7JKZnJ/KBV4e7uM4EjgP7ASOBGdx9uZqeZ2W7h7AgzG25mTyG7\n+4HzLMRJ8jkmp1Mmnxb1DKji7v2AfjX3Tq6cH9XG4UqSLxzVQdkcoE0+KblCNUmSpAFJ4Z4kSdKA\npHBPkiRpQFK4J0mSNCAp3JMkSRqQFO5J8jkgd6dM5pQU7knSQOSCqaQghXuSNCip4X+xSeGeJF9A\n2uIPStJU9NkmhXuSJEkDksI9SZJ5So4DzB9SuCdJkjQgKdyTJEkakBTuSZ
IkDUgK9yRJkgYkhXuS\nJEkDksI9SZKkAUnhniRJ0oDUJdzNbAczG2Vmo82sdzPPf2VmI8xsqJnda2artH1QkyRJknppVbib\nWXvgQmBHoCewv5n1rHE2GOjl7usANwNnt3VAkyRJkvqpR3PfCBjt7mPcfQZwPbB71YG73+/u78bl\n48BKbRvMJEmSZE6oR7h3AcZXrifEvZY4BLjjkwQqSZIk+WR0aEvPzOx7QC9gqxaeHwYcBrDyyitj\nbfnxJEmSZBb1aO4vAV0r1yvFvSaY2TeAE4Dd3H16cx65+yXu3svde3Xu3HluwpskSZLUQT3CfQDQ\nw8y6m1lHYD+gb9WBma0HXIwE++ttH8wkSZJkTmhVuLv7TOAIoD8wErjR3Yeb2Wlmtls4+yOwKHCT\nmT1lZn1b8C5JkiT5FKjL5u7u/YB+NfdOrpx/o43DlSRJknwCcoVqkiRJA5LCPUmSpAFJ4Z4kSdKA\npHBPkiRpQFK4J0mSNCAp3JMkSRqQFO5JkiQNSAr3JEmSBiSFe5IkSQOSwj1JkqQBSeGeJEnSgKRw\nT5IkaUBSuCdJkjQgKdyTJEkakBTuSZIkDUgK9yRJkgYkhXuSJEkDksI9SZKkAUnhniRJ0oDUJdzN\nbAczG2Vmo82sdzPPv25mT5rZTDPbq+2DmSRJkswJrQp3M2sPXAjsCPQE9jeznjXOXgQOAq5t6wAm\nSZIkc06HOtxsBIx29zEAZnY9sDswonDg7uPi2UfzIIxJkiTJHFKPWaYLML5yPSHuJUmSzDHdet/e\n5Ly4rp7XukvmnE91QNXMDjOzgWY2cOLEiZ/mp5MkSb5Q1CPcXwK6Vq5XintzjLtf4u693L1X586d\n58aLJEm+gMxOw0+apx7hPgDoYWbdzawjsB/Qd94GK0mSJPkktCrc3X0mcATQHxgJ3Ojuw83sNDPb\nDcDMNjSzCcDewMVmNnxeBjpJkqSgXrv9F037r2e2DO7eD+hXc+/kyvkAZK5JkiT53NGt9+2MO3Pn\n+R2MNiVXqCZJkjQgKdyTJEkakBTuSZIkFRrFHp/CPUmSpAFJ4Z4kSdKApHBPkiRpQFK4J0mSNCAp\n3JMkSRqQFO5JkiQNSAr3JEmSBiSFe5IkyWz4vM55T+GeJEnSgKRwT5IkaUBSuCdJkjQgKdyTJEka\nkBTuSZIkDUgK9yRJkgYkhXuSJEkDksI9SZKkAalLuJvZDmY2ysxGm1nvZp53MrMb4vkTZtatrQOa\nJEkyv/k8/ZFHq8LdzNoDFwI7Aj2B/c2sZ42zQ4A33X114E/AWW0d0CRJkqR+6tHcNwJGu/sYd58B\nXA/sXuNmd+DKOL8Z2M7MrO2CmSRJ8tmiVouvPS+um3M3u2f1uKuHeoR7F2B85XpC3GvWjbvPBN4G\nlp6jkCRJkiRthrn77B2Y7QXs4O6HxvX3gY3d/YiKm6fDzYS4fj7cTKrx6zDgsLhcExgFLAMU7qrn\ntdfz0t3n7VsZ3sb51uctvJk28z+8q7h7Z1rD3Wd7AJsC/SvXxwPH17jpD2wa5x0iANaa3+F+YHPn\ns3vW1u4+b9/K8DbOtz5v4c20+eyEt7WjHrPMAKCHmXU3s47AfkDfGjd9gQPjfC/gPo/QJEmSJJ8+\nHVpz4O4zzewIpJ23By539+FmdhpqSfoClwFXmdloYDJqAJIkSZL5RKvCHcDd+wH9au6dXDl/H9h7\nLsNwSQvns3vW1u4+b99qCz8yvJ+Nb7WFH436rbbwo5HDO1taHVBNkiRJPn/k9gNJkiQNSAr3JEmS\nBuQzIdzNbCEzW7NyfVX8HtXG31m/5vpj4wRmtreZLdjM/WU+wXdbn5P68Xfamdk+lfPNap4vYmbt\natwv3NL9ufh+95buFenWnJu2wswWNbNr6nDX3szOqVzPSrc5ee/zxLwM+9zm7dyUsU+LWvnyKX53\n83ruzbPvzw+bu5ktB/wBWBG4CLgAWN
TdO5vZusCjwAnAQcCewKJoz5oPgQ/iWADYHphW8foFoCOw\nAtAJeAVYHFgQuBNYJ/y4CbgB+Ie7r29mZ7n7cRG2J8Pvp9z9+3FvT+Bc4Njwa3W0oOBLlW8PjG9/\nADjQGXgtnv0GeB64C7gPeKfy3gGV84WB5yrXvwROdPcrzWywu69XScPHgW+4+1Qz+w7wBNr6oV2k\n2Ybu/i8zWzS+uyUw3N2/XPHjEHe/rPI9zOxMd+9tZk+6+/px7yp3/76ZDQLeApYEhgNrRfrd6+7b\nmdkewBYR/6fQ1hWbx/XDwGPAysAewDC0qvk64Ay0b9GCkW+dgVeBF4GjgEPc/achQI4GVnb3H5lZ\nD7QY7kR336QSh4Hu3qtyvSTQI/xfDZWD59GCuitonl2Au4FzUNnpFXEYCzyCZo+1A24Fdnf3yWY2\nLOIKsBjK57dR2XgF+MDd/2RmZwFnAvu7+0UhSH/m7seEYnMYsJy7j6umfyU+iwP3At+KW8sAvwZW\nQZMkugKvufsWkWeXA9e6+5tmtipwPlq/4hHGd4HpwCDg2xHeLdE0aIB9I52WoqlCeAPQDfg+MBP4\ncYThm8D/AcsCO8WzbVF+vuTuO0Y8dgbWinwp+CtN61WVbkgmbA1Utze5NX6XRvX9QVQHlg13K6I6\nfVs8/zLwv8r7CwKn0nSCySPAzvHNDuHPUSg/q98eE7+ronSu8ivgPAB3Py/i/GSEf0kk91ZGMmkw\ncIy7v21m2wDfBYbE975OmbeEf9t+LHVqmF/C/Q6gDxLgM5GQfsndFzKzU9BCqSIBXwWWi+vJqEDe\nBCwPbAL8FtgNLZw6HQm484CTUQVeEQmSK1CCLQe8hPbD6YwK8mpoz5zFkZA5BAnLC+P9jYCJ4e4d\nlCHjI+zLoQy+H/gZKmgvxfkM1ODcCuwfbjuiBul1JPx7IkEwCVWcRYE7wv9Dwq+dgeNQZftPCJKn\n3H3dSM+n3H1dM3sKIM4Hu/t6ZrYJalA+Qo2HA9PcfXEz6wdc4+7XhD83o0q4ECrwH6FK/yoSwEfH\n9ZvAhqghGxD5MAlYJMJ7AfDHSP9iWuwNSOjMjHe+EfenAL8HfgLsiirVzcBWkW9fifQ+CwmRCUB3\n4NDIn3XDn3bA+6ix/F74exLwHeDwyMciD18Aroo8momEdmfUaA8ARsezm4F/ooq/UuTbQhHPGZEO\nywOvunt3MzsbVdRrw/+n45uvonJ6v7vvWDSclTwaCHRy97Wj8m8CPOLuG0aP7d7wd0E0HdmBqXE+\nDQnn/pHe9wI/QnXq5bj+EmqsBgJfQ4rVdajReivi/R2kBKwQ6ekRv+lIaHqk75hIw4FIEfkpKseL\nAY8Dm0Ve3IPq2DPAz8PfHYFtIr0XRY1T+0jLBSKOhSAFlb92kUdQLpC8kLIRBRiH6kpXVP7Wjnyc\njMrP/mgtzsHhvk/l/MeRVh0ifV8HRlTiuzGapXIAqrsPRLgKij21DkTlak201udRYP3w40VUJrYB\ndogwdkaKzMtI2D+MFKKl450vA9eghn4SKo+vFh9190G0xpyseGqrAxgQv4OBx+P83fh9Chgaz/4W\n94YirWLlYqUWasmerPj5FCrAo1GLPQwYEc8GVdyNQAX8hEhERwV4PCokw5F29gAS5C+jbRLaoZZ0\nJBLSdyMNYbGaMDyICvPrqLA8Fc8Wotxu4R/Ah3F/lh+oUJ8AvIcqzusRlrGoQHkcU1BFmFpJnw2Q\nZvwIKlTDKvEdHMeD4fc4VNhvi2/sjwrpa6iwvxHH2PjOR6iCFWH4MNLmg0jvP6OCvRWwVXz3aWBk\nJW2GRp4MjetFI7wvIAE0Pe4/EXk9GDgljlfj96X4HRxlYPWIzxXAQ/GNPhG2dyL8MyK+0+P5esAt\n8a2rkADoE9+4Cq3jIPL5QbSGYwyR//GsfSUe/0aV97JIywviWAYJu1Go8foQOCLSYFqE5f34nR5x\nei
fSucjf4vrv8fw5VLbOiDAXx6T4vbymnq0T334GCdvdIs9eRI3NyHBXxGtQ5Nvo+NZW8W5xvhUf\nL/MD4t5gysah8O9JKqvb4167mnC8hnoEi0Ye/h+wU8X9jsDFtfW4xs9FaVrWHq6cF/JlcLUsVs5H\noTqzA2oEj0Ey4Jnqe9V34nofpMkfhaaB7xHHCNRgnoJ6a79H5e8U1NBvVJEV1TANjvcOqvnuIOAH\nwL/nWM7OJ+H+AGqhnkSV4mSkifSIzP57PNsECdKhkfjj0Xz7t1FF/1b497MoLFdSCsTHUdfrOGBU\nuNsXCehhEYYr4hvXocaiOK6K592RdvFefGMQ0sJXQIV+FNK4qsL9hfj2E1FYhqEewU+QwHwWabV/\nRJrhO8AvkOayVNybhipZb1rYxgFpzs+jCvEa0rJ2R93qt+Lew0iAbBBpWFTQ51FDuEsU0sGoOzw4\nwjCUcjuJwcAZle/+vHLeuVK57kB7XhTP/i/Sq10czyETx+NIi+4U+fkSEiavIOH3aKRhYR47Brg+\n/HwUNZJPUva43q1WkGbSaUAlb55GDe7wuLcgZSNYm5d/pBS6Q1HZfAI1iodW3juwcryItPwD49k3\nUdnpE/lxK+o93oca1ktQebsf2C3eOSPy8d64HlhpHO9Eva+i4i8Vx6lIg16hcm8p1Kv4ecRhYsRx\nINJ8f4cahWvim0tHuTgH9QL6o4bw+XC7UwvpdEslv0bW5NcB8d1NkcKxb3xvFGpMN453n0Zmr4lR\nHtap+L9b3D8Hab/VeG4W6fICkh/DkZlnO+BSpLTcg0y6M1CdnYTq4dA4pgDv1ZSZbVCjfyShQKKe\n4zfjfGuk5LwaxztILvVBsumdcLcKKufPxvWdwMJx/hghlyrp+l7luvhukbejq3lbj5ydX2aZ9YG/\nAF9FLd0aqHK/jwTTOGQe6IAy+1p3/0sMTD6GNN/DUIQNFZ7vufuNZrYhKgz9kBBZAmXAdJTYhSD8\nAAmfbVHBvBJVuC1QhTjE3d+K8F6KGpc3UCXvGP5MiW+8ihqIA8LfK1DB2wQVkq4R9UGoQG2HKtmV\n8c53Is6rIMH+d8p98bdElW/zSKv33P0fZval8GcUEnh7R1wcacL/RILlb5SF/VVUCU6KuMxAmmHR\nDV4B9VQeRt3LwpxU2C1/4e5Xm9mmSEuZHH4VpoKOEcepqAJA2aUuTAhF9/fDiPM1SMgZMqt1Djfr\nocbxdVT5P4hnqyKzyLtxFL2haagLfh0h9FH52AY1XNujhm3J8PMa1GCPpBwfWC3S+GVkLns5wr9s\npEd/VJ42Q+Xpv8ic1x8pAj3D3yI9Pd7/IOIPUOzb+gwqpx8h4fQbtLuqo7pwFhJEf0FjLweF2+0j\n/f6Dyk6R9u9HfsxEJpJFUAZczOkAACAASURBVI9jyfD/AiSIn6GkGx9nKmqMF0b14rcRp0UizoYa\n3ZmoXO6J8nKF+O5bSFudhsrGcqhMrB/p8EqcHxtx2y7SEFS3u8S3/oYap6KBPA81SoVZkPjmZHdf\n0cy2irRtH2nRKX5vQz31lZAs+W98d3r48fcIwzU0HQsbE9+0uL9ApPv7qJyD6uKiZrYGcJ27b2Bm\nJyAt/Q/xze8B77v7l8xs34jDPyOt9qMcR+yI6uM98c0t3H1JMxsb31oJmSQB3N1XpRXml3DvhCK1\nJorIKKCdu0+P59ujTD0QOMDd745Bw/uAh9x9HTNbFmlB9yO78Vcr/rdDGvNeqJI3i7u/YGYLxPcf\nR5V/USR4p7v7ThU/L0CZsjwq+MVAyhrIxvc8qox7oUpTnV1zYs2nT3f3lSt+r48q6yVAX3cfHPdP\nQVpcFyTwPkBdzr3M7GfAj919nXC7JKUgvqbSMK2NhH4f1CPZBpmUpkZ8T3P3y8PtKkjwFWlyHdK4\nf4eE6oNIQGyNGsE3UYEfHL/Xom7qwahyne7uG9eme4S9n7uPrX1W
4+58JNCvi3gdE+FZDgmtfVED\nex/Kl8tQHpyK8vENJGSWQgKmJ+qRDULlb6vw/yY0zvNy3F8Y9bbuRpUeZNPtBpyNeoQTI11PirR4\nMMLYHQnje9z97RbidRLqMd0St74dYfgzKvOHo8r8FFJcHkca7AikVNyI7Or3xrOrkKD4ubtvbGZn\noDx8GZWbooEFCbpn3P0MMzs5/B3g7j8xs5WBFdz9CTP7Gyp3N1GZtODut5jZBhFngAeL8hpx2wMp\nJESYVnH3GWa2qruPqbjrVKnvw1B9n4wE++nAD8PpxcBpqNHYB/hNpcwPcfevxfloNIB5FhKU20Z4\nX2guDyrhOBApYRNRHS44FZko93YN3o9FdXEY6v3/DHjB3V8Mf4ZWwvU91BP6JcrLM1xjK/9D9ejX\nqKFeM8J6L5JTP0OK2l9RI/IQahy2QA3EkbOLy8f4pCaWuTmQRvAf1MUtjrfi92ngrkqX70hi9goS\nHP9Cle9EYFLFBPAk0m5ORBX+baSxDEKjzcUsjYeQaWQMMt+MiXd3jYybGtdVe9j6SONZH2ltGyAt\n7yuocqwY/j+PBFt3mpp51kCCuzpb5rGK//sA4+L8IKRlnxvhL2z9Qwlbf8XMUJgG7kZa7uC4vyRN\nd/IsuvGjgKUr948FnqtcLwn8NM4L08WlwJg4L0xa7ZA2NYRy/KHoRr5byeOhwLZxvUfNcTcSqI+g\nnsLLkRfFcQESZBcAF4QfAynNLMMr4X4caF+5vivctqfsGT6CGqXRSJCsHGHsFu9ciSr5Y/He9Ip/\n/0ENZ2GjXijy7J9I2/pH3O+ETGwTUYP0e+DpIryop1LY2qvjD6ug8n9H3F8XeDGefZlyjKAlu/He\nSBMcgsr/LajXdStqcMegOnIu0livRgLjJaTZfhD+XFJJ8xGRViPQuEJxrI9MKEUaXh7HBZXjdCQI\n/wUsWwnnzqjMnRz5fXLcH4aUg6IMLVikD01NfYNr4n0raly7RV6eGPf6IKWvKl/eiN+iJzUljpnA\nlGZk1INI4fzYdcT3UlSeto60rR3vWDbCu3KlDAymHI/ohHoRv4n0OBk1KOcgBWVQxOmDuNdxjuXs\npyzUl0eCcSTShA6N4yjKAbnLkdB9CNmcb0Xa5AdRUJdFMy+ORa0ZSNiPRd2ol4GT4v5uqCvzYTz3\n+M6yqGXdB1XCoos9EbWib0bGFwXjbdTSDkRazPQofDORkJtZ8b8QikXDMSbcF/bEDVBr/grSIseh\nVns8mhb6X6SdHowq/IWR0VdF/J5HDctE4IpKoWmPBMgwpEG+Wak4xcDdNJoKlSHIjn1jxe174fb1\neK+wGf8NNRxDIhyTKBvo6RGHVyMt3og0fwbZqQthMSL8KsIyHgndR1CD+4fKcWDk5S8pbdgXRZi6\nRriGx/E+UgqKeD1DOc7yanxnWnzjI9SAFwOZT1femYmE9dA4Hx3PtkJjIVPDr9dRhd6GaGzC3Z2o\nbBZjCUcDp1YE+NPx+yjq7a1SaZyfjrQdQKltd6rJv9dReR6BpjP+FmniI1F9uAo1lDeiMvwsEko9\n0OybIs8fRCaVsXGvGMw+EI0bHBjH/ZXf4ng20qhIw8lx/nMk8N6N5x9QDsD3R+V8fOTb3yNOt6LG\n4lzUkEyiFOrHRlyL2VVXovpwVeT/UkiJuhgJwYmRh4dW3psYfr6F6uOuSIb8qZJn9wJLVJWkOL8i\n8ulZ1CP4H6p7xyPNe0z4PTrS80JUzv8T35sWx4cR5wXiWwOQOeleyoHXo4ujojysHcc5qMdaxPmz\naXOPLtBBqOUfWHn0DhJUt1TcroO63XuizHqRUtj9C2XEge6+tJn9CtnI16rpqg1B3bMJrmmWI5DG\nfEg8vw5VxrGosXkOFa7zUGZeF8E5Bdmbh8ZUte+h7vqKqMKtHv7/CnXlTkWZWXAMarSW8thwLUxT\na6KK+DWkfS6EegNDXKani9CU
wXNRgVoMCfFxqPC+hATe31AhHIUq1FeQCeJHSDi+jLTI8+Kbd0da\n9q7E/x1U2fojuy7IvvxOxPE0ZJtcEVWwF1CjvDgSRh7vXxHxOY6Pm6PcNV4wCg2aFXZPzOxAd7+y\n6tjMdkSVt+gub4mEybuVsIG6sUdEGr5LOYbxVzR2cT8SLN8CXnb3LcP/QyKMDyAzzsbxTl+kwS+B\nyh1IAxuFejcLIi1u8TBfrILy8XeoAXwRze/+VU2Znkw57bJbpLUj2/ltSFiNQw37s0hYvR/pezBS\neApWqJx/hPJ4SSRYl0ANAEjxWQc1QuvE9NfNIh0nR5q+4e4Lx5jWXS4TwhqoXC2H8n48Kj9rRDqf\n5O5PxnqLzd39wzCNfBs1fFtEvh0Y4bgcmVpujLD1opzfDVJIRqOxrV1R+V0fmaoeQj34v8Y7BR1R\nw7SqmfWp3F8kfh3VhV2hNNFU10GY2b/R+M7d4a6w/78Rv4ejxmiruP5v+FlMFf43TTkdrWEopqNe\nj8bL3om8eCPeK/7N7nWkhBLhqPJcJTxQjhO412Fzn19mmT0r5z2QVjSCUtN9G2kBD6BCOwkJsYGo\n2zgVzWYophn+GVXqh5BmciaqPENR5X0bmRLOjMQsRu/XB9avtJZrthDepyvng+P9d5G29RRqmduh\n7u67SMvoE8fllCPeQ1DLOxxVmNWA/4W/T6IK/mVKDbRb3D85zr9OOZWqHZqBc3PEe3J8/2pUgB6K\nZz9GgmAX1CCdTTnF8JGIwxVx3AicW+kNnQhcUsmnZ4Cucd0eCcB1WkizvoQWFNdLUk4zvAMtWiPC\nNZhyCuBUVIj7oYb2v6ihPQlpxuvX9AR3Q5XyiEir3ZAg7o0E+3NIqBgS8h1qwrkiaoh2Q2M4X69o\n2tXjB5GHhZZ4OzIv3RdHHyT8/xXnz0barhbHbUhQPxzHRDSwdyDSvH9KaZbYJOK9VYSrY6WMHo0a\n0VtQw/1E+H0xqjtfQtrhkAjrCZGOr6FyOASVkR6oUekfYfl9hH/v+NZ/0SDzLFMI5WDmrsATcW8U\npeb7CGpYil5TYQ5cPMKwJOVsqX0pe0YXAJvF+XCkGN0U/u1PmB+bKWP3R9qdDny1BTcjgVUr191p\nOm3ywMoxlqY9xVWoTLeuvNOHcq3ELqjxLWbBFLObhqCxLwhTTI0flyDN/j7KXtGkmqPoGY8jpk7P\nyTHfdoWsrE77WURsB6Q5XYQK1XTKFZ5XxO/eqBH4JxI8PZF9dXPUI5iMMulgytVcV8f5YkibWBwV\netCg52qohV8QCYCpqBKchrQEQ5XKUUu8DuqmTUEZsFy4GxFxGOnum1nTrQ7+g7rYi6HGqj3SLsYh\nE9EEVODfR3O3x8S9LVBjtDgScOORVjprAYPHSrUYYD4MVcaFgNfd/UHTUvw/oobSkKZ2grtfGwPP\nP0YaOUh7uTS0sBso59gehgYr10SV73vx3prAHt7MwGHtitrqPTP7J9Lu70WV/HZge3df1szuQr2J\nY5Bm9l/UJe2AKvs+SHBvF+fDkBa0Apr3v2IMLt/lWgTUK77zSqTLoqhcXYrK29eJ9QiEzd1rVv+Z\n2Y3Ixnw9qvCvRFp68evuP4ye4eqU8+uXi++NiDhs7OWq065o/OTLNbPHFkB5v5e7D20mHFMoB3l/\nEenwC9SFn+DufzSz3yLN+CvI/PJz1Ng8xMcHQL8cfhiafjky7g+I9BuMIriemX2AGqYF0MDgOaiu\nbo+E0MaoYX0FlbmdkIJ1EOWq8htR+TFUF16NdD0EKTbDIi4D0eDoIZHPt4W/T4VfBUuEX9+P94tt\nEF5AjVBXJEiLMalV0ESE/tRgZjsgs809KP+KKZErRfod4u5Ph9tixSvhboZr5szjEecZqGxfD2wQ\nMmEBpJB9HZWnxcLfYhaSewzKxjc2R/JwkUjfp4pn7v6P2vDXUtd+7m2Nmf0dZcI2SNBtgLSTF8
xs\nKNDd3XtGQm3h+sOQoiv7QyT0JqPCcR1wlLtPMrPlkcAciRJuU0qt+gCkaV7j7m9EOEbHt69G5psH\nohCPBl5x9x7hbiGkWf0ionAhSvRiDve74X93pFmBTCkFIyJc57j7qJq06BF+dUUF/xJKk9X/oe7d\npahQ3oy61L8O90eEQOmMCnl7pLVtCLxvZq8hwfeKux8YUxg3Ro3ltYRdz933aiabVnP3fc1sf6QB\nf5NyPnNHVECfB4aZ2d00nVFxJNDOzJZ09zcjnktRlrdiLIPw527UQIMGfC8z7Sv0fgjNdyhXEi6N\ntKU9UaPwPqq43YFJZjYl/FkkzhdB3eBVUT4XLIYGwy4BdnX3bULQ/aGZtLgMNa4foZ5AIdRBCsKe\npv8R3hEJmG8gE0KRHi+Y2bCKYN8VCcbuZtYeaaeTUd7+BTjUKzNLKnzV3XuGH1ehfF8QNZQAa8SM\nl8uQRr0eaoCPdve7m/EPd3+GptMjCyaZWaH4vByNbnukeX+AGrHFUF2bgLT/rvG7Hqp7b6J829jd\nx5n2Z7oXleXbUPk+wbW9xjVIKBZmiB7u/l/gv2Z2KOrFXBT+XRzx3g5p1ntFOXWUR8ujOvpEpE0P\nJBiPRDOFqubAsTRd7booKpOHIsXrKlTONgcuC1MhKP/XQuX+XkpT5nbx3XXj2A24Jky2+0QaHocU\n1ItQg3NcNeHNbDvUw56CFIXxqGHcsogzmg4+W+aLcEddsHVCkE9FttCxpn98+h+wV9jclwe2iEp6\nJtKAF45wjwYmuvttZvbT0FA7o27OS6jC341a0M4ue/yxwC/MrJgn2x5VyA9cezoU4SuWUwPg7u8h\nYX1uCOMTkKBdECX64nFvKVQYx6LWeFHKngDAWma2VpwXAnlfVFAuR136/7n76wARzoeRvc7jm4e5\n+6CYJngKErxvoAJ/KDJvHIIG3LaJ9CwKwp9RISwEx9XoLxSLaW1FfNcBZkSjVhT8jnG+MWrIPozw\ngwR/rX39XOAxM7sprvdGXX+8YluPxul0oGNUgGIM5RVgSdOWCAsiM81tqBF+HpnaRkd8i0pWbKtQ\ntR0PcPcNa8JWTJfdzd1PMrNdTVPznrGmG9ht6+73oQai2KtmRZpuU/AT1LtYPoT4L1F5Xq9IOzN7\nD1jCzF5HA4WnokbiS9FL2gp164ebWQdgiGnaXLXB3A140sw2cffHIzwHRbrcHem9ImpYVkZCrChr\nc8PPUMP3ZVQP30ZmlOPNbAU0JfeuELxHUU7d3AiVjaPcfYqZjQQ2qPRkl6Tcs6VY9UwocB3ie18B\n3jJN1eyAGqgOcVzs7j+PdN0IuNY0lbI7sZVCKG/7hZJ2J6onnZCg/5qZVTXfXkj4Pxfn1yHF42Wk\nGF2BlBqL60GoPC4axyRkylk+4jEtytbWKF8WiHcXizj/LcoUZjadcgEZSEn4AZIj4919g9pMMa1v\nub72frPMqR2nLQ5Ke93jSLguhVqoPkgTeA1VmJFIM3uFcjn8eCToj6G0Yf+J0lY6BFW4jZDGvDHw\n+3h2GuUS9lPi+08js9CfkAnkElRIRyONao9wc0B8610kHN8IP6airuQ/0EDa4UgzORW1zKugrtcb\nyL7Zh3IU/3hkK34B2en7oUbhPdQofYS0pAloRP41pMkvFWFcKr5dTA98l9JuW8wk+iOqmAchoXgH\nahSJsI2gxr4cz75JOYPoRcpFZtuggcO+4X51JMzWpma6FjKbHRFHT5rOyilWCBaba70b6XkN0p6/\nGt9/HnWjiXx6AAnIgZGX/VGjV8xQuDvysbAdn4cq0KYR9y2Rhjwi0vmkyKsH0eBYv0r4T/XSxno5\nzW9TMIhy+f1ukVYfRnp9hMrsVqjMnBjheQWZ1wqb9J9QediScgZZoa0W7w5F9eEj1AucFufFlNSl\n47tT4tkMyul+79DMdL/Z1M+FkTA8ACkt56Je7y18fArzFM
qFYH+JtHk18uCeiNe4SKPnUF3qE2lw\nDdJCz0b1enLk8XVx/XTEYyQqd+1qwvl4fGdFpD2/gBqP9sj0cy+qg+9GWhX5d3ONP5dUytc4VOce\nRWVvBio7BwO3hbvr0MDxeMredLGi+iZU159HjUAPytXGTwK94vx01IBMDbdTKbf2uKcmjfui9S/E\nN0bVlY/zSbifhDTrPSOxX4nIFsvUO1bcLo/mzI6K8/7xzkuRCWMppx0uiyrBynE81cy3h1TO+8Tx\nD1RB3o/jqbhXPH8DVe5D0WwLKCvVspTCtTPlPNbi91toAGwtSsG7ZjU8xFxgJLA2Cr/bR4G6GGlR\nk5CQL+JbbOJ0S/hxQLh5GwmpV5FQ/U6k1XmokeldKYizlooXwrgmrZYO999FhXgS0raujmc7oQL+\nABLELwI7tpDnSyNzwZNIuF+ONKlRVBqVVspNdWD7lJrjL0izvx34S8VdMVhVrC+4H40bnBtpUAwW\nbkVl8DLuHY3GV6q/ryOz2K/CzZ2UayWGoEZ0SjzbBrgszq9EO3US6fBdym0NJkQe3Y8E8/3oT+aL\ncKwS5WedSOsHkPApdpws1oi8TTkX+2ODeHWkb9HovRjxujO+czRqgI6mbHCK4xmkpDyFeowHRnyO\njbwYhBqEvxHbhcS31kdjKG8hRWp0xK/Ij6Go7iyOFi5CU2F3W6T7mZU0GoRMQa+jXvDKqKytgmYE\nQWWtSCUcxdEbmV5fROV6ehzPI4WjGDg+K35/jsxy11BuBjgZyali24pNKNejnIpk1gOowXkf1eWT\nohxshWRhNX0fqRxFnM/6LAv36t4U16Mu3ZJRsN6l1JjurbjbBZkBHkeFfxDlfhy7okZhWmTuR0ho\n3I8q0VLh7ieR+MXc5nWQNrV+TfiupOlMj+Gotb6rklHvoFZ+PeD5uNeu8rwopOejCn8ZZaXviUwk\nxyJtvFjEMDD8qO4x8bH9UuL+hqhbuBJqgP6JeimHIdted8oZHJvEO7UF8YOIxxTKTdTeR93I21Hl\nfoIY9W9GSD2DusHFs9WIDZeaCe/dUYi7x3Ei0lDOJvbsCHdnowpdzAueiLaWAPWq1p7DslYVzK+i\nBm0gGhyD2QhByobjWsq9cR6m3ITsVMrFPsX87ofjXqFwTI/fGZG+z1POXnk3wvJ7YMH45r+JDfLi\n+scR7nHh5ytI46sKgIFoMP+fqJyeDzw6F/XyCWQ3H0xZR56ucbMHTevvrUhRO5Vmej8Vd8uhOnBH\nXK+LxpjuRgpKH9SAvIIE6SjKwdRX4rvPRbkZE2k+EpXjohc4aw1H5bvFbLRBUa6MShml6Rz+d6KM\n7R/XOxNWhho/qxsWFvsrFfW9sEoMptydstju4k409rNOxPtO1JD8lXK20Jiab1XzeXNgpXrzc35t\nP1DdK7zYrnYsyuQbUQV5ENnAHo3XDkCZcycfH/T6HeXc0i9R2sTeotyr5JV4/2Ck2XUxs78g4f8e\n6oqORtrtWt507/S9KTX5fqhVPweZDhaO+08h+/lQpAHci4TnqqgwXgP83TWf+GJUaKci4dUDFYbl\nUAVeKb7zCnCEu69mWtZdZVM0s6M6j3pvd7+pct0OFcR1qy9aZal05Z5FmI6lnOnxOpqFsgjqBfwr\n4rk66gKvgISVu8ZQDFWm5mzcT3tsERFxOSvSpro/+NRIz2lIA9wFCeZXUcXuEGlV7NK4LBoLmLUH\nfjPxuhaVo75IO9uSshd0N7Clx7z3ljCzB4Gd3f2duN4i0uKhCMtLaCbOD9Eg4c6RdpOQENsDNbpf\nIvb3roT3BWv6/waLoEH+F1B53BbZt3eoCdMiSAn4yMy+Rjlvfl+kBM2aOFAvZvaEa/uCwUgo/QW4\n2mPdSLjpE2F6EM1quhMJeEe9sw6op7tKvDIWmSrORorZqq6tjW9Cg4N7oh7x1qjRmoC03ekof0F1\nsjB7/Rst3iq4EzVsXV
GZWYmms2kmoFWg/0Ea9VTUoz844jNrWwQrZ3NdhDTld5BZp9hjZg+kmCyD\n8hZKc9gQZGL+DZI7P0Ca/elIAXwj4vhllFfdUOP1GirvXcLfwpT3Xvi/PLHVr5fbG8z6/4nZ8akO\nqMZsli7AQma2HjHQEKPDS6MKeDmKbE9UYbYMd2OR0HmackYKaMBwOVTg2iHTwnvWdDHT0qiw3O/a\nG+OP8e5AVGBORhViozi6mtnv3L0YJLwXDSb91MyuRANIO6Ou+HFIIH6EbHe3VuJ6QIT7bqQJF/tc\nbEq5tUBHpJ2djWzCRYv+S1Rgi42mdo3fojXePdJslnAHjjftwXM4asTOQgOV0ygLywLAK6a9cuSh\n+5Hu7mZ2K5pzXNiL/4wq5p2okF+BbPgF3VB+3m5aoLY3MKBoiKoND3CXme2HGu+z0WyGLu5+TMXN\nrEbAtFnbTa6B7rGV+BfchwT/KNTYtsRKqGc2NfxfFKXpDkibW2c27xYsh8peQfEnFk+hwb5C6LyJ\nBPwA1MtbAs2rfwk4xczeQsKhGl9Qj6uYEXEkMskVW1AsR02DEDwIbGma9nk70rr3cfcDYiB8OcqF\nOPUy3syK/0DohITPtJj4MGuqnmlK345Iw70QacznIOG3fLz/LKUpaRzqMT9Lue/MWmiLhftjckBX\nVLf3Q6a/wShtF6Scjrsgaqj/E34shvJwbVRur4ln1f3Wx6H07E5MhfamU0xvRo0+wEuheB2EFJh3\nKadRHxD+3ogapWL20flI4dgJyYMRSBgPQ72AGUigrxHPR6Oy8j3UgLxGOdW7DyoDHZBSugnK29qy\nvyM1M2ya49OeLfMtlHAroYJrqNW7DSXKgyhjh7n7ljFa/hoyFRyBNL3/odZ4b7Rf++Exg2UJpEm9\nFNPEpsWsCyj/EWVRM+tNWVGnhR/V2RtroxVpJ5hZIUiPRgNLoMK6BuWWtiPRbJviWwUDUMV+K4Qr\nlAK2PRIEnVC3bFzE8ReENgb81jRNrlO88xOk5WyPZhN0AnpWhPTikXY9XbMUpqHBrcXQ1K5zUMVY\nlXKe9OHAhBDG7VBFnYFMPsugwnkU5VziIZSr/4gwTELCaRrqhSyECqPTtOH5UcTvqoj/uSiPfhzX\nf493RpnZM5FWP4mZL+94zQZQZvaK1/yLVAssS6zsM7M/oAZruVAAZiJzQu1Mn1r+AfwvGj/QYNq5\nrs23HkSNxzum1csboAo5LNweYZpr3yvS6lxKIVhMzdwcld2tXdP/MLMp7v5fMzsc6GP6N7BZU/jQ\npn/vmlbZOhK0RaPzIWqUP9aDaoXDUaOyOEq3vih93qo6cvcPTH+44yi/vx5hPdfde0VvfMvoyc1A\nGuz9qHd9RHjzIqVmDuVOqY9R/pHLgsgs9mPUQxyPetrFyuRtI3zTUDk/EfWYqpsFdkE9q2U9pqLC\nrPn9a6FZTEWv+GqUFy+i/PkWMgPeZWZ7etM56Fug6ZovRF17PuL3HFqg197MfgGc7zKPjDWzcWhP\n+lPMbCdvujFhZ1SPlkZ5+SOkPCxENGahCCyGGoXWqdd+05YHlRWqzTw7LxJpU9TSn4y0obdpujJu\nAcqN+JdALeJGqGt3FhrMKv7RaDKqVGNQY/Eh5WDUtZFYY1GX7U0kSLegnOnxI2RfLv51ZgjlZmIz\nKW2wQ2m6v8nMyPRnKfeZeARV8s2RsJ6JKslkyn3cq39o8Wic34nGJ/6E7KqTI+wHxrEH5d/fFaP2\nW8W7xcDvk0iDKf705ApUGfqgxuoiysHdG1Gjcy+VmQqUM12GIWE+k8omXs3k5+bxuyDlpmHnoy79\n/pFHxUyLgyOelxIbgSEzzfLN+Lsd5Z7dhb97NOPupIj3KahHMhCVqUWI2Rp1ltn1Kf+cYb3K/VFI\neA1DQqXYC2YqaqBeRVr8JcAa8c7AGr8fQJrqk0hwTYt0rf6ZzME0XU1ZrJR+nPKPJYZV
/JybAdXO\ndbjZMcrNuPjdiVj1S6wGpVw53h14q5J+j0R8iv2Zipk/r1P+5d9bSBgXps0rUB1aNfLwlmocKcvr\nd1GdGEvTQdIrUSM3viYeu1NOluhTOS5AA8uz9m8P99X93E9BAvdZ1BD+gnIvn+eQCa+5tKv6Uczi\nupRyL6spqE4VloQjIp1+Tzmbra59ZXw+2tyPovzHnP8jRqpdLeT9FafF3OI3kPniqyjDJ6Bex5ao\nYvREwn151J0qdlNbDxWoy1zmmKJLvkfcn440xp8ioXIP2mly1grQSpi3Rl2xQkO9HmXATmhApNDq\nCjvqKjVetEcF9FU0lemD8LcH0oDfQXb2nQo/4nkxJjHLZh33F0fzuj+M60LLPxR12YYg09HBSCP/\niLLLN9I1739J1EBW53avjxq2NdF0v0GmfUaOofw/yYJFkbb3U3c/tDbNwr9Brn2ui9kk0HQR0Hci\n7dy1YKkdaoR/U/2W16zIM7OrUXd3OGU33N39h9QQmvPmyHy2l7s/FvcXQoJ2rueDm/bv3g+Vy+1Q\nD+g2VP7eQwK6iHPBcejPUApb7y6oMf0ANe6d0dbRw5Bgu9Hde9d89+soTx5Bmu6NSMs80sx2B450\n9+2YA8zsWSRsbwD+0zpKOgAAIABJREFU6bFtdI2bYj+mO7yyGCie7YAasQVQfexKZTVolKNi24Qx\nNN2K+CDUUK+MGq4ZqP7fB1zpYeKzpn8vOczd147zM1BZ7IRMVKA0L9YlvEllZbGX9utNi/JQE5fv\nIE2+I+XGX9X93N9FiseLaPxlV3dfPXpy61H+R+tXkLY9MdwXPcli/rtFeIuG/AQkGzuisn0jkhkr\nuXsf00KwxbyV7bKB+Sbch7j718zsWyhDTgSu8hhkrbi7DdmdpiCzwnGo2/IwmkZ3BapIa6PpWA9Q\ntnC/RgOtayPNCiRgj0Et6K9QYv7J489rZxPeYv/tw5BZ5jg0eDUcTVkb0cJ7tYOgBR2RnXaaN/2j\n5+PRntxPxvsbAH91903N7BI0EDwsnj1O/EF2XC+KFu5sVhOGIciU0x9pJWeg9HwMmReeoFyhuBGq\nCMVGXUuhirYJqmTjKO2dILthYX/du7hZhL8SzqHxrWLxxfZo6uQM1OAdD/wyhPstaODpespNotxr\n9rI2s1HVRqkezOw41PvrE7cORvOHz54Tf5rxtxiohVjebxrI3RDZVy2++z+k2a1OOX/dUHn9CRIU\nRtPG/w8onTemYpZx98mV76+GeiHFn31MAH7g7qPnIi4bocbq28hUer27Xz37t5q83wk1uqBytfNs\nnHdx/QlPtd5vi8oaSCn5tbs/HH5vjlZ5bxrXf0T18TpUp4stdWfZo5tRsoC69nkfi7T7K1z/d1tc\nD0PWg41CYfkx6h1uj/K2MFv+EPVG+yANfFvUQ70BTW0+Eo0lbO/uk+Kb/0UNwe2oV78FUkjWRbPS\nVjezFdFYVLGiu+U4zCfhPtQ1MHM+WvJ/qzW/F8nCSJP5lrs/Z1q1Wqw4u6ui3Q5FWnfxTyWLoEZj\nKBLu7yHzzJqoG/0kEkivI038EsoBzGKHQ/fYec3M/gwc71qpWhSYS929WHLcUjz7tPBoa2SaWcg1\neLgwmhV0KMr8l1ElXx7YN7Tn6r4l04lVdd7UDvgU0h5PQQXD0fhGT1Rh1kOaw2OoK7kX6jrfEF6c\nhYTEw6ghax9p9kG4cSTgC36KCmMHlNZEus3amyU0jW+E3yfH7dPi3NGg3YZIkA9EQu8hZDbDtTLz\nY0Ta/rGlhrUlTMvHZ+2l483sMdIW2Mdn2CwG3O7uX4/rTij9QGaIQout/rnJANRtXxCViSobo4q/\nVjwH5dfORYP/CcO/DDIdHODu7Sv3i5lOy1Jqnu5aGbwwUppWCaVlTVRu3kfmi25IAI5Gpr6p7r5y\nTW+9IyqTnZBAvAGN/xQm0wO9MiAaYxGbI2Xpd+5+
RfRii1WwP0DmSwolzsyucvfvtxL/B1E9HeSa\nQVNcOxLmXZBAXxiZkq6jHBjtjHoM56F6eLdrhtCDwCLRk/0fWpVc7Y2Po+zhdUHyrAvK+xXdvVO4\n+9hst2bjMJ+Eex8U6O5IA2+PhHxzy22L6Un3uvt2pmlaHyK72B1x72lkb76keA1pQlcj00FXpLk+\n4u4DrdxPojANdEH2uulxfwMAn8OpZPViseVotUELDfsKZOIpNNKqBlergfwT/dlBv3jeC2kKU9HA\ndKFt3U252q/4O7qNkfbwNzTV8v7w4wFU+DY0LRvfEQ0m3ocawluRzbmgmOv/T3d/n9lgZn2RNvlW\nxHV3JOCvRA3EBUiTPjXOJwN4DDA2499INK++aOw+tvHS/MRqtjUOYT606G2Y2WaoF2WV156n/Icj\nR72bCXFebGfwEDIl/ptyg7Xj0QykDq7tb3sis089A87VMC+OTJb7orT9F3BD1UxpWtK/q8cGYzXv\nz9psLpSWO1Gv73w02+RWJNB/a9rC4Ap3/1aNH3egslZsdV30ti9EZdeb62lH2V0HNYjrIlPdFFS2\nLkYv/jYE/zCPPXpmkxZXoF7lahGO/ZDV4FKkdf8b1dMewMEee/eYtmM4GdWZnZD5prDr7xHpcTqa\nrbUBpa2fCGPRa38yegz/K3oJcb0ImgL9mRXu7VAGjInKvjTqog1txu3fkB3qftRyPoDsmrsiLf4r\naHHEXkjwbI8E/VD0p75rV/xaEGmKVW0HNK/9Y38HV3lvDWTmWYWmduBtW3qnGT+KXTAXRBr6FcAu\nkWGrEXvHu/tGdfq3ITJdFBrdCqhSXlmjDSyCBt3WQwVyE1QBpyPt6SF33zHc3o62HbgGadBdw12h\nVc4yBSBBsykxP9vdd5ydUKlpyH6AbOpfQmMue6NKMAFVxnWRCaNqhtitxr857m7PTuts6Z25JWzx\n+1COxXwbCcozTLO5VkPd9CamJ4uptl7+TeKtqOd1eLj7LhrrWS00wKFI8++DhOXCpj1aBlfLfp1h\nHksp0B9vwc0jLZkEapWWCJu7TLAj3f0rFbftkBnqyBpvzkLl85uofm8Y8b2K0rS1O03HMKD8j1qQ\njfpeZM4q7ONFIzoDTVk+fjZJgekvLqH5/dy/g8yZxf8N7x/fm44UrONd/1uwIZqAcT9SuDYO9y+h\nMYZulJudFRRhXTDCfhLK/53QrL0fEv8pPbvww/wT7oY0y6Uo/yJuMSrzlStdqKOQCWFFlChdUIV4\nDdnfLjSzF1A3/h6U0D8NU8+VyGY9IPy6CdkBv4tMA71RY/E2Khx3I4F3U4ShaEWHoAweROXPAryZ\ngdcW4lvdBfNSlLELooaiumXx7siGfQNNN416khqiofo5mq5V2ND/goTt/9BADKjR28jdj2lGmyw0\n5ftRgdoJCaNHIm1WRnbhYlHIBOLPuKNRviO+c350y1sUKpGGW3u5S+SmaI5xP1SRl6YicCMss2hO\ng7dyOlof01SyRX02A02z0zrnBdaMLT7uj0RTVr3itmj8f4l6VLj7aWGOm+5NF9WNQKudNzGz/qgH\nvC8aHC+67rMGHucgvBuiRrdWiama/s5H5sJ/0bTxvcXMHkUmr0dCabkYzdj6spn9FWm518Ur+8Z3\nnkCN7WZI2/16fLsdGoA/FTjF3bdqxrR1OhokvQqVmwOQktMJrW6/y8zOaE2Qt5ImpyI5Ux3bGoQa\n5xeQDOuETE//Ruagfu5+UPSmT0S9kKJctti7NJmdz0Gy7l+ot/IMapCGIXNyf29hh8+P+TefhPvf\nUJekOxKoG1NOTdsVrXL8Xs07J6M5yn9AkT0ofmcgQfnjEOh/ohSQ/0AC6mVkryt2f+P/2zv3aDmq\nKo3/dkggJLwCKoOA4SHoQHhIZGAAYQCjzOADlTAaRBFUUER0lqhhhIAyMmBcSwIODIq8BmeGIAoB\nHSMq8gggJIEE
IiiE90Mj8kwgCOz54zvnVnV1dd/u2923+94+31q97q3qqlOnq07ts8+3Xy7O/w6y\nUnIRsUqUe5YrfWEZZdTE7402hvh3HTQRzSPzZf0TWeRdfChRs6xaIVh1bu+fh+NfQvcyTkJrhPZe\nCPtfJKetmoKt4qrlVnd/MuyPmgpI67w2tBGzTX4ELUd3NbNV7j4hnFcqVHLaejFL5CxyAtfMtkQp\nil8K22sjv/QHC+3NQs/qLe6+rTVgaKqndQ4ngpLxeXd/ImznJ/9xaBl/i7sfGZ7zLu7+5nBspNQu\nQxTN5uHvQ8iLYmsz2x3lH9mHJhAm/y8hCm9Am8yvhqzcjuQuY/g0KussTCPLrhrzuY9BLrfXexbw\nNx9x6U+ECfFctNJ8BSkUH3JVQStSW/lAxXzU82r0ToGow7HoPTgL0b9Xm6LT6wm/89CksWHYfiH0\nfQJZnMoXEeWzX7xPZnYxsvNdiQypz4a+RN/8h9E7GA3P491947DqvRopnTcj2uYE5GVzlLs35tue\nQ7dS/u4WZvbFyOC3M5qh4vLyk5YFIEUcHDSZ/0Gc1WFoVt8XCeO5QYPfOBx/OJr5/4yWdd9DRsL3\nApeY2RS07FnPM8PpGmjw7Bu244OdZ2afRVptqcfCIIh89KoghN5Hlmt7oEgEerh5W4ADz5nZzu5+\nR2WTWW7vgDXNbJkrD/6GaCIbj4T/MnR/NyUUybAs+GkiorXGAvuZ2aeRJpWnrZ5z9y+Ge/IqemE/\nBTxrCn0nfLc7wRBaRFim3k54EZBP+jIzO7qgSc9FWlxErYCcD6B7uCi0/3jQ7KpgmdfS7SZeuErr\nLDuv3TCzeeiZrgssMxnVVqMl/2+QwJiNaMjDTC6SGwDjLDO2TUbP0lFagrtMEd7noKC2m8hcKZvF\nCnefV+8AD2H7Jb9tDKJDI69syH7wTsRxe6DStnH3a81sgpmt6zI4bx4nOjRW56IJImZ5PCQ8w4PI\njJagILhDET15BhK0H0Dae/Rs+SbyAotK0HEme0esmbBnuFZ0KogFgc5DyeGiPeouVA1pctj+h9B2\nNJJGw/79ZN5mK5GCGSnE7cgqps0ic/IA+chv4u7x990bWIuxKI34Q1Su5gfl3Lsl3P8aBKkjYbwe\nmhXXRRz65mQ1BiPPFjXRA9GD+CDS2l4ws2PQSuB/kdCJAnIK0kpvC/9vgJaDX0PRd28E7gorCRCH\nvCI3sRxLpbA9PvcbHGkJjWCeKQ/zt5Aw2ggNxgO9skjEVLI8KIZyqywBjjKzuV7pspfP7R01utut\nOr/2JkjgXoqWk1fl2jgEDfy/QZraHki7ehAJ+HHhuMmmghkezrkRPbOb0LL61UaESngBloX+fjD8\n7qLAjcU4rgjnvGxma5Y093IQGB7am1hyTEQ+dcMqRAOR2zcswh0J7jJsg4x2p6MV0hLE4Z6NvKjK\nXBp/4oGXd9Uy3RMJs6PIGeKbxCxT2ofIHxPaz+cv2gxpwHEFdAOyETxqZl9298sIKTNM9RPuCs/p\nU0i52hBRGpsiDX1/pGz8HFE2n0Vjdn64zrHonb8QrRQHqkghevXM8Fk/9GkGEqjxugcCO7uivjFR\ntYvd/YSw/RlCQaCwfW74TWtHwR6Qr92Ku18XxtzuwB2W1XDIp2m4CSmh/xe+2xs4w93nmdlJ7r6X\nma0K7b1iQkzLQjgnn2rlbppAt4T7HKQFv4FspssLtH9z91NggPO6hizvw7Rw/AXoQYK0nu+4+xlm\nthWZgNwTaUOrUaDUuSjD4BnAVqbQbiMLib4fCd910QOqyAPSAu5Bs/6PwvLrX5C3y4FWWSTiKSrz\noMwKv31vJJjzwn0qsMDMHg7bsYDzIYh6ega9HI8gbTmmXPgr8FrgSE9AmlQ03sWAo6Xuvk7ch7jQ\nyQSvg3CPHGn8RyAvnAr/7AZQS+CuiTydYtDK+8mSNOVxWRgPGwTBEZN2VcGzJF
FFY+UkKitmdRSe\npRbIJ/3aFj3bu8kmfwe+6+7fqNWWmY0xs8PItLl1EGWwLarINJQVyScQVTCOXGAYlZPfBci1MMY1\nfDTsmwZca2ZfIrMZPQrcGFYsn0O06nHhXvzBVBoSd/9c0MzfgQT/JyNlA/w6rEjPLHbWVd3pIuRx\ntQ9SUv6ekMLEzF5GY3Ommb3osuOtX2hmElJU4ip8nbBvmSm+5ZKwfxyAmW2R+93LyaqzRZyJUpdM\nRDalCciWsjT05SPhfq7MC/Kw6n2JyjxCT5IplKX0bF14B9MM1PugQXQMomI+SBba/SCVKUXXQkJr\nQjhuG6SJb0IosBuOiyHP15MVX16KNMHfIN7rSfQSGTJsLkKGu6uQ1hoLEQ8Ucw7bx1Bd7PmzTfzW\nmE5gL2S8XICWhSeTS5OKJoFxhd8eQ8sXF9qcXONzZ/j7FBr0P0AT3AWI188XmphPSOEQthegFdQV\nZLng76WyGPflKIgrLrnPDvf3R2igj29yHFxUuLdvC/18JHwWkEsrXDh3GhKIs1EwyGDXqkqfXLZv\nGMb+wjCeNw3j/WbkoQLyjvgxhTTUJW18DE3gd4TPM2EsRde6HwyhX4MWgaC8RkJ89x4ofJ5GQnMW\nEvSzkD0FQmxESVv/RUhRHbZ3Ay6u05+B35v7f3H4rEQUzzNh+6LQr3/Onf8JRP9cmPv+cPSOzwnP\naiGivc7NbX8nP25L7sWh4bfnawYcgDj360IbMU3FTYiWKS02P+RxNsyDesOSz3+jpdmGSOieRGYl\nPzkM3JmFdq5DQruiWnz4f0BAIg15CdL8TkbayBfICmh8Bmm0z1MoNkHupa8xoBsWCmQVd04DZhT2\n7UMoEkFlHpRZFPKgNHitYn7th1HAF+Fexrz5c5BAXo008jmIuzyHyhzxMRf8moivPx8JpGuQofrh\nMDj3RZrz3CbHRKnAJZQxa/P4uxOYVBiPS9t5jQb7EcftsSgQaUnoW5z8S/OIl7RTUemqDf26YLB2\nyFwM1yBX8WiQc9ZBOZFOCO/ntDBOHwnfP49oj/hxstS3r6HkfoP1vagkTEIKzmFIEXxf+JTlKYoF\ngd4fv0er/x+HsbgUUWPP5rarcseH8/K5nX4anlG+QFAs97crUpJiPvvjgRNyx00vtPvNpp/nMA/q\nB9BSJj+7xyIGfw2D/CU0i1Ulacq1E5MQPUth1qNaQC5DguhLZMUyzkRa6T3hYdwZBsPWYV/FSx8e\npOW216BOsqyS/l6NBOhyJHjXokZipzCo4m9/e4v3ex9ENcUVyU3h3j2Ekk/NCoP24/lPSTsHklVc\nWkWYBCl56cr2DdLHosDdGk3Gi5B2cyYqml08LxZveJYGS8khbfceZJD/Rvj/sOF8B0I/8km/ticT\nGFWTfxNtHogmilj45aQh9Ot34X28l9rFLyajle4KsopHm+fub/4zEykCD+U+sSzipwrvVK2V6GQa\nq9KVV8amhHv6GlJeFqJYlrLzqiYmNIHdi6jDLUMf7kfU35b1+oX89h9Dgj3ez1W5+3kfopNvDe/T\naWSK0dO5dhYV2m0owV3FOcM9sEtuRvFBXo5c4+qdMx3xZNuHgXwNuWUsNQQk0kzmI6EwgWyJlX/p\nH6fw0qOlf8zjvH/4/9tN/MYBSilsb0Iu41yH7++A8ETawgNouXpDGLC75o79BbLYx+1J4WUcqLiE\nsjjuEPY1tYSu0b+iwF0ZxsCW4fM14NqS8+4D/nYI96Ot2u4Qn8neSEB+JWz/Krz4g07+Ndo7FxkR\nH0ET9lJCeb8m+zWoYEUacnH1E2m+s3Kf7yFFLa6oJyLbT8xyugYKv+/EOF+AtPQXUYbZc9AqOM8Y\njA9/o2IX928RxuONhfZvHEKf4j3cKvy9Asmb/0CUzVnAnNzxNSuwFbcb+XTFzx0GjFnRXQ8Ad7/e\nlMs7VvpZCdVh5Zb5i++FBMJspKnUjDIN5x
UjY38Q+nA+EriRJ7sceNSzTHRjkBfCQE4SlFvmVXoc\nVulfvgaKjv0TWu1MRZPiU+HYO1Eysz1y5y9GBqpb0AorulOuh3jTSUhrjtV37g3HVzyzQfq4HZmL\n5HHuvk3h+4Hsf7l9PeGz3g6YcrIcgFaLfzCF5u/g7vMbPL8sjuJnPkiFqSH2tSwHVNW+sH8p8Ji7\nH2Ah0R0KcNrJaiS6a6Ff+XH+acT3LyFzoX0jWTS3oxVhPjgyBs/FUnv3UBl5ugMy+J7PIG60VhmN\nHo/7uuWC10xZTc/2Sm+3n7r7RmF7oFpd2XYj6Iq3TIm7XvTz3g/x4YMh7xb5PXe/xsxOHewkl3fC\nH5E/8FjEtT1PFlocix28DU0CV+TOOx9pBK8hw1PPC3ao8i8/AvHw3w1fX460vuj58Ao5b4Lgl+xI\n69kbuaQ+hYxADyF6Zi30PH4b2q5KE9tAH/MukltZVrEJ5Fo5kNzLesRnvRWYommLSb/w4A3h8vl+\novzsUhTjKP6ClJVOYIyZTfIs0nhDasuR+4G9godJzHu+HMDlwjxhKB2w2gFItyMB/CSyoUXPk48C\nU939A4Xjz7QQHOkqcHMioi1vRva6vOfQ3ujdOIDankRl0egHk6X/vQvJnCco93YzUxwJgJvZc7FZ\nKuNOGkK3IlSXohn1Fleu8rcig0GtFLnF869Gs+009DBeRFGtOw1y3unIz30ZlTk9amUenOnKBXIg\nEoL3oxu9JYoa+1kj/e0VBJey7XLbNyEaJgZxHYtWQZeh3/kOpAV9ONfMWDKj13Zo5XNO2HcQmmwH\nzXtRp4/PUx1hG939HBl54/9WeTbuJfncew2miMyY9OtoZOtY4Q3UxazR3oloib8/mrgdPYeT6p44\ntGuVRhq7+yWWBWmBPKymIPuJoWCdK4Fjwqp5IJ31EPrw8fBvrQCkmSjae0+yqO3fu1ICbIPiY64O\nbZWyAMg4m69z0FCK6XqrKFP2y5g3KV87uKoWgg+SkrghtIvzapKLui38vYPg9khzBsohcdiIMlir\nietEr4YBzjlsb02ugvpI+VDNkR+OVi43hM99SEC8J3xeV9LGLWSupktQrMJDYXsiJR4Ebf4N24e/\nZd4RTbv/dek5LIz3L7fvthbam45SD0CDrpQt9r/UboFWwPGzJ1muosVhbK1GE/WNYXtqi/24hVAF\nKmyPC/vyni7PIE35pXDMBHLeb9TwZKPgOVTcrtOnW3N9i7Vo7yu5PwOf3Lnj0ermbKRUjW30XpR9\nuhXE9KgpYvMnwC/M7Gmy4tGDwt1XkVsOeePL2OVoAKwe7MCAqBk+75WFD5aT5YoYSShbCq5ANNSr\nqLjJA2jpOBbRV7j79bk2xnuWL9yQp8rrw/arVGvT7cYlaLW2o+cqBbn70yEoZCQgBno9EVaFj5Pl\nMBkKTnT3uUH73A9pn+dQmRu+bfAcjVbAw1TmBfo9irr9FaIyxiLj4hMMPYo2j1oBSJeS5ci5Enm9\nXBP6vsrM8mM0Hxx5uil/zRiqI0+3BT5uZn+gforpq4NsOwMZT0H0DF4jfXUOF6GxcQOaGLenMsV2\nU+iKcPeM+zo5LFXWRyG6HUGOo1uFHlgxvLqYdnTgq/D3djP7KaIrHGlKt0X+10cAzxtwQGH7u4g/\n/0nYnon846+jklfMC/eVZraLK1PlBUh7H2uKJD4IGZw6ifhiNsP99hpONbP1UWj5WWR89FAxJBtU\nB1DMC7QCVVI63yqrkq3L0KNo8/h3YHGQIYZ48VNQhGssKv0CciB4OWxvTaVydwh6L2a76KJNkM/5\nvYVrxXQojw3Sp9kofuYdiLu/AfHuJ1qWwiOimHJ6O8/KBp5PxtUPCV17Gaw6XeumZGkx242YJKiY\nW2UwREEyHqUYjobXFSji9b0Mb26SluAFHs/MtvJKDv4U4GF3r1ca7QsoSVv0PFgbaU5/oTr3RycQ\nX45vAz
ebMixClmVyJGA6IekXsG+YmGZTWcKwGdTSPocbY9395dz2SSiv0keQX3tEHIctvTtBdvyM\nbIXyFXd/0swesSxHznz0/m9mZpeSpdeObTTKAjTKLFyEVvUxMd8MwoTg7qWJ7XIYWMm4cs00eMly\ndMugOosm07W26boTEfdWUVQ6POCy409w92820O5Mdz+tvb3tPErcsRYAD7r7jDrnlOWRP9SbzB0+\nVORdwqzShbJmLdteQzPuhA2215IrZbtgZr9AdX6vCtvXIQrtx4gCfJA2Gr0tVGcr7kPCORZPj7VN\nVyOBeysyAH+6HX0o6VOF00KtfTXOfZXMecCQ4hSLd+Q1/IbQLc294XStbcYvka9t5Ix3AK4zs0dd\nZcF2BN7n7qeGfg0q2AOmI4PMSEPk4OP9WBd4u6lwuZPVnszTVhcjoR615BlUVsHpNAY0wzrcb6+j\nrZRSCzaoduNo4FIzix47r0eTzH2mKk2l6YKbRVAwJgCvM8XLRBV3PcQAbOZZzvfl4bubPPOQeXs7\n+lEDpdlaGznRc7Vq24FuCfdm0rW2E3ljICiQ4U+E5ZCrIMAPkWGxGXTaiNgpRA7+QzW+j8FN2yID\n3cZouf9hsknw16bKQG1DmGS3oLIaUIw52L2d1+oSRjKlVBPufj+we3D/A9kTYrrmYsbIeE6jNRHy\nOIosAGkhlQFIZyEFZbsw+T+DPMDmBFfNj5Y32RqCe7cjh43otOBk+feHHcMu3IOl+mprMF1rm5E3\nBoJScD5FZYDAK9WnDYruhPm2iBwHH0saltJWKOXA8Sg/zt1Ia/owMgw2rJk0AlPU8I7hOjWDRUYy\nvEbhkm72qR0ws42prKm7Nyo6cj+KDQFNYnmjZKM1EQbgSv9bLwDpc2SeLm9Cro2OxvmNyKOm3XhP\nB9psCcMu3IPGPh1FgD2HghtO8gbrAraIojFwC0QrzAIws4MZ2nJ2pGruRRRpq7WRQWqcu/82GHim\nokjdV0zVgd6EqsYspYmUA3WweyP85EjHCKaU6uFC5EH1r2F7Gkqg9W6kJHwM2dpWIS+Sc1u8XqzO\nVnQBzXuFzSCjFx8K4/SYFq9bhaKzQi+gW7TMIuAZdz9+0CPbi6VoQEVj4BzkR/pWM3sMeesMZdk2\nd/BDRgQqaCvPQsSfCC5kjl6cf0SRvod3oA8355bUCSMLr3P3y8xsJoC7Lzezl4JQjTV/Tw/HzkCe\nJYe0cL1SF9CCoK2whbmK2vd8FHM70LUaqsChNoS6gC2izBj4FDL8jHHVc6xCnnNu0fDa66igrYLh\n6UWk6ZyHPBAWECbBDmkrFyMB/yT1g0USeg8rzWwjAk1plTV1izV/22Gr6RUX0J5Et1whJ5ft7/TS\nxrIC0vni21+msnwdrnJc+fN+Q+Cco7uamd3l7lM62d/hhpntigp2RNpqE1S1ZmH4fiJ1JsE29eE+\nRNktJePce3LZm1AJM9sFGTSnoOjQ1yPqZEmJ2+1uKM/Mx1q4Xk+4gPYquhWh2q0XdVHQJqLb5aYo\ng9xgbpgTcpxzxFAMr72OIm31NLC/me2TPyjeh+Ik2CasiH7SCSMOWyPKbnPkgbUbmYwprfnbiq2m\nh1xAexIjJVy7XYjGwGJR6YOpP8D+nOOcWzG89jqKtNVslLHwsppntB+LgzvqPEZYKt+EgRw3k1DK\n23yOm2Lqi4QOo2vFOrqBEjpoLWQY3Cb8vxKgGEFnZlshznkPpM1GzvnBDnd5WNFKdF0b+3BBye62\nRTUmdA4xytbMTkNUyQ9bibxNaA19pbkX6aAQRHIPyjfxdVSJ6Xcl5y0H3jkcnHOXURpdFyICj6S6\nuETbBW67ohhWoplOAAAEwUlEQVQTuoJk4Owh9JXmXkRO04iJ9ccBN8QoyILhtQod4py7BlMZsLdQ\nTVu9ERU8eIHcJOjuQ05HWqcPc0p2Pwvc7u5Xtvt6Ce1DMnD2FvpKcy9B
zML2jJlNQcbVN+S+H458\nN72EWrzoNciXeJ67XxQ48Rs61IfxyOUyxg58CNFgO5nZvu7eSmrchA4iGTh7C/0u3M8Lxp+voVTA\n66A0pQC4+ynd6lg3UMuLycxWhUCUWpNgO7EjsGcuBcI5aCLZC3nzJCQkNIC+Fu7u/v3w7/XUyXEx\nnJxzj6LuJNhmTArtx+CXicCG7v6qmTVaQSshoe/R18YOMzvOzNYz4ftmtsjM3lVy6CWo9Ny7UVX1\nzRiZZfaGBHf/vrs/7e7Xu/tW7v4Gd281L0gtnIGSPl1gZheipE/fCsbsazt0zYSEUYd+N6je6e47\nmfKXH40000s8FIPIHVfX8DraYWbHoYRQz6PsnbsAX+2UoSwY4v4ubN7m7o/XOz4hIaEafa25k2Vz\n/CfgYne/O7cvj6LhdX06xzn3Io5w9+eAdwEbAYeh+pWdwhhUyvBp4M0hdWxCQkIT6GvOHVhoZvNR\nrumZpmpQr5UcN5yccy+iahI0a7HAY60LmZ2OAsuK+dyvr3lSQkJCFfqdlhkD7Awsd1U+3wjY1N2X\ndLlrPYUQNbopmgR3QmX1rnP3qR241r3Aju6ejKcJCS2gr2kZd38N+COwXVj6bw9sUDyuCcPraMWR\nwFeBXYMv85pApyJJl6NSZQkJCS2gr2mZHAWwjCzxfxkFcIS7nxkMr5FzvgRVKRr1cPfXzCxOgh0Z\nM2Z2Frr3q5C3zC+pTBz2+VrnJiQkVKOvhTtwEPCWBiiAYeOcexFNTIKtINZhXYjsGgkJCS2g34V7\npAAGE+6NGl5HKxqdBIcMd78I6hbpTkhIaAJ9KdyHQAEcSWZ4XRUMr/2UvbDRSbAdqFWke49huHZC\nwqhBXwp3mqQAhoNz7kV0iQevVaQ7ISGhCfSNoMqjWQpgmDjnXkQ3ePBaRboTEhKaQL/7ud8CvDNq\nima2DjDf3fcoHNfXvte1JsHgFtnua9Ut0p2QkNAY+trPnRIKACijAPrd9/qXiPuOWJvOJfGKRbpX\noxQE/4miVRMSEppAX9IyOdSlAJLv9QCGkwcvFumegWIKpnfoegkJoxL9Lty/AMw1swoKIPd98r0W\nhpMHn1IoyP1rM1vWoWslJIxa9LtwjxTAu5G2eBU5CiD5Xg9gsEmwnSgt0t2hayUkjFr0u0H1MiTU\nLw27ZgAbuPv0wnENGV5HK0IlqmPJJsGbgbPc/aUOXKtWke5XAHf3Hdt9zYSE0Yh+19wbpQD63fd6\nOHnwWkW6ExISmkC/C/dGKYB+970eNh68VpHuhISE5tDvwn0qsMDMKigAM1tKJQUwnJxzLyLx4AkJ\nIwz9zrlPrvd91CKHk3PuRSQePCFh5KGvhXujaNTwOlrR6CSYkJDQO0jCvQGY2bIC51y6LyEhIaFX\n0O/pBxrFIjPbPW4kzjkhIaHXkTT3BpA454SEhJGGJNwbQOKcExISRhqScE9ISEgYhUice0JCQsIo\nRBLuCQkJCaMQSbgnJCQkjEIk4Z6QkJAwCpGEe0JCQsIoxP8D9UGUiU1jhpkAAAAASUVORK5CYII=\n", | |
"text/plain": [ | |
"<Figure size 432x288 with 1 Axes>" | |
] | |
}, | |
"metadata": { | |
"tags": [] | |
} | |
} | |
] | |
}, | |
{ | |
"cell_type": "code", | |
"metadata": { | |
"id": "KHXlMBwI4RFO", | |
"colab_type": "code", | |
"colab": {} | |
}, | |
"source": [ | |
"#keep the raw return and PERMNO aside -- they must not be rank-normalised later\n", | |
"tmp_df=data[['return', \"PERMNO\"]]\n", | |
"#drop identifier/label columns that are not predictive characteristics\n", | |
"train_df = data.drop(columns=['permno','date','return', 'PERMNO'])" | |
], | |
"execution_count": 0, | |
"outputs": [] | |
}, | |
{ | |
"cell_type": "code", | |
"metadata": { | |
"id": "GjeqD6JQHYHW", | |
"colab_type": "code", | |
"outputId": "2fbfbfc1-c4d9-48f0-e785-ec54e0c5cccf", | |
"colab": { | |
"base_uri": "https://localhost:8080/", | |
"height": 338 | |
} | |
}, | |
"source": [ | |
"#quick preview of the characteristic matrix before imputation\n", | |
"train_df.head()" | |
], | |
"execution_count": 0, | |
"outputs": [ | |
{ | |
"output_type": "execute_result", | |
"data": { | |
"text/html": [ | |
"<div>\n", | |
"<style scoped>\n", | |
" .dataframe tbody tr th:only-of-type {\n", | |
" vertical-align: middle;\n", | |
" }\n", | |
"\n", | |
" .dataframe tbody tr th {\n", | |
" vertical-align: top;\n", | |
" }\n", | |
"\n", | |
" .dataframe thead th {\n", | |
" text-align: right;\n", | |
" }\n", | |
"</style>\n", | |
"<table border=\"1\" class=\"dataframe\">\n", | |
" <thead>\n", | |
" <tr style=\"text-align: right;\">\n", | |
" <th></th>\n", | |
" <th>DATE</th>\n", | |
" <th>mvel1</th>\n", | |
" <th>beta</th>\n", | |
" <th>betasq</th>\n", | |
" <th>chmom</th>\n", | |
" <th>dolvol</th>\n", | |
" <th>idiovol</th>\n", | |
" <th>indmom</th>\n", | |
" <th>mom1m</th>\n", | |
" <th>mom6m</th>\n", | |
" <th>mom12m</th>\n", | |
" <th>mom36m</th>\n", | |
" <th>pricedelay</th>\n", | |
" <th>turn</th>\n", | |
" <th>absacc</th>\n", | |
" <th>acc</th>\n", | |
" <th>age</th>\n", | |
" <th>agr</th>\n", | |
" <th>bm</th>\n", | |
" <th>bm_ia</th>\n", | |
" <th>cashdebt</th>\n", | |
" <th>cashpr</th>\n", | |
" <th>cfp</th>\n", | |
" <th>cfp_ia</th>\n", | |
" <th>chatoia</th>\n", | |
" <th>chcsho</th>\n", | |
" <th>chempia</th>\n", | |
" <th>chinv</th>\n", | |
" <th>chpmia</th>\n", | |
" <th>convind</th>\n", | |
" <th>currat</th>\n", | |
" <th>depr</th>\n", | |
" <th>divi</th>\n", | |
" <th>divo</th>\n", | |
" <th>dy</th>\n", | |
" <th>egr</th>\n", | |
" <th>ep</th>\n", | |
" <th>gma</th>\n", | |
" <th>grcapx</th>\n", | |
" <th>grltnoa</th>\n", | |
" <th>...</th>\n", | |
" <th>pchsaleinv</th>\n", | |
" <th>pctacc</th>\n", | |
" <th>ps</th>\n", | |
" <th>quick</th>\n", | |
" <th>rd</th>\n", | |
" <th>rd_mve</th>\n", | |
" <th>rd_sale</th>\n", | |
" <th>realestate</th>\n", | |
" <th>roic</th>\n", | |
" <th>salecash</th>\n", | |
" <th>saleinv</th>\n", | |
" <th>salerec</th>\n", | |
" <th>secured</th>\n", | |
" <th>securedind</th>\n", | |
" <th>sgr</th>\n", | |
" <th>sin</th>\n", | |
" <th>sp</th>\n", | |
" <th>tang</th>\n", | |
" <th>tb</th>\n", | |
" <th>aeavol</th>\n", | |
" <th>cash</th>\n", | |
" <th>chtx</th>\n", | |
" <th>cinvest</th>\n", | |
" <th>ear</th>\n", | |
" <th>nincr</th>\n", | |
" <th>roaq</th>\n", | |
" <th>roavol</th>\n", | |
" <th>roeq</th>\n", | |
" <th>rsup</th>\n", | |
" <th>stdacc</th>\n", | |
" <th>stdcf</th>\n", | |
" <th>ms</th>\n", | |
" <th>baspread</th>\n", | |
" <th>ill</th>\n", | |
" <th>maxret</th>\n", | |
" <th>retvol</th>\n", | |
" <th>std_dolvol</th>\n", | |
" <th>std_turn</th>\n", | |
" <th>zerotrade</th>\n", | |
" <th>sic2</th>\n", | |
" </tr>\n", | |
" </thead>\n", | |
" <tbody>\n", | |
" <tr>\n", | |
" <th>1</th>\n", | |
" <td>19860331</td>\n", | |
" <td>11960.000000</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>0.255908</td>\n", | |
" <td>-0.257143</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>...</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>0.055511</td>\n", | |
" <td>1.891760e-06</td>\n", | |
" <td>0.044776</td>\n", | |
" <td>0.031004</td>\n", | |
" <td>1.021089</td>\n", | |
" <td>1.079774</td>\n", | |
" <td>1.023392e-07</td>\n", | |
" <td>39.0</td>\n", | |
" </tr>\n", | |
" <tr>\n", | |
" <th>2</th>\n", | |
" <td>19860430</td>\n", | |
" <td>16330.000000</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>7.897668</td>\n", | |
" <td>NaN</td>\n", | |
" <td>0.368892</td>\n", | |
" <td>0.365385</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>0.251252</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>...</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>0.037231</td>\n", | |
" <td>7.315091e-07</td>\n", | |
" <td>0.145161</td>\n", | |
" <td>0.044548</td>\n", | |
" <td>1.033817</td>\n", | |
" <td>1.745333</td>\n", | |
" <td>7.467463e-08</td>\n", | |
" <td>39.0</td>\n", | |
" </tr>\n", | |
" <tr>\n", | |
" <th>3</th>\n", | |
" <td>19860530</td>\n", | |
" <td>15172.000000</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>8.472954</td>\n", | |
" <td>NaN</td>\n", | |
" <td>0.388370</td>\n", | |
" <td>-0.098592</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>0.251604</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>...</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>0.048336</td>\n", | |
" <td>1.215981e-06</td>\n", | |
" <td>0.022727</td>\n", | |
" <td>0.011246</td>\n", | |
" <td>1.184555</td>\n", | |
" <td>1.502285</td>\n", | |
" <td>7.649551e-08</td>\n", | |
" <td>39.0</td>\n", | |
" </tr>\n", | |
" <tr>\n", | |
" <th>4</th>\n", | |
" <td>19860630</td>\n", | |
" <td>11793.859375</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>8.250098</td>\n", | |
" <td>NaN</td>\n", | |
" <td>0.400748</td>\n", | |
" <td>-0.222656</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>0.273223</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>...</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>0.062245</td>\n", | |
" <td>2.744328e-06</td>\n", | |
" <td>0.115702</td>\n", | |
" <td>0.038863</td>\n", | |
" <td>0.959128</td>\n", | |
" <td>1.756198</td>\n", | |
" <td>7.360224e-08</td>\n", | |
" <td>39.0</td>\n", | |
" </tr>\n", | |
" <tr>\n", | |
" <th>5</th>\n", | |
" <td>19860731</td>\n", | |
" <td>11734.593750</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>8.113567</td>\n", | |
" <td>NaN</td>\n", | |
" <td>0.476698</td>\n", | |
" <td>-0.005025</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>0.272432</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>...</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>NaN</td>\n", | |
" <td>0.049174</td>\n", | |
" <td>1.270483e-06</td>\n", | |
" <td>0.042553</td>\n", | |
" <td>0.020357</td>\n", | |
" <td>1.044263</td>\n", | |
" <td>1.239524</td>\n", | |
" <td>2.000000e+00</td>\n", | |
" <td>39.0</td>\n", | |
" </tr>\n", | |
" </tbody>\n", | |
"</table>\n", | |
"<p>5 rows × 96 columns</p>\n", | |
"</div>" | |
], | |
"text/plain": [ | |
" DATE mvel1 beta ... std_turn zerotrade sic2\n", | |
"1 19860331 11960.000000 NaN ... 1.079774 1.023392e-07 39.0\n", | |
"2 19860430 16330.000000 NaN ... 1.745333 7.467463e-08 39.0\n", | |
"3 19860530 15172.000000 NaN ... 1.502285 7.649551e-08 39.0\n", | |
"4 19860630 11793.859375 NaN ... 1.756198 7.360224e-08 39.0\n", | |
"5 19860731 11734.593750 NaN ... 1.239524 2.000000e+00 39.0\n", | |
"\n", | |
"[5 rows x 96 columns]" | |
] | |
}, | |
"metadata": { | |
"tags": [] | |
}, | |
"execution_count": 22 | |
} | |
] | |
}, | |
{ | |
"cell_type": "code", | |
"metadata": { | |
"id": "UYZyjDCi-Cdm", | |
"colab_type": "code", | |
"colab": {} | |
}, | |
"source": [ | |
"#fill missing characteristics with the cross-sectional (per-month) median.\n", | |
"#transform('median') broadcasts each DATE group's median back to row shape,\n", | |
"#equivalent to the original per-group fillna apply but vectorised instead of\n", | |
"#a Python-level groupby.apply, which is far slower on a panel this size\n", | |
"for col in train_df.columns:\n", | |
"    train_df[col]=train_df[col].fillna(train_df.groupby('DATE')[col].transform('median'))" | |
], | |
"execution_count": 0, | |
"outputs": [] | |
}, | |
{ | |
"cell_type": "markdown", | |
"metadata": { | |
"id": "dWFbH0G1Npmh", | |
"colab_type": "text" | |
}, | |
"source": [ | |
"Some columns still have missing values because those metrics were not recorded in early years. We fill those remaining missing values with 0 in the next cell." | |
] | |
}, | |
{ | |
"cell_type": "code", | |
"metadata": { | |
"id": "PlqZ1xulFRtI", | |
"colab_type": "code", | |
"colab": {} | |
}, | |
"source": [ | |
"#replace any still-missing values with 0; these are columns where an entire\n", | |
"#month had no observations, so the per-date median fill above could not impute them\n", | |
"train_df.fillna(value=0, inplace=True)" | |
], | |
"execution_count": 0, | |
"outputs": [] | |
}, | |
{ | |
"cell_type": "code", | |
"metadata": { | |
"id": "YASkyyi5PUe1", | |
"colab_type": "code", | |
"colab": {} | |
}, | |
"source": [ | |
"#rank-normalise each characteristic cross-sectionally: within every DATE group,\n", | |
"#map values to average percentile ranks in (0, 1] (pct=True)\n", | |
"ranked_train_df = train_df.groupby('DATE').rank(method='average', ascending=True, pct=True)" | |
], | |
"execution_count": 0, | |
"outputs": [] | |
}, | |
{ | |
"cell_type": "code", | |
"metadata": { | |
"id": "evs3I9eiPUe3", | |
"colab_type": "code", | |
"colab": {} | |
}, | |
"source": [ | |
"#groupby().rank() drops the grouping key, so re-attach the DATE column\n", | |
"ranked_train_df = pd.concat([ranked_train_df, train_df['DATE']], axis=1)" | |
], | |
"execution_count": 0, | |
"outputs": [] | |
}, | |
{ | |
"cell_type": "code", | |
"metadata": { | |
"id": "I67q7qHctWho", | |
"colab_type": "code", | |
"colab": {} | |
}, | |
"source": [ | |
"#re-attach the untouched return and PERMNO columns to both frames\n", | |
"ranked_train_df = pd.concat([ranked_train_df, tmp_df], axis=1)\n", | |
"train_df = pd.concat([train_df, tmp_df], axis=1)" | |
], | |
"execution_count": 0, | |
"outputs": [] | |
}, | |
{ | |
"cell_type": "markdown", | |
"metadata": { | |
"id": "ObmLNipKPUe5", | |
"colab_type": "text" | |
}, | |
"source": [ | |
"Finally, in this paper, the author rank-normalizes each cross-sectional characteristic, so we did the same. We make sure the ranks along the second axis are percentiles lying in (0, 1]." | |
] | |
}, | |
{ | |
"cell_type": "markdown", | |
"metadata": { | |
"id": "_OMn7ZxhPUe7", | |
"colab_type": "text" | |
}, | |
"source": [ | |
"# 3. Model" | |
] | |
}, | |
{ | |
"cell_type": "markdown", | |
"metadata": { | |
"id": "eguSKm-MPUe8", | |
"colab_type": "text" | |
}, | |
"source": [ | |
"We build the model based on the PyTorch v1.4. The implementation is simple, and contains two classes.\n", | |
"* `Perceptron`: a sequence of Neural Network\n", | |
"* `Autoencoder`: The main model.\n", | |
"\n", | |
"There are several modifications:\n", | |
"* I am a bit confused about eq. (16). Because Z is rank-normalised, I think x = r*Z is enough.\n", | |
"* We can flexibly change the structure of the beta neural network by changing the parameters.\n", | |
"* The paper did not mention the initializer, so I just used the default setting\n", | |
"\n", | |
"Both are subclasses of torch.nn.Module. Please refer to my comments for details." | |
] | |
}, | |
{ | |
"cell_type": "code", | |
"metadata": { | |
"id": "oYbx_zecPUe9", | |
"colab_type": "code", | |
"colab": {} | |
}, | |
"source": [ | |
"class Perceptron(nn.Module):\n", | |
"\n", | |
"    def __init__(self, in_, out_, hidden_):\n", | |
"        \"\"\"\n", | |
"        Build a feed-forward network: Linear -> BatchNorm -> ReLU for every\n", | |
"        hidden layer, plus a plain Linear output layer.\n", | |
"        :param in_: int, input dimensions\n", | |
"        :param out_: int, output dimensions\n", | |
"        :param hidden_: list or tuple, # neurons for each hidden layer (excludes the input and output)\n", | |
"        \"\"\"\n", | |
"        super(Perceptron, self).__init__()\n", | |
"        self.sequential = nn.Sequential()\n", | |
"        for i in range(len(hidden_)+1): # output layer is not included in hidden_, so #layers = #hidden+1\n", | |
"            # define the input/output dimension of each layer\n", | |
"            if i == 0:\n", | |
"                input_ = in_\n", | |
"                output_ = hidden_[i]\n", | |
"            elif i == len(hidden_):\n", | |
"                input_ = hidden_[i-1]\n", | |
"                output_ = out_\n", | |
"            else:\n", | |
"                input_ = hidden_[i-1]\n", | |
"                output_ = hidden_[i]\n", | |
"\n", | |
"            # no batchnorm or activation for the last (output) layer\n", | |
"            if i == len(hidden_):\n", | |
"                self.sequential.add_module('linear'+str(i), nn.Linear(input_, output_))\n", | |
"            else:\n", | |
"                #dropout layer (kept disabled)\n", | |
"                #self.sequential.add_module('dropout'+str(i), nn.Dropout(p=0.3))\n", | |
"                # add the linear layer\n", | |
"                self.sequential.add_module('linear'+str(i), nn.Linear(input_, output_))\n", | |
"                # add the batch norm layer\n", | |
"                self.sequential.add_module('batchnorm'+str(i), nn.BatchNorm1d(output_))\n", | |
"                #dropout layer (kept disabled)\n", | |
"                #self.sequential.add_module('dropout'+str(i), nn.Dropout(p=0.1))\n", | |
"                # add the activation layer\n", | |
"                self.sequential.add_module('relu'+str(i), nn.ReLU())\n", | |
"\n", | |
"    def forward(self, x):\n", | |
"        return self.sequential(x)\n", | |
"\n", | |
"class Linear(nn.Module):\n", | |
"    def __init__(self, in_, out_):\n", | |
"        \"\"\"\n", | |
"        A single linear layer. A Dropout module is constructed but not applied\n", | |
"        in forward (the call is commented out).\n", | |
"        :param in_: int, input dimensions\n", | |
"        :param out_: int, output dimensions\n", | |
"        \"\"\"\n", | |
"        super(Linear, self).__init__()\n", | |
"        self.linear = nn.Linear(in_, out_)\n", | |
"        self.dropout = nn.Dropout(p=0.3)\n", | |
"    def forward(self, x):\n", | |
"        x = self.linear(x)\n", | |
"        #x = self.dropout(x)\n", | |
"        return x\n", | |
"\n", | |
"\n", | |
"class Autoencoder(nn.Module):\n", | |
"\n", | |
"    def __init__(self, P, K, hidden_):\n", | |
"        \"\"\"\n", | |
"        The autoencoder network: a multi-layer beta network and a single-layer factor network.\n", | |
"        :param P: int, # characteristics\n", | |
"        :param K: int, # latent factors\n", | |
"        :param hidden_: list or tuple, # neurons for each hidden layer of the beta network (excludes the input and output)\n", | |
"        \"\"\"\n", | |
"        nn.Module.__init__(self)\n", | |
"        self.beta_net = Perceptron(P, K, hidden_) # beta nn: input Z: N*P, output BETA: N*K\n", | |
"        self.factor_net = Linear(P, K) # factor nn, one linear layer: input x: 1*P, output f: 1*K\n", | |
"\n", | |
"    def forward(self, z, r):\n", | |
"        \"\"\"\n", | |
"        :param z: N*P tensor of characteristics\n", | |
"        :param r: 1*N tensor of returns\n", | |
"        :return: 1*N tensor of fitted returns, f @ beta'\n", | |
"        \"\"\"\n", | |
"        beta = self.beta_net(z)\n", | |
"\n", | |
"        # cross-sectional regression of r on z: x = ((Z'Z)^+ Z'r)', fed to the factor net\n", | |
"        zz = t.mm(t.t(z), z) #P*P matrix\n", | |
"        zz_ = t.pinverse(zz)\n", | |
"        zr = t.transpose(t.mm(r,z),0,1) # P*1\n", | |
"        x = t.transpose(t.mm(zz_, zr),0,1) # 1*P\n", | |
"        #x = t.mm(r, z)\n", | |
"        factor = self.factor_net(x)\n", | |
"\n", | |
"        return t.mm(factor, beta.t())" | |
], | |
"execution_count": 0, | |
"outputs": [] | |
}, | |
{ | |
"cell_type": "markdown", | |
"metadata": { | |
"id": "cIgj6JycPUe_", | |
"colab_type": "text" | |
}, | |
"source": [ | |
"# 4. Experiment" | |
] | |
}, | |
{ | |
"cell_type": "markdown", | |
"metadata": { | |
"id": "YKM3Aig0PUfA", | |
"colab_type": "text" | |
}, | |
"source": [ | |
"## 4.1 Training" | |
] | |
}, | |
{ | |
"cell_type": "markdown", | |
"metadata": { | |
"id": "15BNNnXBPUfA", | |
"colab_type": "text" | |
}, | |
"source": [ | |
"We trained the CA0 - CA3 mentioned in the paper. Below are functions for the training and evaluation process.\n", | |
"\n", | |
"I choose the 0-636th month as the training set, and the 636-677th as validation set. " | |
] | |
}, | |
{ | |
"cell_type": "markdown", | |
"metadata": { | |
"id": "EdfdmNV5XnOW", | |
"colab_type": "text" | |
}, | |
"source": [ | |
"**Define** the evaluation function. " | |
] | |
}, | |
{ | |
"cell_type": "code", | |
"metadata": { | |
"id": "kwA7wes7Xgkp", | |
"colab_type": "code", | |
"colab": {} | |
}, | |
"source": [ | |
"def R_square(pred, target):\n", | |
"    \"\"\"Pooled R^2 against a zero benchmark: 1 - SS_res / sum(target^2).\n", | |
"\n", | |
"    Note the total sum of squares is uncentred (target is not demeaned);\n", | |
"    pred and target are numpy arrays of equal shape.\n", | |
"    \"\"\"\n", | |
"\n", | |
"    return 1 - np.sum(np.square(target-pred)) / np.sum(np.square(target))" | |
], | |
"execution_count": 0, | |
"outputs": [] | |
}, | |
{ | |
"cell_type": "code", | |
"metadata": { | |
"id": "mjwmFuJev9Cm", | |
"colab_type": "code", | |
"colab": {} | |
}, | |
"source": [ | |
"#CA1 model: 95 characteristics, 6 latent factors, one 32-unit hidden beta layer\n", | |
"ca1 = Autoencoder(95, 6, [32]).cuda()\n", | |
"\n", | |
"loss_fn = nn.MSELoss()\n", | |
"\n", | |
"decay = 1e-5 # strength of the hand-rolled L1 (LASSO) penalty added in train()\n", | |
"\n", | |
"optimizer = optim.Adam(ca1.parameters(), lr=1e-3, betas=(0.9,0.999), eps=1e-8)\n", | |
"\n", | |
"epoch = 0\n", | |
"\n", | |
"#resume from the checkpoint when one exists (Colab sessions get pre-empted)\n", | |
"if os.path.exists(\"./ck.pt\"):\n", | |
"    checkpoint = t.load(\"./ck.pt\")\n", | |
"    ca1.load_state_dict(checkpoint['model_state_dict'])\n", | |
"    optimizer.load_state_dict(checkpoint['optimizer_state_dict'])\n", | |
"    #the stored epoch was fully completed before saving (the training loop\n", | |
"    #checkpoints at the END of each epoch), so restart at the next one rather\n", | |
"    #than repeating it\n", | |
"    epoch = checkpoint['epoch'] + 1" | |
], | |
"execution_count": 0, | |
"outputs": [] | |
}, | |
{ | |
"cell_type": "code", | |
"metadata": { | |
"id": "10MpeO_pHqlG", | |
"colab_type": "code", | |
"colab": {} | |
}, | |
"source": [ | |
"def train(ca1, loader, optimizer, epoch, r_square_list):\n", | |
"    '''\n", | |
"    Run one full pass over `loader`, updating `ca1` in place.\n", | |
"\n", | |
"    Minimises MSE plus a manual L1 penalty (weight `decay`, a module-level\n", | |
"    global, as are `loss_fn` and `R_square`). Appends one in-sample R^2 per\n", | |
"    call, computed on the LAST batch only (the loop variables after the loop).\n", | |
"    '''\n", | |
"    ca1.train()\n", | |
"    for step, (batch_x, batch_y) in enumerate(loader):\n", | |
"        optimizer.zero_grad()\n", | |
"\n", | |
"        # prepare the data: r must be a 1*N row vector for Autoencoder.forward\n", | |
"        z = batch_x\n", | |
"        r = batch_y[np.newaxis, ...]\n", | |
"        # forward\n", | |
"        r_pred = ca1(z, r)\n", | |
"        loss = loss_fn(r_pred, r)\n", | |
"        for param in ca1.parameters():\n", | |
"            loss += decay * t.sum(t.abs(param.float())) # torch has no built-in L1 regularisation, so it is added manually\n", | |
"\n", | |
"        loss.backward()\n", | |
"        optimizer.step()\n", | |
"        if step % 250 == 0:\n", | |
"            print('Train Epoch: {} [{}/{} ({:.0f}%)]\\tLoss: {:.6f}'.format(\n", | |
"                epoch, step * len(batch_x), len(loader.dataset),\n", | |
"                100. * step / len(loader), loss.item()))\n", | |
"    r_square_list.append(R_square(r_pred.detach().cpu().numpy(), r.detach().cpu().numpy()))\n", | |
"    print('Insample R^2 for iter: %d is %f' % (epoch+1, np.mean(r_square_list)))\n", | |
"    print(\"\\n\")" | |
], | |
"execution_count": 0, | |
"outputs": [] | |
}, | |
{ | |
"cell_type": "code", | |
"metadata": { | |
"id": "p5h5yMN7I8Ie", | |
"colab_type": "code", | |
"colab": {} | |
}, | |
"source": [ | |
"def test(ca1, test_loader, epoch, r_square_list):\n", | |
"    '''\n", | |
"    Evaluate on the validation window: accumulate MSE and per-batch R^2\n", | |
"    (appended to r_square_list) without gradient tracking.\n", | |
"    '''\n", | |
"    with t.no_grad():\n", | |
"        ca1.eval()\n", | |
"        test_loss=0\n", | |
"        for step, (batch_x, batch_y) in enumerate(test_loader):\n", | |
"            z = batch_x\n", | |
"            r = batch_y[np.newaxis, ...]\n", | |
"            r_pred = ca1(z, r)\n", | |
"            r_square_list.append(R_square(r_pred.detach().cpu().numpy(), r.detach().cpu().numpy()))\n", | |
"            test_loss += loss_fn(r_pred, r).item()\n", | |
"            if step % 250 == 0:\n", | |
"                print('Test set: Average loss: {:f}'.format(test_loss))\n", | |
"    print('Test-sample R^2 for iter: %d is %f' % (epoch+1, np.mean(r_square_list)))\n", | |
"    print(\"\\n\")" | |
], | |
"execution_count": 0, | |
"outputs": [] | |
}, | |
{ | |
"cell_type": "code", | |
"metadata": { | |
"id": "gTkaf8M1xDVR", | |
"colab_type": "code", | |
"colab": {} | |
}, | |
"source": [ | |
"def out(ca1, test_loader, epoch, r_square_list):\n", | |
"    '''\n", | |
"    Evaluate on the out-of-sample window: append per-batch R^2 to\n", | |
"    r_square_list without gradient tracking (no loss is printed here).\n", | |
"    '''\n", | |
"    with t.no_grad():\n", | |
"        ca1.eval()\n", | |
"        test_loss=0\n", | |
"        for step, (batch_x, batch_y) in enumerate(test_loader):\n", | |
"            z = batch_x\n", | |
"            r = batch_y[np.newaxis, ...]\n", | |
"            r_pred = ca1(z, r)\n", | |
"            r_square_list.append(R_square(r_pred.detach().cpu().numpy(), r.detach().cpu().numpy()))\n", | |
"    print('Out-sample R^2 for iter: %d is %f' % (epoch+1, np.mean(r_square_list)))\n", | |
"    print(\"\\n\")" | |
], | |
"execution_count": 0, | |
"outputs": [] | |
}, | |
{ | |
"cell_type": "code", | |
"metadata": { | |
"id": "IRoviu_Fuyls", | |
"colab_type": "code", | |
"colab": {} | |
}, | |
"source": [ | |
"def train_expansive(ca1, optimizer, epoch, r_square_list, test_rsq_list, out_rsq_list):\n", | |
"    '''\n", | |
"    Expanding-window scheme: for each refit year, train on all data from 1957\n", | |
"    up to that year, validate on the following 12 years, and score the single\n", | |
"    year after the validation window out of sample. Reads the module-level\n", | |
"    ranked_train_df / train_df frames; DATE is an int of the form YYYYMMDD,\n", | |
"    so YYYY0000 bounds select whole calendar years.\n", | |
"    '''\n", | |
"    begin_yr = 1957\n", | |
"    train_begin_yr = 1974\n", | |
"    train_end_yr = 2004\n", | |
"    #Training dataset\n", | |
"    for year_end in range(train_begin_yr, train_end_yr+1):\n", | |
"        print(\"Training\" + \" \" + str(year_end)+\"\\n\")\n", | |
"        begin_date = int(str(begin_yr)+\"0000\")\n", | |
"        end_date = int(str(year_end)+\"0000\")\n", | |
"        \n", | |
"        X = ranked_train_df[(ranked_train_df['DATE'] <= end_date) & (ranked_train_df['DATE'] >= begin_date)].drop(['PERMNO', 'DATE', 'return'], axis=1).values\n", | |
"        y = train_df[(train_df['DATE']<=end_date) & (train_df['DATE']>=begin_date)]['return'].values\n", | |
"        X = t.from_numpy(X).float().cuda()\n", | |
"        y = t.from_numpy(y).float().cuda()\n", | |
"        torch_dataset = Data.TensorDataset(X, y)\n", | |
"        \n", | |
"        test_begin_year = year_end\n", | |
"        test_end_year = test_begin_year + 12\n", | |
"        test_begin_date = int(str(test_begin_year)+\"0000\")\n", | |
"        test_end_date = int(str(test_end_year)+\"0000\")\n", | |
"        \n", | |
"        #Testing Dataset: the 12 years immediately after the training window\n", | |
"        X_test = ranked_train_df[(ranked_train_df['DATE']>test_begin_date) & (ranked_train_df['DATE']<=test_end_date) ].drop(['PERMNO', 'DATE', 'return'], axis=1).values\n", | |
"        y_test = train_df[(train_df['DATE']>test_begin_date) & (train_df['DATE']<=test_end_date) ]['return'].values\n", | |
"        X_test = t.from_numpy(X_test).float().cuda()\n", | |
"        y_test = t.from_numpy(y_test).float().cuda()\n", | |
"        test_dataset = Data.TensorDataset(X_test, y_test)\n", | |
"\n", | |
"        out_begin_date = test_end_date\n", | |
"        out_end_year = test_end_year + 1\n", | |
"        out_end_date = int(str(out_end_year)+\"0000\")\n", | |
"        #out-of-sample dataset: the year after the test window.\n", | |
"        #NOTE(review): the +1 on out_end_date shifts the integer bound by one\n", | |
"        #day-code only (YYYY0001); presumably intended as an inclusive bound -- confirm\n", | |
"        X_out = ranked_train_df[(ranked_train_df['DATE']>=out_begin_date) & ((ranked_train_df['DATE']<=(out_end_date+1))) ].drop(['PERMNO', 'DATE', 'return'], axis=1).values\n", | |
"        y_out = train_df[(train_df['DATE']>=out_begin_date) & (train_df['DATE']<=out_end_date+1)]['return'].values\n", | |
"        X_out = t.from_numpy(X_out).float().cuda()\n", | |
"        y_out = t.from_numpy(y_out).float().cuda()\n", | |
"        out_dataset = Data.TensorDataset(X_out, y_out)\n", | |
"        \n", | |
"        loader = Data.DataLoader(\n", | |
"            dataset=torch_dataset,\n", | |
"            batch_size=10000,\n", | |
"            shuffle=True,\n", | |
"            num_workers=0)\n", | |
"        \n", | |
"        #Test Sample Estimation Dataset (single full-window batch, no shuffle)\n", | |
"        test_loader = Data.DataLoader(\n", | |
"            dataset=test_dataset,\n", | |
"            batch_size=len(test_dataset),\n", | |
"            shuffle=False,\n", | |
"            num_workers=0)\n", | |
"        \n", | |
"        #Out-of-sample Estimation Dataset (single full-window batch, no shuffle)\n", | |
"        out_loader = Data.DataLoader(\n", | |
"            dataset=out_dataset,\n", | |
"            batch_size=len(out_dataset),\n", | |
"            shuffle=False,\n", | |
"            num_workers=0)\n", | |
"\n", | |
"        train(ca1, loader, optimizer, epoch, r_square_list)\n", | |
"        test(ca1, test_loader , epoch, test_rsq_list)\n", | |
"        out(ca1, out_loader , epoch, out_rsq_list)" | |
], | |
"execution_count": 0, | |
"outputs": [] | |
}, | |
{ | |
"cell_type": "code", | |
"metadata": { | |
"id": "7NduRBhuK136", | |
"colab_type": "code", | |
"colab": {} | |
}, | |
"source": [ | |
"def save_checkpoint(ca1, optimizer, epoch, path=\"./ck.pt\"):\n", | |
"    \"\"\"Persist a resumable training checkpoint.\n", | |
"\n", | |
"    Saves the current epoch together with the model and optimizer\n", | |
"    state dicts so a later t.load(path) can restore training exactly\n", | |
"    where it stopped.  `path` defaults to the original \"./ck.pt\",\n", | |
"    so existing callers are unaffected.\n", | |
"    \"\"\"\n", | |
"    t.save({\n", | |
"        'epoch': epoch,\n", | |
"        'model_state_dict': ca1.state_dict(),\n", | |
"        'optimizer_state_dict': optimizer.state_dict()\n", | |
"    }, path)" | |
], | |
"execution_count": 0, | |
"outputs": [] | |
}, | |
{ | |
"cell_type": "code", | |
"metadata": { | |
"scrolled": true, | |
"id": "aO9LkKtkPUfj", | |
"colab_type": "code", | |
"outputId": "8319cca2-58f7-405c-e2eb-ef6f1a442af8", | |
"colab": { | |
"base_uri": "https://localhost:8080/", | |
"height": 1000 | |
} | |
}, | |
"source": [ | |
"# Resume training from the `epoch` restored from the checkpoint cell above\n", | |
"# (intentional reuse of the name `epoch` as the loop variable) up to 100.\n", | |
"for epoch in range(epoch, 100):\n", | |
"    r_square_list = []\n", | |
"    test_rsq_list = []\n", | |
"    out_rsq_list = []\n", | |
"    train_expansive(ca1, optimizer, epoch, r_square_list, test_rsq_list, out_rsq_list)\n", | |
"    # Checkpoint after every epoch so an interrupted Colab session can resume.\n", | |
"    print(\"Checkpointing ...\")\n", | |
"    save_checkpoint(ca1, optimizer, epoch)" | |
], | |
"execution_count": 0, | |
"outputs": [ | |
{ | |
"output_type": "stream", | |
"text": [ | |
"Training 1974\n", | |
"\n", | |
"Train Epoch: 14 [0/415222 (0%)]\tLoss: 0.011569\n", | |
"Insample R^2 for iter: 15 is 0.617356\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.007993\n", | |
"Test-sample R^2 for iter: 15 is 0.688269\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 15 is 0.643267\n", | |
"\n", | |
"\n", | |
"Training 1975\n", | |
"\n", | |
"Train Epoch: 14 [0/475849 (0%)]\tLoss: 0.006288\n", | |
"Insample R^2 for iter: 15 is 0.652540\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.008069\n", | |
"Test-sample R^2 for iter: 15 is 0.691451\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 15 is 0.613886\n", | |
"\n", | |
"\n", | |
"Training 1976\n", | |
"\n", | |
"Train Epoch: 14 [0/534401 (0%)]\tLoss: 0.006819\n", | |
"Insample R^2 for iter: 15 is 0.661362\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.008445\n", | |
"Test-sample R^2 for iter: 15 is 0.688222\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 15 is 0.632177\n", | |
"\n", | |
"\n", | |
"Training 1977\n", | |
"\n", | |
"Train Epoch: 14 [0/593250 (0%)]\tLoss: 0.006109\n", | |
"Insample R^2 for iter: 15 is 0.668915\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.007648\n", | |
"Test-sample R^2 for iter: 15 is 0.695320\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 15 is 0.657974\n", | |
"\n", | |
"\n", | |
"Training 1978\n", | |
"\n", | |
"Train Epoch: 14 [0/652479 (0%)]\tLoss: 0.006131\n", | |
"Insample R^2 for iter: 15 is 0.676912\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.007804\n", | |
"Test-sample R^2 for iter: 15 is 0.699633\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 15 is 0.662804\n", | |
"\n", | |
"\n", | |
"Training 1979\n", | |
"\n", | |
"Train Epoch: 14 [0/710435 (0%)]\tLoss: 0.005751\n", | |
"Insample R^2 for iter: 15 is 0.683200\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.008211\n", | |
"Test-sample R^2 for iter: 15 is 0.701923\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 15 is 0.663845\n", | |
"\n", | |
"\n", | |
"Training 1980\n", | |
"\n", | |
"Train Epoch: 14 [0/767839 (0%)]\tLoss: 0.005472\n", | |
"Insample R^2 for iter: 15 is 0.688910\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.009009\n", | |
"Test-sample R^2 for iter: 15 is 0.703611\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 15 is 0.660794\n", | |
"\n", | |
"\n", | |
"Training 1981\n", | |
"\n", | |
"Train Epoch: 14 [0/825704 (0%)]\tLoss: 0.006428\n", | |
"Insample R^2 for iter: 15 is 0.694098\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.010003\n", | |
"Test-sample R^2 for iter: 15 is 0.702782\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 15 is 0.661785\n", | |
"\n", | |
"\n", | |
"Training 1982\n", | |
"\n", | |
"Train Epoch: 14 [0/887496 (0%)]\tLoss: 0.006266\n", | |
"Insample R^2 for iter: 15 is 0.698593\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.010151\n", | |
"Test-sample R^2 for iter: 15 is 0.702431\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 15 is 0.670856\n", | |
"\n", | |
"\n", | |
"Training 1983\n", | |
"\n", | |
"Train Epoch: 14 [0/951438 (0%)]\tLoss: 0.005644\n", | |
"Insample R^2 for iter: 15 is 0.702945\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.009331\n", | |
"Test-sample R^2 for iter: 15 is 0.703921\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 15 is 0.683577\n", | |
"\n", | |
"\n", | |
"Training 1984\n", | |
"\n", | |
"Train Epoch: 14 [0/1019788 (0%)]\tLoss: 0.005661\n", | |
"Insample R^2 for iter: 15 is 0.707165\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.008525\n", | |
"Test-sample R^2 for iter: 15 is 0.706898\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 15 is 0.692728\n", | |
"\n", | |
"\n", | |
"Training 1985\n", | |
"\n", | |
"Train Epoch: 14 [0/1094741 (0%)]\tLoss: 0.005670\n", | |
"Insample R^2 for iter: 15 is 0.711299\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.008047\n", | |
"Test-sample R^2 for iter: 15 is 0.710963\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 15 is 0.702559\n", | |
"\n", | |
"\n", | |
"Training 1986\n", | |
"\n", | |
"Train Epoch: 14 [0/1169628 (0%)]\tLoss: 0.006286\n", | |
"Insample R^2 for iter: 15 is 0.715561\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.008295\n", | |
"Test-sample R^2 for iter: 15 is 0.713812\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 15 is 0.702776\n", | |
"\n", | |
"\n", | |
"Training 1987\n", | |
"\n", | |
"Train Epoch: 14 [0/1246063 (0%)]\tLoss: 0.006881\n", | |
"Insample R^2 for iter: 15 is 0.719060\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.008260\n", | |
"Test-sample R^2 for iter: 15 is 0.716959\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 15 is 0.707487\n", | |
"\n", | |
"\n", | |
"Training 1988\n", | |
"\n", | |
"Train Epoch: 14 [0/1328981 (0%)]\tLoss: 0.006765\n", | |
"Insample R^2 for iter: 15 is 0.721536\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.008570\n", | |
"Test-sample R^2 for iter: 15 is 0.719817\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 15 is 0.707443\n", | |
"\n", | |
"\n", | |
"Training 1989\n", | |
"\n", | |
"Train Epoch: 14 [0/1413142 (0%)]\tLoss: 0.005334\n", | |
"Insample R^2 for iter: 15 is 0.724378\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.010005\n", | |
"Test-sample R^2 for iter: 15 is 0.721292\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 15 is 0.705859\n", | |
"\n", | |
"\n", | |
"Training 1990\n", | |
"\n", | |
"Train Epoch: 14 [0/1494994 (0%)]\tLoss: 0.005765\n", | |
"Insample R^2 for iter: 15 is 0.727219\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.011330\n", | |
"Test-sample R^2 for iter: 15 is 0.721883\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 15 is 0.710861\n", | |
"\n", | |
"\n", | |
"Training 1991\n", | |
"\n", | |
"Train Epoch: 14 [0/1575765 (0%)]\tLoss: 0.005522\n", | |
"Insample R^2 for iter: 15 is 0.729890\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.010912\n", | |
"Test-sample R^2 for iter: 15 is 0.723081\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 15 is 0.713792\n", | |
"\n", | |
"\n", | |
"Training 1992\n", | |
"\n", | |
"Train Epoch: 14 [0/1655543 (0%)]\tLoss: 0.005916\n", | |
"Insample R^2 for iter: 15 is 0.732061\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.010955\n", | |
"Test-sample R^2 for iter: 15 is 0.723646\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 15 is 0.715586\n", | |
"\n", | |
"\n", | |
"Training 1993\n", | |
"\n", | |
"Train Epoch: 14 [0/1737145 (0%)]\tLoss: 0.006173\n", | |
"Insample R^2 for iter: 15 is 0.733718\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.009929\n", | |
"Test-sample R^2 for iter: 15 is 0.724811\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 15 is 0.711324\n", | |
"\n", | |
"\n", | |
"Training 1994\n", | |
"\n", | |
"Train Epoch: 14 [0/1823681 (0%)]\tLoss: 0.006492\n", | |
"Insample R^2 for iter: 15 is 0.735336\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.009980\n", | |
"Test-sample R^2 for iter: 15 is 0.725385\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 15 is 0.706335\n", | |
"\n", | |
"\n", | |
"Training 1995\n", | |
"\n", | |
"Train Epoch: 14 [0/1919298 (0%)]\tLoss: 0.006853\n", | |
"Insample R^2 for iter: 15 is 0.736903\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.010112\n", | |
"Test-sample R^2 for iter: 15 is 0.725669\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 15 is 0.700626\n", | |
"\n", | |
"\n", | |
"Training 1996\n", | |
"\n", | |
"Train Epoch: 14 [0/2017668 (0%)]\tLoss: 0.005218\n", | |
"Insample R^2 for iter: 15 is 0.738746\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.010075\n", | |
"Test-sample R^2 for iter: 15 is 0.725826\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 15 is 0.701528\n", | |
"\n", | |
"\n", | |
"Training 1997\n", | |
"\n", | |
"Train Epoch: 14 [0/2121588 (0%)]\tLoss: 0.006356\n", | |
"Insample R^2 for iter: 15 is 0.740611\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.010434\n", | |
"Test-sample R^2 for iter: 15 is 0.725676\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 15 is 0.700731\n", | |
"\n", | |
"\n", | |
"Training 1998\n", | |
"\n", | |
"Train Epoch: 14 [0/2229613 (0%)]\tLoss: 0.007510\n", | |
"Insample R^2 for iter: 15 is 0.742498\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.011179\n", | |
"Test-sample R^2 for iter: 15 is 0.725499\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 15 is 0.702534\n", | |
"\n", | |
"\n", | |
"Training 1999\n", | |
"\n", | |
"Train Epoch: 14 [0/2336372 (0%)]\tLoss: 0.007796\n", | |
"Insample R^2 for iter: 15 is 0.744068\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.010764\n", | |
"Test-sample R^2 for iter: 15 is 0.725260\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 15 is 0.703023\n", | |
"\n", | |
"\n", | |
"Training 2000\n", | |
"\n", | |
"Train Epoch: 14 [0/2437321 (0%)]\tLoss: 0.005948\n", | |
"Insample R^2 for iter: 15 is 0.745482\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.010367\n", | |
"Test-sample R^2 for iter: 15 is 0.724660\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 15 is 0.703292\n", | |
"\n", | |
"\n", | |
"Training 2001\n", | |
"\n", | |
"Train Epoch: 14 [0/2536121 (0%)]\tLoss: 0.007171\n", | |
"Train Epoch: 14 [2500000/2536121 (98%)]\tLoss: 0.007451\n", | |
"Insample R^2 for iter: 15 is 0.746591\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.009239\n", | |
"Test-sample R^2 for iter: 15 is 0.723877\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 15 is 0.698854\n", | |
"\n", | |
"\n", | |
"Training 2002\n", | |
"\n", | |
"Train Epoch: 14 [0/2628379 (0%)]\tLoss: 0.006617\n", | |
"Train Epoch: 14 [2500000/2628379 (95%)]\tLoss: 0.008679\n", | |
"Insample R^2 for iter: 15 is 0.747305\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.008068\n", | |
"Test-sample R^2 for iter: 15 is 0.722980\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 15 is 0.696642\n", | |
"\n", | |
"\n", | |
"Training 2003\n", | |
"\n", | |
"Train Epoch: 14 [0/2714364 (0%)]\tLoss: 0.006816\n", | |
"Train Epoch: 14 [2500000/2714364 (92%)]\tLoss: 0.017163\n", | |
"Insample R^2 for iter: 15 is 0.748236\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.008446\n", | |
"Test-sample R^2 for iter: 15 is 0.720709\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 15 is 0.696246\n", | |
"\n", | |
"\n", | |
"Training 2004\n", | |
"\n", | |
"Train Epoch: 14 [0/2795471 (0%)]\tLoss: 0.007348\n", | |
"Train Epoch: 14 [2500000/2795471 (89%)]\tLoss: 0.007906\n", | |
"Insample R^2 for iter: 15 is 0.749230\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.009132\n", | |
"Test-sample R^2 for iter: 15 is 0.717184\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 15 is 0.696535\n", | |
"\n", | |
"\n", | |
"Checkpointing ...\n", | |
"Training 1974\n", | |
"\n", | |
"Train Epoch: 15 [0/415222 (0%)]\tLoss: 0.016229\n", | |
"Insample R^2 for iter: 16 is 0.596179\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.008213\n", | |
"Test-sample R^2 for iter: 16 is 0.679717\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 16 is 0.652917\n", | |
"\n", | |
"\n", | |
"Training 1975\n", | |
"\n", | |
"Train Epoch: 15 [0/475849 (0%)]\tLoss: 0.005822\n", | |
"Insample R^2 for iter: 16 is 0.641223\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.007369\n", | |
"Test-sample R^2 for iter: 16 is 0.700421\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 16 is 0.640637\n", | |
"\n", | |
"\n", | |
"Training 1976\n", | |
"\n", | |
"Train Epoch: 15 [0/534401 (0%)]\tLoss: 0.006189\n", | |
"Insample R^2 for iter: 16 is 0.652780\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.007274\n", | |
"Test-sample R^2 for iter: 16 is 0.708915\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 16 is 0.677407\n", | |
"\n", | |
"\n", | |
"Training 1977\n", | |
"\n", | |
"Train Epoch: 15 [0/593250 (0%)]\tLoss: 0.006609\n", | |
"Insample R^2 for iter: 16 is 0.662499\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.007296\n", | |
"Test-sample R^2 for iter: 16 is 0.714092\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 16 is 0.705850\n", | |
"\n", | |
"\n", | |
"Training 1978\n", | |
"\n", | |
"Train Epoch: 15 [0/652479 (0%)]\tLoss: 0.005802\n", | |
"Insample R^2 for iter: 16 is 0.671816\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.007312\n", | |
"Test-sample R^2 for iter: 16 is 0.718224\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 16 is 0.709849\n", | |
"\n", | |
"\n", | |
"Training 1979\n", | |
"\n", | |
"Train Epoch: 15 [0/710435 (0%)]\tLoss: 0.005861\n", | |
"Insample R^2 for iter: 16 is 0.679044\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.007560\n", | |
"Test-sample R^2 for iter: 16 is 0.721208\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 16 is 0.704147\n", | |
"\n", | |
"\n", | |
"Training 1980\n", | |
"\n", | |
"Train Epoch: 15 [0/767839 (0%)]\tLoss: 0.005730\n", | |
"Insample R^2 for iter: 16 is 0.685609\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.008584\n", | |
"Test-sample R^2 for iter: 16 is 0.722069\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 16 is 0.697339\n", | |
"\n", | |
"\n", | |
"Training 1981\n", | |
"\n", | |
"Train Epoch: 15 [0/825704 (0%)]\tLoss: 0.006041\n", | |
"Insample R^2 for iter: 16 is 0.691242\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.009352\n", | |
"Test-sample R^2 for iter: 16 is 0.721400\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 16 is 0.697469\n", | |
"\n", | |
"\n", | |
"Training 1982\n", | |
"\n", | |
"Train Epoch: 15 [0/887496 (0%)]\tLoss: 0.005690\n", | |
"Insample R^2 for iter: 16 is 0.696001\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.009655\n", | |
"Test-sample R^2 for iter: 16 is 0.720609\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 16 is 0.704799\n", | |
"\n", | |
"\n", | |
"Training 1983\n", | |
"\n", | |
"Train Epoch: 15 [0/951438 (0%)]\tLoss: 0.005975\n", | |
"Insample R^2 for iter: 16 is 0.700804\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.008636\n", | |
"Test-sample R^2 for iter: 16 is 0.722387\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 16 is 0.714922\n", | |
"\n", | |
"\n", | |
"Training 1984\n", | |
"\n", | |
"Train Epoch: 15 [0/1019788 (0%)]\tLoss: 0.007610\n", | |
"Insample R^2 for iter: 16 is 0.705358\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.008006\n", | |
"Test-sample R^2 for iter: 16 is 0.725143\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 16 is 0.721045\n", | |
"\n", | |
"\n", | |
"Training 1985\n", | |
"\n", | |
"Train Epoch: 15 [0/1094741 (0%)]\tLoss: 0.005905\n", | |
"Insample R^2 for iter: 16 is 0.709785\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.008153\n", | |
"Test-sample R^2 for iter: 16 is 0.727421\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 16 is 0.728032\n", | |
"\n", | |
"\n", | |
"Training 1986\n", | |
"\n", | |
"Train Epoch: 15 [0/1169628 (0%)]\tLoss: 0.005482\n", | |
"Insample R^2 for iter: 16 is 0.714182\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.008187\n", | |
"Test-sample R^2 for iter: 16 is 0.729257\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 16 is 0.727141\n", | |
"\n", | |
"\n", | |
"Training 1987\n", | |
"\n", | |
"Train Epoch: 15 [0/1246063 (0%)]\tLoss: 0.005484\n", | |
"Insample R^2 for iter: 16 is 0.718108\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.008477\n", | |
"Test-sample R^2 for iter: 16 is 0.730846\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 16 is 0.729114\n", | |
"\n", | |
"\n", | |
"Training 1988\n", | |
"\n", | |
"Train Epoch: 15 [0/1328981 (0%)]\tLoss: 0.006215\n", | |
"Insample R^2 for iter: 16 is 0.720674\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.008449\n", | |
"Test-sample R^2 for iter: 16 is 0.733003\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 16 is 0.728888\n", | |
"\n", | |
"\n", | |
"Training 1989\n", | |
"\n", | |
"Train Epoch: 15 [0/1413142 (0%)]\tLoss: 0.006362\n", | |
"Insample R^2 for iter: 16 is 0.723482\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.010596\n", | |
"Test-sample R^2 for iter: 16 is 0.732706\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 16 is 0.725558\n", | |
"\n", | |
"\n", | |
"Training 1990\n", | |
"\n", | |
"Train Epoch: 15 [0/1494994 (0%)]\tLoss: 0.005113\n", | |
"Insample R^2 for iter: 16 is 0.726410\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.010977\n", | |
"Test-sample R^2 for iter: 16 is 0.733118\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 16 is 0.730109\n", | |
"\n", | |
"\n", | |
"Training 1991\n", | |
"\n", | |
"Train Epoch: 15 [0/1575765 (0%)]\tLoss: 0.007434\n", | |
"Insample R^2 for iter: 16 is 0.729158\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.011257\n", | |
"Test-sample R^2 for iter: 16 is 0.733241\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 16 is 0.731873\n", | |
"\n", | |
"\n", | |
"Training 1992\n", | |
"\n", | |
"Train Epoch: 15 [0/1655543 (0%)]\tLoss: 0.006213\n", | |
"Insample R^2 for iter: 16 is 0.731485\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.010529\n", | |
"Test-sample R^2 for iter: 16 is 0.733815\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 16 is 0.732582\n", | |
"\n", | |
"\n", | |
"Training 1993\n", | |
"\n", | |
"Train Epoch: 15 [0/1737145 (0%)]\tLoss: 0.006189\n", | |
"Insample R^2 for iter: 16 is 0.733293\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.009694\n", | |
"Test-sample R^2 for iter: 16 is 0.734772\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 16 is 0.727377\n", | |
"\n", | |
"\n", | |
"Training 1994\n", | |
"\n", | |
"Train Epoch: 15 [0/1823681 (0%)]\tLoss: 0.006569\n", | |
"Insample R^2 for iter: 16 is 0.735053\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.009952\n", | |
"Test-sample R^2 for iter: 16 is 0.734906\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 16 is 0.722018\n", | |
"\n", | |
"\n", | |
"Training 1995\n", | |
"\n", | |
"Train Epoch: 15 [0/1919298 (0%)]\tLoss: 0.006197\n", | |
"Insample R^2 for iter: 16 is 0.736618\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.009815\n", | |
"Test-sample R^2 for iter: 16 is 0.735115\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 16 is 0.712213\n", | |
"\n", | |
"\n", | |
"Training 1996\n", | |
"\n", | |
"Train Epoch: 15 [0/2017668 (0%)]\tLoss: 0.005896\n", | |
"Insample R^2 for iter: 16 is 0.738455\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.009724\n", | |
"Test-sample R^2 for iter: 16 is 0.735271\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 16 is 0.711400\n", | |
"\n", | |
"\n", | |
"Training 1997\n", | |
"\n", | |
"Train Epoch: 15 [0/2121588 (0%)]\tLoss: 0.006231\n", | |
"Insample R^2 for iter: 16 is 0.740348\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.010266\n", | |
"Test-sample R^2 for iter: 16 is 0.734914\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 16 is 0.711108\n", | |
"\n", | |
"\n", | |
"Training 1998\n", | |
"\n", | |
"Train Epoch: 15 [0/2229613 (0%)]\tLoss: 0.005627\n", | |
"Insample R^2 for iter: 16 is 0.742131\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.011199\n", | |
"Test-sample R^2 for iter: 16 is 0.734348\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 16 is 0.712873\n", | |
"\n", | |
"\n", | |
"Training 1999\n", | |
"\n", | |
"Train Epoch: 15 [0/2336372 (0%)]\tLoss: 0.007626\n", | |
"Insample R^2 for iter: 16 is 0.743705\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.011136\n", | |
"Test-sample R^2 for iter: 16 is 0.733395\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 16 is 0.713967\n", | |
"\n", | |
"\n", | |
"Training 2000\n", | |
"\n", | |
"Train Epoch: 15 [0/2437321 (0%)]\tLoss: 0.009701\n", | |
"Insample R^2 for iter: 16 is 0.745249\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.010423\n", | |
"Test-sample R^2 for iter: 16 is 0.732435\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 16 is 0.713190\n", | |
"\n", | |
"\n", | |
"Training 2001\n", | |
"\n", | |
"Train Epoch: 15 [0/2536121 (0%)]\tLoss: 0.006253\n", | |
"Train Epoch: 15 [2500000/2536121 (98%)]\tLoss: 0.006635\n", | |
"Insample R^2 for iter: 16 is 0.746375\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.009706\n", | |
"Test-sample R^2 for iter: 16 is 0.730839\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 16 is 0.705508\n", | |
"\n", | |
"\n", | |
"Training 2002\n", | |
"\n", | |
"Train Epoch: 15 [0/2628379 (0%)]\tLoss: 0.009370\n", | |
"Train Epoch: 15 [2500000/2628379 (95%)]\tLoss: 0.008954\n", | |
"Insample R^2 for iter: 16 is 0.747238\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.009868\n", | |
"Test-sample R^2 for iter: 16 is 0.727378\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 16 is 0.695985\n", | |
"\n", | |
"\n", | |
"Training 2003\n", | |
"\n", | |
"Train Epoch: 15 [0/2714364 (0%)]\tLoss: 0.006326\n", | |
"Train Epoch: 15 [2500000/2714364 (92%)]\tLoss: 0.006599\n", | |
"Insample R^2 for iter: 16 is 0.748260\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.008335\n", | |
"Test-sample R^2 for iter: 16 is 0.725110\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 16 is 0.696579\n", | |
"\n", | |
"\n", | |
"Training 2004\n", | |
"\n", | |
"Train Epoch: 15 [0/2795471 (0%)]\tLoss: 0.006271\n", | |
"Train Epoch: 15 [2500000/2795471 (89%)]\tLoss: 0.008344\n", | |
"Insample R^2 for iter: 16 is 0.749210\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.008609\n", | |
"Test-sample R^2 for iter: 16 is 0.722161\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 16 is 0.697299\n", | |
"\n", | |
"\n", | |
"Checkpointing ...\n", | |
"Training 1974\n", | |
"\n", | |
"Train Epoch: 16 [0/415222 (0%)]\tLoss: 0.011671\n", | |
"Insample R^2 for iter: 17 is 0.617919\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.007431\n", | |
"Test-sample R^2 for iter: 17 is 0.710187\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 17 is 0.680282\n", | |
"\n", | |
"\n", | |
"Training 1975\n", | |
"\n", | |
"Train Epoch: 16 [0/475849 (0%)]\tLoss: 0.005747\n", | |
"Insample R^2 for iter: 17 is 0.652067\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.007532\n", | |
"Test-sample R^2 for iter: 17 is 0.712574\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 17 is 0.660646\n", | |
"\n", | |
"\n", | |
"Training 1976\n", | |
"\n", | |
"Train Epoch: 16 [0/534401 (0%)]\tLoss: 0.007221\n", | |
"Insample R^2 for iter: 17 is 0.660394\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.007421\n", | |
"Test-sample R^2 for iter: 17 is 0.715159\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 17 is 0.687548\n", | |
"\n", | |
"\n", | |
"Training 1977\n", | |
"\n", | |
"Train Epoch: 16 [0/593250 (0%)]\tLoss: 0.006688\n", | |
"Insample R^2 for iter: 17 is 0.668336\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.007469\n", | |
"Test-sample R^2 for iter: 17 is 0.717180\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 17 is 0.708889\n", | |
"\n", | |
"\n", | |
"Training 1978\n", | |
"\n", | |
"Train Epoch: 16 [0/652479 (0%)]\tLoss: 0.005815\n", | |
"Insample R^2 for iter: 17 is 0.675973\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.007344\n", | |
"Test-sample R^2 for iter: 17 is 0.720464\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 17 is 0.711104\n", | |
"\n", | |
"\n", | |
"Training 1979\n", | |
"\n", | |
"Train Epoch: 16 [0/710435 (0%)]\tLoss: 0.006054\n", | |
"Insample R^2 for iter: 17 is 0.682130\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.008056\n", | |
"Test-sample R^2 for iter: 17 is 0.720186\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 17 is 0.705108\n", | |
"\n", | |
"\n", | |
"Training 1980\n", | |
"\n", | |
"Train Epoch: 16 [0/767839 (0%)]\tLoss: 0.005431\n", | |
"Insample R^2 for iter: 17 is 0.688093\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.009356\n", | |
"Test-sample R^2 for iter: 17 is 0.717690\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 17 is 0.697241\n", | |
"\n", | |
"\n", | |
"Training 1981\n", | |
"\n", | |
"Train Epoch: 16 [0/825704 (0%)]\tLoss: 0.005897\n", | |
"Insample R^2 for iter: 17 is 0.693342\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.010149\n", | |
"Test-sample R^2 for iter: 17 is 0.714548\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 17 is 0.692611\n", | |
"\n", | |
"\n", | |
"Training 1982\n", | |
"\n", | |
"Train Epoch: 16 [0/887496 (0%)]\tLoss: 0.006201\n", | |
"Insample R^2 for iter: 17 is 0.697738\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.010297\n", | |
"Test-sample R^2 for iter: 17 is 0.712409\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 17 is 0.695844\n", | |
"\n", | |
"\n", | |
"Training 1983\n", | |
"\n", | |
"Train Epoch: 16 [0/951438 (0%)]\tLoss: 0.005856\n", | |
"Insample R^2 for iter: 17 is 0.702159\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.008913\n", | |
"Test-sample R^2 for iter: 17 is 0.714166\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 17 is 0.706587\n", | |
"\n", | |
"\n", | |
"Training 1984\n", | |
"\n", | |
"Train Epoch: 16 [0/1019788 (0%)]\tLoss: 0.005808\n", | |
"Insample R^2 for iter: 17 is 0.706404\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.008483\n", | |
"Test-sample R^2 for iter: 17 is 0.716329\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 17 is 0.713243\n", | |
"\n", | |
"\n", | |
"Training 1985\n", | |
"\n", | |
"Train Epoch: 16 [0/1094741 (0%)]\tLoss: 0.008782\n", | |
"Insample R^2 for iter: 17 is 0.710635\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.008236\n", | |
"Test-sample R^2 for iter: 17 is 0.719129\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 17 is 0.720551\n", | |
"\n", | |
"\n", | |
"Training 1986\n", | |
"\n", | |
"Train Epoch: 16 [0/1169628 (0%)]\tLoss: 0.005556\n", | |
"Insample R^2 for iter: 17 is 0.714954\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.007884\n", | |
"Test-sample R^2 for iter: 17 is 0.722310\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 17 is 0.719886\n", | |
"\n", | |
"\n", | |
"Training 1987\n", | |
"\n", | |
"Train Epoch: 16 [0/1246063 (0%)]\tLoss: 0.006290\n", | |
"Insample R^2 for iter: 17 is 0.718784\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.008238\n", | |
"Test-sample R^2 for iter: 17 is 0.724896\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 17 is 0.723765\n", | |
"\n", | |
"\n", | |
"Training 1988\n", | |
"\n", | |
"Train Epoch: 16 [0/1328981 (0%)]\tLoss: 0.006689\n", | |
"Insample R^2 for iter: 17 is 0.721310\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.008596\n", | |
"Test-sample R^2 for iter: 17 is 0.727176\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 17 is 0.722782\n", | |
"\n", | |
"\n", | |
"Training 1989\n", | |
"\n", | |
"Train Epoch: 16 [0/1413142 (0%)]\tLoss: 0.006059\n", | |
"Insample R^2 for iter: 17 is 0.723995\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.009316\n", | |
"Test-sample R^2 for iter: 17 is 0.729296\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 17 is 0.722111\n", | |
"\n", | |
"\n", | |
"Training 1990\n", | |
"\n", | |
"Train Epoch: 16 [0/1494994 (0%)]\tLoss: 0.005832\n", | |
"Insample R^2 for iter: 17 is 0.727031\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.011136\n", | |
"Test-sample R^2 for iter: 17 is 0.729687\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 17 is 0.726566\n", | |
"\n", | |
"\n", | |
"Training 1991\n", | |
"\n", | |
"Train Epoch: 16 [0/1575765 (0%)]\tLoss: 0.008340\n", | |
"Insample R^2 for iter: 17 is 0.729896\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.011511\n", | |
"Test-sample R^2 for iter: 17 is 0.729669\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 17 is 0.726969\n", | |
"\n", | |
"\n", | |
"Training 1992\n", | |
"\n", | |
"Train Epoch: 16 [0/1655543 (0%)]\tLoss: 0.010153\n", | |
"Insample R^2 for iter: 17 is 0.732050\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.010629\n", | |
"Test-sample R^2 for iter: 17 is 0.730303\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 17 is 0.728494\n", | |
"\n", | |
"\n", | |
"Training 1993\n", | |
"\n", | |
"Train Epoch: 16 [0/1737145 (0%)]\tLoss: 0.005272\n", | |
"Insample R^2 for iter: 17 is 0.734042\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.009946\n", | |
"Test-sample R^2 for iter: 17 is 0.731113\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 17 is 0.724435\n", | |
"\n", | |
"\n", | |
"Training 1994\n", | |
"\n", | |
"Train Epoch: 16 [0/1823681 (0%)]\tLoss: 0.006979\n", | |
"Insample R^2 for iter: 17 is 0.735739\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.009502\n", | |
"Test-sample R^2 for iter: 17 is 0.731986\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 17 is 0.719590\n", | |
"\n", | |
"\n", | |
"Training 1995\n", | |
"\n", | |
"Train Epoch: 16 [0/1919298 (0%)]\tLoss: 0.007171\n", | |
"Insample R^2 for iter: 17 is 0.737391\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.009518\n", | |
"Test-sample R^2 for iter: 17 is 0.732687\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 17 is 0.711998\n", | |
"\n", | |
"\n", | |
"Training 1996\n", | |
"\n", | |
"Train Epoch: 16 [0/2017668 (0%)]\tLoss: 0.005789\n", | |
"Insample R^2 for iter: 17 is 0.739181\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.010064\n", | |
"Test-sample R^2 for iter: 17 is 0.732551\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 17 is 0.711741\n", | |
"\n", | |
"\n", | |
"Training 1997\n", | |
"\n", | |
"Train Epoch: 16 [0/2121588 (0%)]\tLoss: 0.005559\n", | |
"Insample R^2 for iter: 17 is 0.740910\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.010159\n", | |
"Test-sample R^2 for iter: 17 is 0.732425\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 17 is 0.710680\n", | |
"\n", | |
"\n", | |
"Training 1998\n", | |
"\n", | |
"Train Epoch: 16 [0/2229613 (0%)]\tLoss: 0.005209\n", | |
"Insample R^2 for iter: 17 is 0.742749\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.011604\n", | |
"Test-sample R^2 for iter: 17 is 0.731555\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 17 is 0.712418\n", | |
"\n", | |
"\n", | |
"Training 1999\n", | |
"\n", | |
"Train Epoch: 16 [0/2336372 (0%)]\tLoss: 0.007230\n", | |
"Insample R^2 for iter: 17 is 0.744170\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.010768\n", | |
"Test-sample R^2 for iter: 17 is 0.731079\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 17 is 0.712667\n", | |
"\n", | |
"\n", | |
"Training 2000\n", | |
"\n", | |
"Train Epoch: 16 [0/2437321 (0%)]\tLoss: 0.005961\n", | |
"Insample R^2 for iter: 17 is 0.745606\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.010427\n", | |
"Test-sample R^2 for iter: 17 is 0.730201\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 17 is 0.714656\n", | |
"\n", | |
"\n", | |
"Training 2001\n", | |
"\n", | |
"Train Epoch: 16 [0/2536121 (0%)]\tLoss: 0.008125\n", | |
"Train Epoch: 16 [2500000/2536121 (98%)]\tLoss: 0.007054\n", | |
"Insample R^2 for iter: 17 is 0.746838\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.010008\n", | |
"Test-sample R^2 for iter: 17 is 0.728337\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 17 is 0.705928\n", | |
"\n", | |
"\n", | |
"Training 2002\n", | |
"\n", | |
"Train Epoch: 16 [0/2628379 (0%)]\tLoss: 0.007166\n", | |
"Train Epoch: 16 [2500000/2628379 (95%)]\tLoss: 0.013044\n", | |
"Insample R^2 for iter: 17 is 0.747739\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.008582\n", | |
"Test-sample R^2 for iter: 17 is 0.726621\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 17 is 0.701114\n", | |
"\n", | |
"\n", | |
"Training 2003\n", | |
"\n", | |
"Train Epoch: 16 [0/2714364 (0%)]\tLoss: 0.007160\n", | |
"Train Epoch: 16 [2500000/2714364 (92%)]\tLoss: 0.006645\n", | |
"Insample R^2 for iter: 17 is 0.748675\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.009025\n", | |
"Test-sample R^2 for iter: 17 is 0.723440\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 17 is 0.700162\n", | |
"\n", | |
"\n", | |
"Training 2004\n", | |
"\n", | |
"Train Epoch: 16 [0/2795471 (0%)]\tLoss: 0.006950\n", | |
"Train Epoch: 16 [2500000/2795471 (89%)]\tLoss: 0.007957\n", | |
"Insample R^2 for iter: 17 is 0.749578\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.007456\n", | |
"Test-sample R^2 for iter: 17 is 0.722127\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 17 is 0.701730\n", | |
"\n", | |
"\n", | |
"Checkpointing ...\n", | |
"Training 1974\n", | |
"\n", | |
"Train Epoch: 17 [0/415222 (0%)]\tLoss: 0.010624\n", | |
"Insample R^2 for iter: 18 is 0.622937\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.007826\n", | |
"Test-sample R^2 for iter: 18 is 0.694793\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 18 is 0.678417\n", | |
"\n", | |
"\n", | |
"Training 1975\n", | |
"\n", | |
"Train Epoch: 17 [0/475849 (0%)]\tLoss: 0.005431\n", | |
"Insample R^2 for iter: 18 is 0.656737\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.008508\n", | |
"Test-sample R^2 for iter: 18 is 0.686401\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 18 is 0.658765\n", | |
"\n", | |
"\n", | |
"Training 1976\n", | |
"\n", | |
"Train Epoch: 17 [0/534401 (0%)]\tLoss: 0.005746\n", | |
"Insample R^2 for iter: 18 is 0.663918\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.007322\n", | |
"Test-sample R^2 for iter: 18 is 0.698963\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 18 is 0.689965\n", | |
"\n", | |
"\n", | |
"Training 1977\n", | |
"\n", | |
"Train Epoch: 17 [0/593250 (0%)]\tLoss: 0.006706\n", | |
"Insample R^2 for iter: 18 is 0.671306\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.007363\n", | |
"Test-sample R^2 for iter: 18 is 0.706011\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 18 is 0.711039\n", | |
"\n", | |
"\n", | |
"Training 1978\n", | |
"\n", | |
"Train Epoch: 17 [0/652479 (0%)]\tLoss: 0.006035\n", | |
"Insample R^2 for iter: 18 is 0.678732\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.007670\n", | |
"Test-sample R^2 for iter: 18 is 0.709158\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 18 is 0.714086\n", | |
"\n", | |
"\n", | |
"Training 1979\n", | |
"\n", | |
"Train Epoch: 17 [0/710435 (0%)]\tLoss: 0.005475\n", | |
"Insample R^2 for iter: 18 is 0.683485\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.007715\n", | |
"Test-sample R^2 for iter: 18 is 0.712748\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 18 is 0.705872\n", | |
"\n", | |
"\n", | |
"Training 1980\n", | |
"\n", | |
"Train Epoch: 17 [0/767839 (0%)]\tLoss: 0.006132\n", | |
"Insample R^2 for iter: 18 is 0.688647\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.008607\n", | |
"Test-sample R^2 for iter: 18 is 0.714715\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 18 is 0.703662\n", | |
"\n", | |
"\n", | |
"Training 1981\n", | |
"\n", | |
"Train Epoch: 17 [0/825704 (0%)]\tLoss: 0.005659\n", | |
"Insample R^2 for iter: 18 is 0.693737\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.009928\n", | |
"Test-sample R^2 for iter: 18 is 0.712783\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 18 is 0.700581\n", | |
"\n", | |
"\n", | |
"Training 1982\n", | |
"\n", | |
"Train Epoch: 17 [0/887496 (0%)]\tLoss: 0.005109\n", | |
"Insample R^2 for iter: 18 is 0.698065\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.010102\n", | |
"Test-sample R^2 for iter: 18 is 0.711482\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 18 is 0.705944\n", | |
"\n", | |
"\n", | |
"Training 1983\n", | |
"\n", | |
"Train Epoch: 17 [0/951438 (0%)]\tLoss: 0.005880\n", | |
"Insample R^2 for iter: 18 is 0.702486\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.008890\n", | |
"Test-sample R^2 for iter: 18 is 0.713402\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 18 is 0.716236\n", | |
"\n", | |
"\n", | |
"Training 1984\n", | |
"\n", | |
"Train Epoch: 17 [0/1019788 (0%)]\tLoss: 0.005720\n", | |
"Insample R^2 for iter: 18 is 0.707032\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.008484\n", | |
"Test-sample R^2 for iter: 18 is 0.715630\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 18 is 0.721724\n", | |
"\n", | |
"\n", | |
"Training 1985\n", | |
"\n", | |
"Train Epoch: 17 [0/1094741 (0%)]\tLoss: 0.005641\n", | |
"Insample R^2 for iter: 18 is 0.711422\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.007832\n", | |
"Test-sample R^2 for iter: 18 is 0.719512\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 18 is 0.728565\n", | |
"\n", | |
"\n", | |
"Training 1986\n", | |
"\n", | |
"Train Epoch: 17 [0/1169628 (0%)]\tLoss: 0.005968\n", | |
"Insample R^2 for iter: 18 is 0.715670\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.007596\n", | |
"Test-sample R^2 for iter: 18 is 0.723338\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 18 is 0.728012\n", | |
"\n", | |
"\n", | |
"Training 1987\n", | |
"\n", | |
"Train Epoch: 17 [0/1246063 (0%)]\tLoss: 0.021119\n", | |
"Insample R^2 for iter: 18 is 0.719484\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.008902\n", | |
"Test-sample R^2 for iter: 18 is 0.724459\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 18 is 0.729408\n", | |
"\n", | |
"\n", | |
"Training 1988\n", | |
"\n", | |
"Train Epoch: 17 [0/1328981 (0%)]\tLoss: 0.005822\n", | |
"Insample R^2 for iter: 18 is 0.722084\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.008877\n", | |
"Test-sample R^2 for iter: 18 is 0.726244\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 18 is 0.728741\n", | |
"\n", | |
"\n", | |
"Training 1989\n", | |
"\n", | |
"Train Epoch: 17 [0/1413142 (0%)]\tLoss: 0.005318\n", | |
"Insample R^2 for iter: 18 is 0.724805\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.010477\n", | |
"Test-sample R^2 for iter: 18 is 0.726562\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 18 is 0.725266\n", | |
"\n", | |
"\n", | |
"Training 1990\n", | |
"\n", | |
"Train Epoch: 17 [0/1494994 (0%)]\tLoss: 0.005836\n", | |
"Insample R^2 for iter: 18 is 0.727708\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.011111\n", | |
"Test-sample R^2 for iter: 18 is 0.727148\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 18 is 0.729399\n", | |
"\n", | |
"\n", | |
"Training 1991\n", | |
"\n", | |
"Train Epoch: 17 [0/1575765 (0%)]\tLoss: 0.005405\n", | |
"Insample R^2 for iter: 18 is 0.730604\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.012306\n", | |
"Test-sample R^2 for iter: 18 is 0.726232\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 18 is 0.728166\n", | |
"\n", | |
"\n", | |
"Training 1992\n", | |
"\n", | |
"Train Epoch: 17 [0/1655543 (0%)]\tLoss: 0.005881\n", | |
"Insample R^2 for iter: 18 is 0.732744\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.010269\n", | |
"Test-sample R^2 for iter: 18 is 0.727507\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 18 is 0.727340\n", | |
"\n", | |
"\n", | |
"Training 1993\n", | |
"\n", | |
"Train Epoch: 17 [0/1737145 (0%)]\tLoss: 0.006451\n", | |
"Insample R^2 for iter: 18 is 0.734572\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.009971\n", | |
"Test-sample R^2 for iter: 18 is 0.728425\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 18 is 0.725175\n", | |
"\n", | |
"\n", | |
"Training 1994\n", | |
"\n", | |
"Train Epoch: 17 [0/1823681 (0%)]\tLoss: 0.007612\n", | |
"Insample R^2 for iter: 18 is 0.736164\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.010380\n", | |
"Test-sample R^2 for iter: 18 is 0.728324\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 18 is 0.723415\n", | |
"\n", | |
"\n", | |
"Training 1995\n", | |
"\n", | |
"Train Epoch: 17 [0/1919298 (0%)]\tLoss: 0.008631\n", | |
"Insample R^2 for iter: 18 is 0.737682\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.010381\n", | |
"Test-sample R^2 for iter: 18 is 0.728149\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 18 is 0.715123\n", | |
"\n", | |
"\n", | |
"Training 1996\n", | |
"\n", | |
"Train Epoch: 17 [0/2017668 (0%)]\tLoss: 0.006094\n", | |
"Insample R^2 for iter: 18 is 0.739422\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.010126\n", | |
"Test-sample R^2 for iter: 18 is 0.728138\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 18 is 0.715540\n", | |
"\n", | |
"\n", | |
"Training 1997\n", | |
"\n", | |
"Train Epoch: 17 [0/2121588 (0%)]\tLoss: 0.006250\n", | |
"Insample R^2 for iter: 18 is 0.741085\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.010232\n", | |
"Test-sample R^2 for iter: 18 is 0.728116\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 18 is 0.714663\n", | |
"\n", | |
"\n", | |
"Training 1998\n", | |
"\n", | |
"Train Epoch: 17 [0/2229613 (0%)]\tLoss: 0.005405\n", | |
"Insample R^2 for iter: 18 is 0.742957\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.011186\n", | |
"Test-sample R^2 for iter: 18 is 0.727835\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 18 is 0.714245\n", | |
"\n", | |
"\n", | |
"Training 1999\n", | |
"\n", | |
"Train Epoch: 17 [0/2336372 (0%)]\tLoss: 0.006245\n", | |
"Insample R^2 for iter: 18 is 0.744532\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.011456\n", | |
"Test-sample R^2 for iter: 18 is 0.726811\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 18 is 0.714230\n", | |
"\n", | |
"\n", | |
"Training 2000\n", | |
"\n", | |
"Train Epoch: 17 [0/2437321 (0%)]\tLoss: 0.006112\n", | |
"Insample R^2 for iter: 18 is 0.745945\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.010738\n", | |
"Test-sample R^2 for iter: 18 is 0.725768\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 18 is 0.714656\n", | |
"\n", | |
"\n", | |
"Training 2001\n", | |
"\n", | |
"Train Epoch: 17 [0/2536121 (0%)]\tLoss: 0.006718\n", | |
"Train Epoch: 17 [2500000/2536121 (98%)]\tLoss: 0.007531\n", | |
"Insample R^2 for iter: 18 is 0.747093\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.009948\n", | |
"Test-sample R^2 for iter: 18 is 0.724131\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 18 is 0.705883\n", | |
"\n", | |
"\n", | |
"Training 2002\n", | |
"\n", | |
"Train Epoch: 17 [0/2628379 (0%)]\tLoss: 0.009663\n", | |
"Train Epoch: 17 [2500000/2628379 (95%)]\tLoss: 0.008124\n", | |
"Insample R^2 for iter: 18 is 0.747864\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.008632\n", | |
"Test-sample R^2 for iter: 18 is 0.722497\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 18 is 0.701986\n", | |
"\n", | |
"\n", | |
"Training 2003\n", | |
"\n", | |
"Train Epoch: 17 [0/2714364 (0%)]\tLoss: 0.006425\n", | |
"Train Epoch: 17 [2500000/2714364 (92%)]\tLoss: 0.011689\n", | |
"Insample R^2 for iter: 18 is 0.748796\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.008551\n", | |
"Test-sample R^2 for iter: 18 is 0.720099\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 18 is 0.701046\n", | |
"\n", | |
"\n", | |
"Training 2004\n", | |
"\n", | |
"Train Epoch: 17 [0/2795471 (0%)]\tLoss: 0.007554\n", | |
"Train Epoch: 17 [2500000/2795471 (89%)]\tLoss: 0.007103\n", | |
"Insample R^2 for iter: 18 is 0.749745\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.009913\n", | |
"Test-sample R^2 for iter: 18 is 0.715522\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 18 is 0.700529\n", | |
"\n", | |
"\n", | |
"Checkpointing ...\n", | |
"Training 1974\n", | |
"\n", | |
"Train Epoch: 18 [0/415222 (0%)]\tLoss: 0.010891\n", | |
"Insample R^2 for iter: 19 is 0.615651\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.007786\n", | |
"Test-sample R^2 for iter: 19 is 0.696368\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 19 is 0.667714\n", | |
"\n", | |
"\n", | |
"Training 1975\n", | |
"\n", | |
"Train Epoch: 18 [0/475849 (0%)]\tLoss: 0.005340\n", | |
"Insample R^2 for iter: 19 is 0.651421\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.008118\n", | |
"Test-sample R^2 for iter: 19 is 0.694569\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 19 is 0.635637\n", | |
"\n", | |
"\n", | |
"Training 1976\n", | |
"\n", | |
"Train Epoch: 18 [0/534401 (0%)]\tLoss: 0.006564\n", | |
"Insample R^2 for iter: 19 is 0.660523\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.008135\n", | |
"Test-sample R^2 for iter: 19 is 0.694193\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 19 is 0.659272\n", | |
"\n", | |
"\n", | |
"Training 1977\n", | |
"\n", | |
"Train Epoch: 18 [0/593250 (0%)]\tLoss: 0.006395\n", | |
"Insample R^2 for iter: 19 is 0.668087\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.008777\n", | |
"Test-sample R^2 for iter: 19 is 0.689339\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 19 is 0.673027\n", | |
"\n", | |
"\n", | |
"Training 1978\n", | |
"\n", | |
"Train Epoch: 18 [0/652479 (0%)]\tLoss: 0.007000\n", | |
"Insample R^2 for iter: 19 is 0.675762\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.008829\n", | |
"Test-sample R^2 for iter: 19 is 0.687416\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 19 is 0.671273\n", | |
"\n", | |
"\n", | |
"Training 1979\n", | |
"\n", | |
"Train Epoch: 18 [0/710435 (0%)]\tLoss: 0.005653\n", | |
"Insample R^2 for iter: 19 is 0.681266\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.009559\n", | |
"Test-sample R^2 for iter: 19 is 0.683903\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 19 is 0.660869\n", | |
"\n", | |
"\n", | |
"Training 1980\n", | |
"\n", | |
"Train Epoch: 18 [0/767839 (0%)]\tLoss: 0.006085\n", | |
"Insample R^2 for iter: 19 is 0.687061\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.010716\n", | |
"Test-sample R^2 for iter: 19 is 0.680414\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 19 is 0.655080\n", | |
"\n", | |
"\n", | |
"Training 1981\n", | |
"\n", | |
"Train Epoch: 18 [0/825704 (0%)]\tLoss: 0.005884\n", | |
"Insample R^2 for iter: 19 is 0.692235\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.011072\n", | |
"Test-sample R^2 for iter: 19 is 0.678439\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 19 is 0.652752\n", | |
"\n", | |
"\n", | |
"Training 1982\n", | |
"\n", | |
"Train Epoch: 18 [0/887496 (0%)]\tLoss: 0.005638\n", | |
"Insample R^2 for iter: 19 is 0.696792\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.011174\n", | |
"Test-sample R^2 for iter: 19 is 0.677430\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 19 is 0.659045\n", | |
"\n", | |
"\n", | |
"Training 1983\n", | |
"\n", | |
"Train Epoch: 18 [0/951438 (0%)]\tLoss: 0.005241\n", | |
"Insample R^2 for iter: 19 is 0.701235\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.009155\n", | |
"Test-sample R^2 for iter: 19 is 0.681954\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 19 is 0.673320\n", | |
"\n", | |
"\n", | |
"Training 1984\n", | |
"\n", | |
"Train Epoch: 18 [0/1019788 (0%)]\tLoss: 0.005885\n", | |
"Insample R^2 for iter: 19 is 0.705827\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.008080\n", | |
"Test-sample R^2 for iter: 19 is 0.688176\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 19 is 0.683058\n", | |
"\n", | |
"\n", | |
"Training 1985\n", | |
"\n", | |
"Train Epoch: 18 [0/1094741 (0%)]\tLoss: 0.006463\n", | |
"Insample R^2 for iter: 19 is 0.710145\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.007933\n", | |
"Test-sample R^2 for iter: 19 is 0.694090\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 19 is 0.692225\n", | |
"\n", | |
"\n", | |
"Training 1986\n", | |
"\n", | |
"Train Epoch: 18 [0/1169628 (0%)]\tLoss: 0.005691\n", | |
"Insample R^2 for iter: 19 is 0.714538\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.007900\n", | |
"Test-sample R^2 for iter: 19 is 0.699160\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 19 is 0.693460\n", | |
"\n", | |
"\n", | |
"Training 1987\n", | |
"\n", | |
"Train Epoch: 18 [0/1246063 (0%)]\tLoss: 0.005781\n", | |
"Insample R^2 for iter: 19 is 0.718328\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.008312\n", | |
"Test-sample R^2 for iter: 19 is 0.703244\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 19 is 0.698300\n", | |
"\n", | |
"\n", | |
"Training 1988\n", | |
"\n", | |
"Train Epoch: 18 [0/1328981 (0%)]\tLoss: 0.005725\n", | |
"Insample R^2 for iter: 19 is 0.720991\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.008655\n", | |
"Test-sample R^2 for iter: 19 is 0.706857\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 19 is 0.698884\n", | |
"\n", | |
"\n", | |
"Training 1989\n", | |
"\n", | |
"Train Epoch: 18 [0/1413142 (0%)]\tLoss: 0.006638\n", | |
"Insample R^2 for iter: 19 is 0.723933\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.009772\n", | |
"Test-sample R^2 for iter: 19 is 0.709515\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 19 is 0.698556\n", | |
"\n", | |
"\n", | |
"Training 1990\n", | |
"\n", | |
"Train Epoch: 18 [0/1494994 (0%)]\tLoss: 0.005631\n", | |
"Insample R^2 for iter: 19 is 0.727057\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.011552\n", | |
"Test-sample R^2 for iter: 19 is 0.710489\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 19 is 0.703686\n", | |
"\n", | |
"\n", | |
"Training 1991\n", | |
"\n", | |
"Train Epoch: 18 [0/1575765 (0%)]\tLoss: 0.005424\n", | |
"Insample R^2 for iter: 19 is 0.730002\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.011470\n", | |
"Test-sample R^2 for iter: 19 is 0.711592\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 19 is 0.703920\n", | |
"\n", | |
"\n", | |
"Training 1992\n", | |
"\n", | |
"Train Epoch: 18 [0/1655543 (0%)]\tLoss: 0.007761\n", | |
"Insample R^2 for iter: 19 is 0.732201\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.010698\n", | |
"Test-sample R^2 for iter: 19 is 0.713089\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 19 is 0.703918\n", | |
"\n", | |
"\n", | |
"Training 1993\n", | |
"\n", | |
"Train Epoch: 18 [0/1737145 (0%)]\tLoss: 0.006712\n", | |
"Insample R^2 for iter: 19 is 0.734085\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.009594\n", | |
"Test-sample R^2 for iter: 19 is 0.715208\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 19 is 0.702752\n", | |
"\n", | |
"\n", | |
"Training 1994\n", | |
"\n", | |
"Train Epoch: 18 [0/1823681 (0%)]\tLoss: 0.006195\n", | |
"Insample R^2 for iter: 19 is 0.735872\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.009483\n", | |
"Test-sample R^2 for iter: 19 is 0.716864\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 19 is 0.699765\n", | |
"\n", | |
"\n", | |
"Training 1995\n", | |
"\n", | |
"Train Epoch: 18 [0/1919298 (0%)]\tLoss: 0.005658\n", | |
"Insample R^2 for iter: 19 is 0.737658\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.009936\n", | |
"Test-sample R^2 for iter: 19 is 0.717748\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 19 is 0.694110\n", | |
"\n", | |
"\n", | |
"Training 1996\n", | |
"\n", | |
"Train Epoch: 18 [0/2017668 (0%)]\tLoss: 0.005905\n", | |
"Insample R^2 for iter: 19 is 0.739419\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.009986\n", | |
"Test-sample R^2 for iter: 19 is 0.718353\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 19 is 0.693595\n", | |
"\n", | |
"\n", | |
"Training 1997\n", | |
"\n", | |
"Train Epoch: 18 [0/2121588 (0%)]\tLoss: 0.005782\n", | |
"Insample R^2 for iter: 19 is 0.741192\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.010191\n", | |
"Test-sample R^2 for iter: 19 is 0.718784\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 19 is 0.692924\n", | |
"\n", | |
"\n", | |
"Training 1998\n", | |
"\n", | |
"Train Epoch: 18 [0/2229613 (0%)]\tLoss: 0.005490\n", | |
"Insample R^2 for iter: 19 is 0.743146\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.011716\n", | |
"Test-sample R^2 for iter: 19 is 0.718347\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 19 is 0.695282\n", | |
"\n", | |
"\n", | |
"Training 1999\n", | |
"\n", | |
"Train Epoch: 18 [0/2336372 (0%)]\tLoss: 0.006535\n", | |
"Insample R^2 for iter: 19 is 0.744687\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.012117\n", | |
"Test-sample R^2 for iter: 19 is 0.717026\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 19 is 0.696170\n", | |
"\n", | |
"\n", | |
"Training 2000\n", | |
"\n", | |
"Train Epoch: 18 [0/2437321 (0%)]\tLoss: 0.006691\n", | |
"Insample R^2 for iter: 19 is 0.746167\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.010533\n", | |
"Test-sample R^2 for iter: 19 is 0.716558\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 19 is 0.695754\n", | |
"\n", | |
"\n", | |
"Training 2001\n", | |
"\n", | |
"Train Epoch: 18 [0/2536121 (0%)]\tLoss: 0.013128\n", | |
"Train Epoch: 18 [2500000/2536121 (98%)]\tLoss: 0.008219\n", | |
"Insample R^2 for iter: 19 is 0.747347\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.009887\n", | |
"Test-sample R^2 for iter: 19 is 0.715320\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 19 is 0.688049\n", | |
"\n", | |
"\n", | |
"Training 2002\n", | |
"\n", | |
"Train Epoch: 18 [0/2628379 (0%)]\tLoss: 0.039288\n", | |
"Train Epoch: 18 [2500000/2628379 (95%)]\tLoss: 0.006775\n", | |
"Insample R^2 for iter: 19 is 0.748163\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.009094\n", | |
"Test-sample R^2 for iter: 19 is 0.713394\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 19 is 0.681970\n", | |
"\n", | |
"\n", | |
"Training 2003\n", | |
"\n", | |
"Train Epoch: 18 [0/2714364 (0%)]\tLoss: 0.005969\n", | |
"Train Epoch: 18 [2500000/2714364 (92%)]\tLoss: 0.007744\n", | |
"Insample R^2 for iter: 19 is 0.749156\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.009351\n", | |
"Test-sample R^2 for iter: 19 is 0.710209\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 19 is 0.681103\n", | |
"\n", | |
"\n", | |
"Training 2004\n", | |
"\n", | |
"Train Epoch: 18 [0/2795471 (0%)]\tLoss: 0.006597\n", | |
"Train Epoch: 18 [2500000/2795471 (89%)]\tLoss: 0.007339\n", | |
"Insample R^2 for iter: 19 is 0.750100\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.008177\n", | |
"Test-sample R^2 for iter: 19 is 0.708333\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 19 is 0.682530\n", | |
"\n", | |
"\n", | |
"Checkpointing ...\n", | |
"Training 1974\n", | |
"\n", | |
"Train Epoch: 19 [0/415222 (0%)]\tLoss: 0.010340\n", | |
"Insample R^2 for iter: 20 is 0.625091\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.007674\n", | |
"Test-sample R^2 for iter: 20 is 0.700708\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 20 is 0.686107\n", | |
"\n", | |
"\n", | |
"Training 1975\n", | |
"\n", | |
"Train Epoch: 19 [0/475849 (0%)]\tLoss: 0.005328\n", | |
"Insample R^2 for iter: 20 is 0.656104\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.007970\n", | |
"Test-sample R^2 for iter: 20 is 0.699549\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 20 is 0.652822\n", | |
"\n", | |
"\n", | |
"Training 1976\n", | |
"\n", | |
"Train Epoch: 19 [0/534401 (0%)]\tLoss: 0.006323\n", | |
"Insample R^2 for iter: 20 is 0.663163\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.007970\n", | |
"Test-sample R^2 for iter: 20 is 0.699590\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 20 is 0.676206\n", | |
"\n", | |
"\n", | |
"Training 1977\n", | |
"\n", | |
"Train Epoch: 19 [0/593250 (0%)]\tLoss: 0.007373\n", | |
"Insample R^2 for iter: 20 is 0.670734\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.008159\n", | |
"Test-sample R^2 for iter: 20 is 0.699107\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 20 is 0.690577\n", | |
"\n", | |
"\n", | |
"Training 1978\n", | |
"\n", | |
"Train Epoch: 19 [0/652479 (0%)]\tLoss: 0.005586\n", | |
"Insample R^2 for iter: 20 is 0.678073\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.008404\n", | |
"Test-sample R^2 for iter: 20 is 0.698311\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 20 is 0.685068\n", | |
"\n", | |
"\n", | |
"Training 1979\n", | |
"\n", | |
"Train Epoch: 19 [0/710435 (0%)]\tLoss: 0.005689\n", | |
"Insample R^2 for iter: 20 is 0.683547\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.008842\n", | |
"Test-sample R^2 for iter: 20 is 0.697153\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 20 is 0.680209\n", | |
"\n", | |
"\n", | |
"Training 1980\n", | |
"\n", | |
"Train Epoch: 19 [0/767839 (0%)]\tLoss: 0.006944\n", | |
"Insample R^2 for iter: 20 is 0.689030\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.009603\n", | |
"Test-sample R^2 for iter: 20 is 0.696825\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 20 is 0.679936\n", | |
"\n", | |
"\n", | |
"Training 1981\n", | |
"\n", | |
"Train Epoch: 19 [0/825704 (0%)]\tLoss: 0.005191\n", | |
"Insample R^2 for iter: 20 is 0.693791\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.010095\n", | |
"Test-sample R^2 for iter: 20 is 0.696497\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 20 is 0.675382\n", | |
"\n", | |
"\n", | |
"Training 1982\n", | |
"\n", | |
"Train Epoch: 19 [0/887496 (0%)]\tLoss: 0.005752\n", | |
"Insample R^2 for iter: 20 is 0.698166\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.010522\n", | |
"Test-sample R^2 for iter: 20 is 0.695625\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 20 is 0.677485\n", | |
"\n", | |
"\n", | |
"Training 1983\n", | |
"\n", | |
"Train Epoch: 19 [0/951438 (0%)]\tLoss: 0.006685\n", | |
"Insample R^2 for iter: 20 is 0.702475\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.009566\n", | |
"Test-sample R^2 for iter: 20 is 0.697085\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 20 is 0.688904\n", | |
"\n", | |
"\n", | |
"Training 1984\n", | |
"\n", | |
"Train Epoch: 19 [0/1019788 (0%)]\tLoss: 0.005382\n", | |
"Insample R^2 for iter: 20 is 0.706961\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.008170\n", | |
"Test-sample R^2 for iter: 20 is 0.701679\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 20 is 0.697285\n", | |
"\n", | |
"\n", | |
"Training 1985\n", | |
"\n", | |
"Train Epoch: 19 [0/1094741 (0%)]\tLoss: 0.005640\n", | |
"Insample R^2 for iter: 20 is 0.711213\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.007981\n", | |
"Test-sample R^2 for iter: 20 is 0.706347\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 20 is 0.705117\n", | |
"\n", | |
"\n", | |
"Training 1986\n", | |
"\n", | |
"Train Epoch: 19 [0/1169628 (0%)]\tLoss: 0.009523\n", | |
"Insample R^2 for iter: 20 is 0.715510\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.007952\n", | |
"Test-sample R^2 for iter: 20 is 0.710353\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 20 is 0.704747\n", | |
"\n", | |
"\n", | |
"Training 1987\n", | |
"\n", | |
"Train Epoch: 19 [0/1246063 (0%)]\tLoss: 0.021198\n", | |
"Insample R^2 for iter: 20 is 0.719326\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.008233\n", | |
"Test-sample R^2 for iter: 20 is 0.713803\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 20 is 0.708513\n", | |
"\n", | |
"\n", | |
"Training 1988\n", | |
"\n", | |
"Train Epoch: 19 [0/1328981 (0%)]\tLoss: 0.005975\n", | |
"Insample R^2 for iter: 20 is 0.721808\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.008269\n", | |
"Test-sample R^2 for iter: 20 is 0.717434\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 20 is 0.709759\n", | |
"\n", | |
"\n", | |
"Training 1989\n", | |
"\n", | |
"Train Epoch: 19 [0/1413142 (0%)]\tLoss: 0.005909\n", | |
"Insample R^2 for iter: 20 is 0.724500\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.009552\n", | |
"Test-sample R^2 for iter: 20 is 0.719785\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 20 is 0.709079\n", | |
"\n", | |
"\n", | |
"Training 1990\n", | |
"\n", | |
"Train Epoch: 19 [0/1494994 (0%)]\tLoss: 0.005772\n", | |
"Insample R^2 for iter: 20 is 0.727540\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.011395\n", | |
"Test-sample R^2 for iter: 20 is 0.720372\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 20 is 0.713752\n", | |
"\n", | |
"\n", | |
"Training 1991\n", | |
"\n", | |
"Train Epoch: 19 [0/1575765 (0%)]\tLoss: 0.005528\n", | |
"Insample R^2 for iter: 20 is 0.730419\n", | |
"\n", | |
"\n", | |
"Test set: Average loss: 0.010902\n", | |
"Test-sample R^2 for iter: 20 is 0.721667\n", | |
"\n", | |
"\n", | |
"Out-sample R^2 for iter: 20 is 0.715524\n", | |
"\n", | |
"\n", | |
"Training 1992\n", | |
"\n", | |
"Train Epoch: 19 [0/1655543 (0%)]\tLoss: 0.005878\n", | |
"Insample R^2 for iter: 20 is 0.732766\n", | |
"\n", | |
"\n" | |
], | |
"name": "stdout" | |
} | |
] | |
}, | |
{ | |
"cell_type": "markdown", | |
"metadata": { | |
"id": "4SZC-wFqqOk9", | |
"colab_type": "text" | |
}, | |
"source": [ | |
"Calculate the out-of-sample total R-squared" | |
] | |
}, | |
{ | |
"cell_type": "code", | |
"metadata": { | |
"id": "n3ire4cIR77W", | |
"colab_type": "code", | |
"colab": {} | |
}, | |
"source": [ | |
"print('Out-of-sample R^2 is %f' % (np.mean(out_rsq_list)))" | |
], | |
"execution_count": 0, | |
"outputs": [] | |
} | |
] | |
} |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment