Skip to content

Instantly share code, notes, and snippets.

@Flova
Last active August 26, 2020 11:17
Show Gist options
  • Save Flova/d7b91dbfc4719548a9f1edd790a12a45 to your computer and use it in GitHub Desktop.
Save Flova/d7b91dbfc4719548a9f1edd790a12a45 to your computer and use it in GitHub Desktop.
Karma Regression MLP
Display the source blob
Display the rendered blob
Raw
{
"nbformat": 4,
"nbformat_minor": 0,
"metadata": {
"colab": {
"name": "Untitled4.ipynb",
"provenance": [],
"authorship_tag": "ABX9TyPolPm0ETqyevFzbm0w5VOX",
"include_colab_link": true
},
"kernelspec": {
"name": "python3",
"display_name": "Python 3"
}
},
"cells": [
{
"cell_type": "markdown",
"metadata": {
"id": "view-in-github",
"colab_type": "text"
},
"source": [
"<a href=\"https://colab.research.google.com/gist/Flova/1535293ae060bcac8947e6fbc387c02f/untitled4.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>"
]
},
{
"cell_type": "code",
"metadata": {
"id": "ELvsxdRJqZ44",
"colab_type": "code",
"colab": {}
},
"source": [
"import numpy as np\n",
"from sklearn.model_selection import train_test_split\n",
"from sklearn import preprocessing\n",
"from sklearn.neural_network import MLPRegressor"
],
"execution_count": 5,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "8-OP3VIpqsIv",
"colab_type": "code",
"colab": {}
},
"source": [
"raw_data = np.genfromtxt(\"data\", dtype=float, delimiter=',', skip_header=1)"
],
"execution_count": 6,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "nw0lt9AyrTdF",
"colab_type": "code",
"colab": {}
},
"source": [
"X, Y = np.hsplit(raw_data, [-1])\n",
"\n",
"# Scale\n",
"min_max_scaler = preprocessing.MinMaxScaler()\n",
"X_minmax = min_max_scaler.fit_transform(X)\n",
"\n",
"# Test Train Split\n",
"X_train, X_test, Y_train, Y_test = train_test_split(\n",
" X_minmax, Y, test_size=0.3, random_state=42)"
],
"execution_count": 7,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "sbOYq2xssxwP",
"colab_type": "code",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 1000
},
"outputId": "64a5fe87-0f5c-46c6-a0eb-75fdf02290d5"
},
"source": [
"reg = MLPRegressor(hidden_layer_sizes=(4, ), early_stopping=True, max_iter=20000, epsilon=1e-8, tol=0.000000100, random_state=1, verbose=1, validation_fraction=0.1)\n",
"\n",
"reg.fit(X_train, Y_train.ravel())"
],
"execution_count": 13,
"outputs": [
{
"output_type": "stream",
"text": [
"/usr/local/lib/python3.6/dist-packages/sklearn/neural_network/_multilayer_perceptron.py:1342: DataConversionWarning: A column-vector y was passed when a 1d array was expected. Please change the shape of y to (n_samples, ), for example using ravel().\n",
" y = column_or_1d(y, warn=True)\n"
],
"name": "stderr"
},
{
"output_type": "stream",
"text": [
"Iteration 1, loss = 1.07186412\n",
"Validation score: -19.441446\n",
"Iteration 2, loss = 1.06142723\n",
"Validation score: -19.239554\n",
"Iteration 3, loss = 1.05110499\n",
"Validation score: -19.039449\n",
"Iteration 4, loss = 1.04089975\n",
"Validation score: -18.841171\n",
"Iteration 5, loss = 1.03084570\n",
"Validation score: -18.644792\n",
"Iteration 6, loss = 1.02092488\n",
"Validation score: -18.450372\n",
"Iteration 7, loss = 1.01112238\n",
"Validation score: -18.257867\n",
"Iteration 8, loss = 1.00141073\n",
"Validation score: -18.067254\n",
"Iteration 9, loss = 0.99183575\n",
"Validation score: -17.878604\n",
"Iteration 10, loss = 0.98237916\n",
"Validation score: -17.691931\n",
"Iteration 11, loss = 0.97305222\n",
"Validation score: -17.507312\n",
"Iteration 12, loss = 0.96385568\n",
"Validation score: -17.329073\n",
"Iteration 13, loss = 0.95483384\n",
"Validation score: -17.154251\n",
"Iteration 14, loss = 0.94593785\n",
"Validation score: -16.981444\n",
"Iteration 15, loss = 0.93716447\n",
"Validation score: -16.810631\n",
"Iteration 16, loss = 0.92848907\n",
"Validation score: -16.641808\n",
"Iteration 17, loss = 0.91999339\n",
"Validation score: -16.475038\n",
"Iteration 18, loss = 0.91162250\n",
"Validation score: -16.310265\n",
"Iteration 19, loss = 0.90334534\n",
"Validation score: -16.147443\n",
"Iteration 20, loss = 0.89515993\n",
"Validation score: -15.986532\n",
"Iteration 21, loss = 0.88706454\n",
"Validation score: -15.827495\n",
"Iteration 22, loss = 0.87912436\n",
"Validation score: -15.670465\n",
"Iteration 23, loss = 0.87134507\n",
"Validation score: -15.517239\n",
"Iteration 24, loss = 0.86367924\n",
"Validation score: -15.368693\n",
"Iteration 25, loss = 0.85612380\n",
"Validation score: -15.224371\n",
"Iteration 26, loss = 0.84865682\n",
"Validation score: -15.082733\n",
"Iteration 27, loss = 0.84127617\n",
"Validation score: -14.942793\n",
"Iteration 28, loss = 0.83397990\n",
"Validation score: -14.804516\n",
"Iteration 29, loss = 0.82677361\n",
"Validation score: -14.667888\n",
"Iteration 30, loss = 0.81965179\n",
"Validation score: -14.535754\n",
"Iteration 31, loss = 0.81262622\n",
"Validation score: -14.405878\n",
"Iteration 32, loss = 0.80569572\n",
"Validation score: -14.277545\n",
"Iteration 33, loss = 0.79888787\n",
"Validation score: -14.150756\n",
"Iteration 34, loss = 0.79217178\n",
"Validation score: -14.025495\n",
"Iteration 35, loss = 0.78554078\n",
"Validation score: -13.901725\n",
"Iteration 36, loss = 0.77898597\n",
"Validation score: -13.779412\n",
"Iteration 37, loss = 0.77250563\n",
"Validation score: -13.658526\n",
"Iteration 38, loss = 0.76614169\n",
"Validation score: -13.539094\n",
"Iteration 39, loss = 0.75985531\n",
"Validation score: -13.422082\n",
"Iteration 40, loss = 0.75366461\n",
"Validation score: -13.315360\n",
"Iteration 41, loss = 0.74763926\n",
"Validation score: -13.209926\n",
"Iteration 42, loss = 0.74170225\n",
"Validation score: -13.106770\n",
"Iteration 43, loss = 0.73584817\n",
"Validation score: -13.005402\n",
"Iteration 44, loss = 0.73012049\n",
"Validation score: -12.905254\n",
"Iteration 45, loss = 0.72447075\n",
"Validation score: -12.806385\n",
"Iteration 46, loss = 0.71897021\n",
"Validation score: -12.708755\n",
"Iteration 47, loss = 0.71354514\n",
"Validation score: -12.612316\n",
"Iteration 48, loss = 0.70827062\n",
"Validation score: -12.517151\n",
"Iteration 49, loss = 0.70309536\n",
"Validation score: -12.423206\n",
"Iteration 50, loss = 0.69802279\n",
"Validation score: -12.330504\n",
"Iteration 51, loss = 0.69304696\n",
"Validation score: -12.238992\n",
"Iteration 52, loss = 0.68816306\n",
"Validation score: -12.148673\n",
"Iteration 53, loss = 0.68336068\n",
"Validation score: -12.059494\n",
"Iteration 54, loss = 0.67865670\n",
"Validation score: -11.971475\n",
"Iteration 55, loss = 0.67403270\n",
"Validation score: -11.884579\n",
"Iteration 56, loss = 0.66950479\n",
"Validation score: -11.799513\n",
"Iteration 57, loss = 0.66506025\n",
"Validation score: -11.715585\n",
"Iteration 58, loss = 0.66070493\n",
"Validation score: -11.632727\n",
"Iteration 59, loss = 0.65642324\n",
"Validation score: -11.550912\n",
"Iteration 60, loss = 0.65220033\n",
"Validation score: -11.470093\n",
"Iteration 61, loss = 0.64803021\n",
"Validation score: -11.390226\n",
"Iteration 62, loss = 0.64391050\n",
"Validation score: -11.311271\n",
"Iteration 63, loss = 0.63983903\n",
"Validation score: -11.233193\n",
"Iteration 64, loss = 0.63581383\n",
"Validation score: -11.155957\n",
"Iteration 65, loss = 0.63183308\n",
"Validation score: -11.079533\n",
"Iteration 66, loss = 0.62789513\n",
"Validation score: -11.003894\n",
"Iteration 67, loss = 0.62403677\n",
"Validation score: -10.929086\n",
"Iteration 68, loss = 0.62023194\n",
"Validation score: -10.855078\n",
"Iteration 69, loss = 0.61647449\n",
"Validation score: -10.781852\n",
"Iteration 70, loss = 0.61275756\n",
"Validation score: -10.709378\n",
"Iteration 71, loss = 0.60911403\n",
"Validation score: -10.637750\n",
"Iteration 72, loss = 0.60558421\n",
"Validation score: -10.567046\n",
"Iteration 73, loss = 0.60215476\n",
"Validation score: -10.497224\n",
"Iteration 74, loss = 0.59877647\n",
"Validation score: -10.428265\n",
"Iteration 75, loss = 0.59547170\n",
"Validation score: -10.364940\n",
"Iteration 76, loss = 0.59224039\n",
"Validation score: -10.302434\n",
"Iteration 77, loss = 0.58905030\n",
"Validation score: -10.240653\n",
"Iteration 78, loss = 0.58589891\n",
"Validation score: -10.179562\n",
"Iteration 79, loss = 0.58278661\n",
"Validation score: -10.119200\n",
"Iteration 80, loss = 0.57971103\n",
"Validation score: -10.060232\n",
"Iteration 81, loss = 0.57670937\n",
"Validation score: -10.001938\n",
"Iteration 82, loss = 0.57375986\n",
"Validation score: -9.944297\n",
"Iteration 83, loss = 0.57085260\n",
"Validation score: -9.887898\n",
"Iteration 84, loss = 0.56797768\n",
"Validation score: -9.832533\n",
"Iteration 85, loss = 0.56513636\n",
"Validation score: -9.777725\n",
"Iteration 86, loss = 0.56232802\n",
"Validation score: -9.723451\n",
"Iteration 87, loss = 0.55956514\n",
"Validation score: -9.669719\n",
"Iteration 88, loss = 0.55683848\n",
"Validation score: -9.617503\n",
"Iteration 89, loss = 0.55414485\n",
"Validation score: -9.566800\n",
"Iteration 90, loss = 0.55152646\n",
"Validation score: -9.516619\n",
"Iteration 91, loss = 0.54893629\n",
"Validation score: -9.466932\n",
"Iteration 92, loss = 0.54638917\n",
"Validation score: -9.417759\n",
"Iteration 93, loss = 0.54388789\n",
"Validation score: -9.369110\n",
"Iteration 94, loss = 0.54144727\n",
"Validation score: -9.320979\n",
"Iteration 95, loss = 0.53903948\n",
"Validation score: -9.273353\n",
"Iteration 96, loss = 0.53667744\n",
"Validation score: -9.226231\n",
"Iteration 97, loss = 0.53434151\n",
"Validation score: -9.179586\n",
"Iteration 98, loss = 0.53202971\n",
"Validation score: -9.133393\n",
"Iteration 99, loss = 0.52974929\n",
"Validation score: -9.087651\n",
"Iteration 100, loss = 0.52749914\n",
"Validation score: -9.042354\n",
"Iteration 101, loss = 0.52527842\n",
"Validation score: -8.997479\n",
"Iteration 102, loss = 0.52312324\n",
"Validation score: -8.953149\n",
"Iteration 103, loss = 0.52103764\n",
"Validation score: -8.909353\n",
"Iteration 104, loss = 0.51898886\n",
"Validation score: -8.866057\n",
"Iteration 105, loss = 0.51696256\n",
"Validation score: -8.823231\n",
"Iteration 106, loss = 0.51495806\n",
"Validation score: -8.780857\n",
"Iteration 107, loss = 0.51298023\n",
"Validation score: -8.738913\n",
"Iteration 108, loss = 0.51102375\n",
"Validation score: -8.697371\n",
"Iteration 109, loss = 0.50909000\n",
"Validation score: -8.656236\n",
"Iteration 110, loss = 0.50719078\n",
"Validation score: -8.615522\n",
"Iteration 111, loss = 0.50532136\n",
"Validation score: -8.575202\n",
"Iteration 112, loss = 0.50346899\n",
"Validation score: -8.535255\n",
"Iteration 113, loss = 0.50163270\n",
"Validation score: -8.495658\n",
"Iteration 114, loss = 0.49981168\n",
"Validation score: -8.456404\n",
"Iteration 115, loss = 0.49801065\n",
"Validation score: -8.417474\n",
"Iteration 116, loss = 0.49622930\n",
"Validation score: -8.378911\n",
"Iteration 117, loss = 0.49448579\n",
"Validation score: -8.340692\n",
"Iteration 118, loss = 0.49275650\n",
"Validation score: -8.302798\n",
"Iteration 119, loss = 0.49104062\n",
"Validation score: -8.265210\n",
"Iteration 120, loss = 0.48933744\n",
"Validation score: -8.227912\n",
"Iteration 121, loss = 0.48764631\n",
"Validation score: -8.190888\n",
"Iteration 122, loss = 0.48596663\n",
"Validation score: -8.154125\n",
"Iteration 123, loss = 0.48429784\n",
"Validation score: -8.117611\n",
"Iteration 124, loss = 0.48264491\n",
"Validation score: -8.081357\n",
"Iteration 125, loss = 0.48100805\n",
"Validation score: -8.048289\n",
"Iteration 126, loss = 0.47938259\n",
"Validation score: -8.016741\n",
"Iteration 127, loss = 0.47777125\n",
"Validation score: -7.985369\n",
"Iteration 128, loss = 0.47617545\n",
"Validation score: -7.954185\n",
"Iteration 129, loss = 0.47460103\n",
"Validation score: -7.923181\n",
"Iteration 130, loss = 0.47303649\n",
"Validation score: -7.892348\n",
"Iteration 131, loss = 0.47148135\n",
"Validation score: -7.861681\n",
"Iteration 132, loss = 0.46993514\n",
"Validation score: -7.831171\n",
"Iteration 133, loss = 0.46840431\n",
"Validation score: -7.800828\n",
"Iteration 134, loss = 0.46688564\n",
"Validation score: -7.770644\n",
"Iteration 135, loss = 0.46537545\n",
"Validation score: -7.740613\n",
"Iteration 136, loss = 0.46387331\n",
"Validation score: -7.710727\n",
"Iteration 137, loss = 0.46237885\n",
"Validation score: -7.680982\n",
"Iteration 138, loss = 0.46089175\n",
"Validation score: -7.651373\n",
"Iteration 139, loss = 0.45941168\n",
"Validation score: -7.621893\n",
"Iteration 140, loss = 0.45793835\n",
"Validation score: -7.592539\n",
"Iteration 141, loss = 0.45647150\n",
"Validation score: -7.563306\n",
"Iteration 142, loss = 0.45501086\n",
"Validation score: -7.534190\n",
"Iteration 143, loss = 0.45355622\n",
"Validation score: -7.505188\n",
"Iteration 144, loss = 0.45210734\n",
"Validation score: -7.476297\n",
"Iteration 145, loss = 0.45066404\n",
"Validation score: -7.447511\n",
"Iteration 146, loss = 0.44922612\n",
"Validation score: -7.418830\n",
"Iteration 147, loss = 0.44779339\n",
"Validation score: -7.390249\n",
"Iteration 148, loss = 0.44636571\n",
"Validation score: -7.361767\n",
"Iteration 149, loss = 0.44494290\n",
"Validation score: -7.333379\n",
"Iteration 150, loss = 0.44352486\n",
"Validation score: -7.305086\n",
"Iteration 151, loss = 0.44213564\n",
"Validation score: -7.276926\n",
"Iteration 152, loss = 0.44076296\n",
"Validation score: -7.248909\n",
"Iteration 153, loss = 0.43940772\n",
"Validation score: -7.221028\n",
"Iteration 154, loss = 0.43806049\n",
"Validation score: -7.193276\n",
"Iteration 155, loss = 0.43671963\n",
"Validation score: -7.165646\n",
"Iteration 156, loss = 0.43538622\n",
"Validation score: -7.138142\n",
"Iteration 157, loss = 0.43406338\n",
"Validation score: -7.110757\n",
"Iteration 158, loss = 0.43274859\n",
"Validation score: -7.083496\n",
"Iteration 159, loss = 0.43144469\n",
"Validation score: -7.056352\n",
"Iteration 160, loss = 0.43014619\n",
"Validation score: -7.029318\n",
"Iteration 161, loss = 0.42885274\n",
"Validation score: -7.002388\n",
"Iteration 162, loss = 0.42756784\n",
"Validation score: -6.975565\n",
"Iteration 163, loss = 0.42628910\n",
"Validation score: -6.948844\n",
"Iteration 164, loss = 0.42501483\n",
"Validation score: -6.922218\n",
"Iteration 165, loss = 0.42374513\n",
"Validation score: -6.895683\n",
"Iteration 166, loss = 0.42248199\n",
"Validation score: -6.869243\n",
"Iteration 167, loss = 0.42122955\n",
"Validation score: -6.842904\n",
"Iteration 168, loss = 0.41998443\n",
"Validation score: -6.816659\n",
"Iteration 169, loss = 0.41874686\n",
"Validation score: -6.790511\n",
"Iteration 170, loss = 0.41751383\n",
"Validation score: -6.764454\n",
"Iteration 171, loss = 0.41628447\n",
"Validation score: -6.738481\n",
"Iteration 172, loss = 0.41505850\n",
"Validation score: -6.712585\n",
"Iteration 173, loss = 0.41383565\n",
"Validation score: -6.686764\n",
"Iteration 174, loss = 0.41261567\n",
"Validation score: -6.661010\n",
"Iteration 175, loss = 0.41139835\n",
"Validation score: -6.635320\n",
"Iteration 176, loss = 0.41018347\n",
"Validation score: -6.609689\n",
"Iteration 177, loss = 0.40897084\n",
"Validation score: -6.584114\n",
"Iteration 178, loss = 0.40776027\n",
"Validation score: -6.558590\n",
"Iteration 179, loss = 0.40655160\n",
"Validation score: -6.533279\n",
"Iteration 180, loss = 0.40534464\n",
"Validation score: -6.508375\n",
"Iteration 181, loss = 0.40413927\n",
"Validation score: -6.483508\n",
"Iteration 182, loss = 0.40293532\n",
"Validation score: -6.458677\n",
"Iteration 183, loss = 0.40173267\n",
"Validation score: -6.433877\n",
"Iteration 184, loss = 0.40053118\n",
"Validation score: -6.409108\n",
"Iteration 185, loss = 0.39933175\n",
"Validation score: -6.384373\n",
"Iteration 186, loss = 0.39814343\n",
"Validation score: -6.359685\n",
"Iteration 187, loss = 0.39695809\n",
"Validation score: -6.335042\n",
"Iteration 188, loss = 0.39577580\n",
"Validation score: -6.310440\n",
"Iteration 189, loss = 0.39459492\n",
"Validation score: -6.285876\n",
"Iteration 190, loss = 0.39341526\n",
"Validation score: -6.261346\n",
"Iteration 191, loss = 0.39223663\n",
"Validation score: -6.236845\n",
"Iteration 192, loss = 0.39105886\n",
"Validation score: -6.212370\n",
"Iteration 193, loss = 0.38988177\n",
"Validation score: -6.187919\n",
"Iteration 194, loss = 0.38870521\n",
"Validation score: -6.163488\n",
"Iteration 195, loss = 0.38752904\n",
"Validation score: -6.139074\n",
"Iteration 196, loss = 0.38635312\n",
"Validation score: -6.114674\n",
"Iteration 197, loss = 0.38518010\n",
"Validation score: -6.090302\n",
"Iteration 198, loss = 0.38401131\n",
"Validation score: -6.065951\n",
"Iteration 199, loss = 0.38284282\n",
"Validation score: -6.041619\n",
"Iteration 200, loss = 0.38167446\n",
"Validation score: -6.017303\n",
"Iteration 201, loss = 0.38050608\n",
"Validation score: -5.992998\n",
"Iteration 202, loss = 0.37933752\n",
"Validation score: -5.968703\n",
"Iteration 203, loss = 0.37816994\n",
"Validation score: -5.944416\n",
"Iteration 204, loss = 0.37700221\n",
"Validation score: -5.920135\n",
"Iteration 205, loss = 0.37583400\n",
"Validation score: -5.895857\n",
"Iteration 206, loss = 0.37466518\n",
"Validation score: -5.871579\n",
"Iteration 207, loss = 0.37349562\n",
"Validation score: -5.847299\n",
"Iteration 208, loss = 0.37232520\n",
"Validation score: -5.823015\n",
"Iteration 209, loss = 0.37115381\n",
"Validation score: -5.798723\n",
"Iteration 210, loss = 0.36998748\n",
"Validation score: -5.774444\n",
"Iteration 211, loss = 0.36882509\n",
"Validation score: -5.750173\n",
"Iteration 212, loss = 0.36766208\n",
"Validation score: -5.725905\n",
"Iteration 213, loss = 0.36649827\n",
"Validation score: -5.701638\n",
"Iteration 214, loss = 0.36533550\n",
"Validation score: -5.677375\n",
"Iteration 215, loss = 0.36417381\n",
"Validation score: -5.653113\n",
"Iteration 216, loss = 0.36301106\n",
"Validation score: -5.628847\n",
"Iteration 217, loss = 0.36184708\n",
"Validation score: -5.604575\n",
"Iteration 218, loss = 0.36068171\n",
"Validation score: -5.580292\n",
"Iteration 219, loss = 0.35951480\n",
"Validation score: -5.555996\n",
"Iteration 220, loss = 0.35834621\n",
"Validation score: -5.531683\n",
"Iteration 221, loss = 0.35717580\n",
"Validation score: -5.507351\n",
"Iteration 222, loss = 0.35600346\n",
"Validation score: -5.482998\n",
"Iteration 223, loss = 0.35482907\n",
"Validation score: -5.458619\n",
"Iteration 224, loss = 0.35365250\n",
"Validation score: -5.434214\n",
"Iteration 225, loss = 0.35247365\n",
"Validation score: -5.409781\n",
"Iteration 226, loss = 0.35129242\n",
"Validation score: -5.385316\n",
"Iteration 227, loss = 0.35010992\n",
"Validation score: -5.360821\n",
"Iteration 228, loss = 0.34892514\n",
"Validation score: -5.336294\n",
"Iteration 229, loss = 0.34773780\n",
"Validation score: -5.311732\n",
"Iteration 230, loss = 0.34654780\n",
"Validation score: -5.287133\n",
"Iteration 231, loss = 0.34535503\n",
"Validation score: -5.262497\n",
"Iteration 232, loss = 0.34415940\n",
"Validation score: -5.237820\n",
"Iteration 233, loss = 0.34296084\n",
"Validation score: -5.213102\n",
"Iteration 234, loss = 0.34175907\n",
"Validation score: -5.188339\n",
"Iteration 235, loss = 0.34055370\n",
"Validation score: -5.163530\n",
"Iteration 236, loss = 0.33934617\n",
"Validation score: -5.138678\n",
"Iteration 237, loss = 0.33813566\n",
"Validation score: -5.113780\n",
"Iteration 238, loss = 0.33692183\n",
"Validation score: -5.088836\n",
"Iteration 239, loss = 0.33570461\n",
"Validation score: -5.063842\n",
"Iteration 240, loss = 0.33448390\n",
"Validation score: -5.038799\n",
"Iteration 241, loss = 0.33325964\n",
"Validation score: -5.013705\n",
"Iteration 242, loss = 0.33203176\n",
"Validation score: -4.988557\n",
"Iteration 243, loss = 0.33080018\n",
"Validation score: -4.963356\n",
"Iteration 244, loss = 0.32956484\n",
"Validation score: -4.938099\n",
"Iteration 245, loss = 0.32832680\n",
"Validation score: -4.912791\n",
"Iteration 246, loss = 0.32708593\n",
"Validation score: -4.887431\n",
"Iteration 247, loss = 0.32584126\n",
"Validation score: -4.862016\n",
"Iteration 248, loss = 0.32459273\n",
"Validation score: -4.836545\n",
"Iteration 249, loss = 0.32334024\n",
"Validation score: -4.811016\n",
"Iteration 250, loss = 0.32208375\n",
"Validation score: -4.785430\n",
"Iteration 251, loss = 0.32082318\n",
"Validation score: -4.759784\n",
"Iteration 252, loss = 0.31955847\n",
"Validation score: -4.734077\n",
"Iteration 253, loss = 0.31828956\n",
"Validation score: -4.708308\n",
"Iteration 254, loss = 0.31701640\n",
"Validation score: -4.682477\n",
"Iteration 255, loss = 0.31573895\n",
"Validation score: -4.656582\n",
"Iteration 256, loss = 0.31445714\n",
"Validation score: -4.630622\n",
"Iteration 257, loss = 0.31317160\n",
"Validation score: -4.604600\n",
"Iteration 258, loss = 0.31188194\n",
"Validation score: -4.578514\n",
"Iteration 259, loss = 0.31058787\n",
"Validation score: -4.552362\n",
"Iteration 260, loss = 0.30929059\n",
"Validation score: -4.526151\n",
"Iteration 261, loss = 0.30799005\n",
"Validation score: -4.499877\n",
"Iteration 262, loss = 0.30668511\n",
"Validation score: -4.473540\n",
"Iteration 263, loss = 0.30537570\n",
"Validation score: -4.447139\n",
"Iteration 264, loss = 0.30406179\n",
"Validation score: -4.420674\n",
"Iteration 265, loss = 0.30274404\n",
"Validation score: -4.394146\n",
"Iteration 266, loss = 0.30142235\n",
"Validation score: -4.367556\n",
"Iteration 267, loss = 0.30009613\n",
"Validation score: -4.340901\n",
"Iteration 268, loss = 0.29876532\n",
"Validation score: -4.314182\n",
"Iteration 269, loss = 0.29742989\n",
"Validation score: -4.287397\n",
"Iteration 270, loss = 0.29608981\n",
"Validation score: -4.260547\n",
"Iteration 271, loss = 0.29474503\n",
"Validation score: -4.233629\n",
"Iteration 272, loss = 0.29339554\n",
"Validation score: -4.206645\n",
"Iteration 273, loss = 0.29204132\n",
"Validation score: -4.179593\n",
"Iteration 274, loss = 0.29068234\n",
"Validation score: -4.152474\n",
"Iteration 275, loss = 0.28931860\n",
"Validation score: -4.125288\n",
"Iteration 276, loss = 0.28795037\n",
"Validation score: -4.098036\n",
"Iteration 277, loss = 0.28657787\n",
"Validation score: -4.070717\n",
"Iteration 278, loss = 0.28520064\n",
"Validation score: -4.043333\n",
"Iteration 279, loss = 0.28381866\n",
"Validation score: -4.015883\n",
"Iteration 280, loss = 0.28243193\n",
"Validation score: -3.988367\n",
"Iteration 281, loss = 0.28104045\n",
"Validation score: -3.960785\n",
"Iteration 282, loss = 0.27964423\n",
"Validation score: -3.933139\n",
"Iteration 283, loss = 0.27824328\n",
"Validation score: -3.905427\n",
"Iteration 284, loss = 0.27683761\n",
"Validation score: -3.877650\n",
"Iteration 285, loss = 0.27542723\n",
"Validation score: -3.849810\n",
"Iteration 286, loss = 0.27401217\n",
"Validation score: -3.821906\n",
"Iteration 287, loss = 0.27259245\n",
"Validation score: -3.793939\n",
"Iteration 288, loss = 0.27116809\n",
"Validation score: -3.765910\n",
"Iteration 289, loss = 0.26973913\n",
"Validation score: -3.737820\n",
"Iteration 290, loss = 0.26830560\n",
"Validation score: -3.709670\n",
"Iteration 291, loss = 0.26686754\n",
"Validation score: -3.681461\n",
"Iteration 292, loss = 0.26542498\n",
"Validation score: -3.653192\n",
"Iteration 293, loss = 0.26397796\n",
"Validation score: -3.624867\n",
"Iteration 294, loss = 0.26252654\n",
"Validation score: -3.596486\n",
"Iteration 295, loss = 0.26107077\n",
"Validation score: -3.568049\n",
"Iteration 296, loss = 0.25961069\n",
"Validation score: -3.539559\n",
"Iteration 297, loss = 0.25814636\n",
"Validation score: -3.511016\n",
"Iteration 298, loss = 0.25667784\n",
"Validation score: -3.482422\n",
"Iteration 299, loss = 0.25520519\n",
"Validation score: -3.453779\n",
"Iteration 300, loss = 0.25372848\n",
"Validation score: -3.425088\n",
"Iteration 301, loss = 0.25224777\n",
"Validation score: -3.396350\n",
"Iteration 302, loss = 0.25076314\n",
"Validation score: -3.367567\n",
"Iteration 303, loss = 0.24927466\n",
"Validation score: -3.338741\n",
"Iteration 304, loss = 0.24778240\n",
"Validation score: -3.309874\n",
"Iteration 305, loss = 0.24628645\n",
"Validation score: -3.280967\n",
"Iteration 306, loss = 0.24478689\n",
"Validation score: -3.252022\n",
"Iteration 307, loss = 0.24328381\n",
"Validation score: -3.223042\n",
"Iteration 308, loss = 0.24177729\n",
"Validation score: -3.194027\n",
"Iteration 309, loss = 0.24026743\n",
"Validation score: -3.164981\n",
"Iteration 310, loss = 0.23875503\n",
"Validation score: -3.135908\n",
"Iteration 311, loss = 0.23724014\n",
"Validation score: -3.106812\n",
"Iteration 312, loss = 0.23572229\n",
"Validation score: -3.077692\n",
"Iteration 313, loss = 0.23420157\n",
"Validation score: -3.048552\n",
"Iteration 314, loss = 0.23267807\n",
"Validation score: -3.019393\n",
"Iteration 315, loss = 0.23115229\n",
"Validation score: -2.990221\n",
"Iteration 316, loss = 0.22962449\n",
"Validation score: -2.961035\n",
"Iteration 317, loss = 0.22809429\n",
"Validation score: -2.931840\n",
"Iteration 318, loss = 0.22656177\n",
"Validation score: -2.902636\n",
"Iteration 319, loss = 0.22502705\n",
"Validation score: -2.873426\n",
"Iteration 320, loss = 0.22349024\n",
"Validation score: -2.844212\n",
"Iteration 321, loss = 0.22195145\n",
"Validation score: -2.814997\n",
"Iteration 322, loss = 0.22041079\n",
"Validation score: -2.785782\n",
"Iteration 323, loss = 0.21886840\n",
"Validation score: -2.756570\n",
"Iteration 324, loss = 0.21732474\n",
"Validation score: -2.727365\n",
"Iteration 325, loss = 0.21577982\n",
"Validation score: -2.698169\n",
"Iteration 326, loss = 0.21423357\n",
"Validation score: -2.668986\n",
"Iteration 327, loss = 0.21268613\n",
"Validation score: -2.639817\n",
"Iteration 328, loss = 0.21113762\n",
"Validation score: -2.610666\n",
"Iteration 329, loss = 0.20958819\n",
"Validation score: -2.581534\n",
"Iteration 330, loss = 0.20803797\n",
"Validation score: -2.552425\n",
"Iteration 331, loss = 0.20648711\n",
"Validation score: -2.523342\n",
"Iteration 332, loss = 0.20493574\n",
"Validation score: -2.494286\n",
"Iteration 333, loss = 0.20338401\n",
"Validation score: -2.465262\n",
"Iteration 334, loss = 0.20183207\n",
"Validation score: -2.436272\n",
"Iteration 335, loss = 0.20028008\n",
"Validation score: -2.407319\n",
"Iteration 336, loss = 0.19872819\n",
"Validation score: -2.378407\n",
"Iteration 337, loss = 0.19717655\n",
"Validation score: -2.349537\n",
"Iteration 338, loss = 0.19562533\n",
"Validation score: -2.320714\n",
"Iteration 339, loss = 0.19407467\n",
"Validation score: -2.291941\n",
"Iteration 340, loss = 0.19252475\n",
"Validation score: -2.263220\n",
"Iteration 341, loss = 0.19097573\n",
"Validation score: -2.234556\n",
"Iteration 342, loss = 0.18942777\n",
"Validation score: -2.205950\n",
"Iteration 343, loss = 0.18788103\n",
"Validation score: -2.177407\n",
"Iteration 344, loss = 0.18633570\n",
"Validation score: -2.148930\n",
"Iteration 345, loss = 0.18479193\n",
"Validation score: -2.120522\n",
"Iteration 346, loss = 0.18324990\n",
"Validation score: -2.092186\n",
"Iteration 347, loss = 0.18170978\n",
"Validation score: -2.063925\n",
"Iteration 348, loss = 0.18017174\n",
"Validation score: -2.035744\n",
"Iteration 349, loss = 0.17863595\n",
"Validation score: -2.007645\n",
"Iteration 350, loss = 0.17710260\n",
"Validation score: -1.979632\n",
"Iteration 351, loss = 0.17557185\n",
"Validation score: -1.951708\n",
"Iteration 352, loss = 0.17404388\n",
"Validation score: -1.923876\n",
"Iteration 353, loss = 0.17251887\n",
"Validation score: -1.896141\n",
"Iteration 354, loss = 0.17099699\n",
"Validation score: -1.868504\n",
"Iteration 355, loss = 0.16947842\n",
"Validation score: -1.840970\n",
"Iteration 356, loss = 0.16796335\n",
"Validation score: -1.813542\n",
"Iteration 357, loss = 0.16645193\n",
"Validation score: -1.786223\n",
"Iteration 358, loss = 0.16494469\n",
"Validation score: -1.759018\n",
"Iteration 359, loss = 0.16344150\n",
"Validation score: -1.731940\n",
"Iteration 360, loss = 0.16194271\n",
"Validation score: -1.704997\n",
"Iteration 361, loss = 0.16044907\n",
"Validation score: -1.678178\n",
"Iteration 362, loss = 0.15896005\n",
"Validation score: -1.651487\n",
"Iteration 363, loss = 0.15747583\n",
"Validation score: -1.624927\n",
"Iteration 364, loss = 0.15599657\n",
"Validation score: -1.598501\n",
"Iteration 365, loss = 0.15452243\n",
"Validation score: -1.572212\n",
"Iteration 366, loss = 0.15305359\n",
"Validation score: -1.546063\n",
"Iteration 367, loss = 0.15159021\n",
"Validation score: -1.520057\n",
"Iteration 368, loss = 0.15013245\n",
"Validation score: -1.494196\n",
"Iteration 369, loss = 0.14868049\n",
"Validation score: -1.468484\n",
"Iteration 370, loss = 0.14723448\n",
"Validation score: -1.442923\n",
"Iteration 371, loss = 0.14579460\n",
"Validation score: -1.417518\n",
"Iteration 372, loss = 0.14436100\n",
"Validation score: -1.392269\n",
"Iteration 373, loss = 0.14293386\n",
"Validation score: -1.367181\n",
"Iteration 374, loss = 0.14151332\n",
"Validation score: -1.342256\n",
"Iteration 375, loss = 0.14009956\n",
"Validation score: -1.317497\n",
"Iteration 376, loss = 0.13869273\n",
"Validation score: -1.292907\n",
"Iteration 377, loss = 0.13729299\n",
"Validation score: -1.268489\n",
"Iteration 378, loss = 0.13590065\n",
"Validation score: -1.244245\n",
"Iteration 379, loss = 0.13451575\n",
"Validation score: -1.220178\n",
"Iteration 380, loss = 0.13313840\n",
"Validation score: -1.196292\n",
"Iteration 381, loss = 0.13176874\n",
"Validation score: -1.172588\n",
"Iteration 382, loss = 0.13040693\n",
"Validation score: -1.149068\n",
"Iteration 383, loss = 0.12905311\n",
"Validation score: -1.125736\n",
"Iteration 384, loss = 0.12770742\n",
"Validation score: -1.102594\n",
"Iteration 385, loss = 0.12637000\n",
"Validation score: -1.079643\n",
"Iteration 386, loss = 0.12504100\n",
"Validation score: -1.056887\n",
"Iteration 387, loss = 0.12372054\n",
"Validation score: -1.034328\n",
"Iteration 388, loss = 0.12240877\n",
"Validation score: -1.011967\n",
"Iteration 389, loss = 0.12110582\n",
"Validation score: -0.989807\n",
"Iteration 390, loss = 0.11981181\n",
"Validation score: -0.967850\n",
"Iteration 391, loss = 0.11852688\n",
"Validation score: -0.946098\n",
"Iteration 392, loss = 0.11725115\n",
"Validation score: -0.924553\n",
"Iteration 393, loss = 0.11598474\n",
"Validation score: -0.903217\n",
"Iteration 394, loss = 0.11472777\n",
"Validation score: -0.882091\n",
"Iteration 395, loss = 0.11348036\n",
"Validation score: -0.861178\n",
"Iteration 396, loss = 0.11224269\n",
"Validation score: -0.840480\n",
"Iteration 397, loss = 0.11101506\n",
"Validation score: -0.819998\n",
"Iteration 398, loss = 0.10979734\n",
"Validation score: -0.799735\n",
"Iteration 399, loss = 0.10858963\n",
"Validation score: -0.779691\n",
"Iteration 400, loss = 0.10739203\n",
"Validation score: -0.759868\n",
"Iteration 401, loss = 0.10620463\n",
"Validation score: -0.740268\n",
"Iteration 402, loss = 0.10502753\n",
"Validation score: -0.720890\n",
"Iteration 403, loss = 0.10386082\n",
"Validation score: -0.701738\n",
"Iteration 404, loss = 0.10270457\n",
"Validation score: -0.682811\n",
"Iteration 405, loss = 0.10155889\n",
"Validation score: -0.664110\n",
"Iteration 406, loss = 0.10042384\n",
"Validation score: -0.645638\n",
"Iteration 407, loss = 0.09929949\n",
"Validation score: -0.627393\n",
"Iteration 408, loss = 0.09818594\n",
"Validation score: -0.609379\n",
"Iteration 409, loss = 0.09708324\n",
"Validation score: -0.591594\n",
"Iteration 410, loss = 0.09599146\n",
"Validation score: -0.574040\n",
"Iteration 411, loss = 0.09491065\n",
"Validation score: -0.556718\n",
"Iteration 412, loss = 0.09384088\n",
"Validation score: -0.539627\n",
"Iteration 413, loss = 0.09278220\n",
"Validation score: -0.522769\n",
"Iteration 414, loss = 0.09173467\n",
"Validation score: -0.506143\n",
"Iteration 415, loss = 0.09069848\n",
"Validation score: -0.489751\n",
"Iteration 416, loss = 0.08967364\n",
"Validation score: -0.473593\n",
"Iteration 417, loss = 0.08866010\n",
"Validation score: -0.457669\n",
"Iteration 418, loss = 0.08765804\n",
"Validation score: -0.441981\n",
"Iteration 419, loss = 0.08666743\n",
"Validation score: -0.426526\n",
"Iteration 420, loss = 0.08568822\n",
"Validation score: -0.411307\n",
"Iteration 421, loss = 0.08472043\n",
"Validation score: -0.396321\n",
"Iteration 422, loss = 0.08376408\n",
"Validation score: -0.381570\n",
"Iteration 423, loss = 0.08281920\n",
"Validation score: -0.367052\n",
"Iteration 424, loss = 0.08188578\n",
"Validation score: -0.352766\n",
"Iteration 425, loss = 0.08096384\n",
"Validation score: -0.338714\n",
"Iteration 426, loss = 0.08005339\n",
"Validation score: -0.324893\n",
"Iteration 427, loss = 0.07915443\n",
"Validation score: -0.311303\n",
"Iteration 428, loss = 0.07826696\n",
"Validation score: -0.297944\n",
"Iteration 429, loss = 0.07739097\n",
"Validation score: -0.284815\n",
"Iteration 430, loss = 0.07652646\n",
"Validation score: -0.271914\n",
"Iteration 431, loss = 0.07567342\n",
"Validation score: -0.259242\n",
"Iteration 432, loss = 0.07483183\n",
"Validation score: -0.246796\n",
"Iteration 433, loss = 0.07400168\n",
"Validation score: -0.234576\n",
"Iteration 434, loss = 0.07318294\n",
"Validation score: -0.222582\n",
"Iteration 435, loss = 0.07237559\n",
"Validation score: -0.210811\n",
"Iteration 436, loss = 0.07157960\n",
"Validation score: -0.199262\n",
"Iteration 437, loss = 0.07079495\n",
"Validation score: -0.187935\n",
"Iteration 438, loss = 0.07002160\n",
"Validation score: -0.176827\n",
"Iteration 439, loss = 0.06925951\n",
"Validation score: -0.165938\n",
"Iteration 440, loss = 0.06850864\n",
"Validation score: -0.155266\n",
"Iteration 441, loss = 0.06776896\n",
"Validation score: -0.144810\n",
"Iteration 442, loss = 0.06704041\n",
"Validation score: -0.134567\n",
"Iteration 443, loss = 0.06632294\n",
"Validation score: -0.124537\n",
"Iteration 444, loss = 0.06561651\n",
"Validation score: -0.114717\n",
"Iteration 445, loss = 0.06492106\n",
"Validation score: -0.105107\n",
"Iteration 446, loss = 0.06423670\n",
"Validation score: -0.095704\n",
"Iteration 447, loss = 0.06356336\n",
"Validation score: -0.086508\n",
"Iteration 448, loss = 0.06290081\n",
"Validation score: -0.077516\n",
"Iteration 449, loss = 0.06224901\n",
"Validation score: -0.068726\n",
"Iteration 450, loss = 0.06160787\n",
"Validation score: -0.060136\n",
"Iteration 451, loss = 0.06097732\n",
"Validation score: -0.051744\n",
"Iteration 452, loss = 0.06035728\n",
"Validation score: -0.043547\n",
"Iteration 453, loss = 0.05974768\n",
"Validation score: -0.035545\n",
"Iteration 454, loss = 0.05914844\n",
"Validation score: -0.027734\n",
"Iteration 455, loss = 0.05855948\n",
"Validation score: -0.020112\n",
"Iteration 456, loss = 0.05798071\n",
"Validation score: -0.012677\n",
"Iteration 457, loss = 0.05741205\n",
"Validation score: -0.005427\n",
"Iteration 458, loss = 0.05685341\n",
"Validation score: 0.001640\n",
"Iteration 459, loss = 0.05630470\n",
"Validation score: 0.008527\n",
"Iteration 460, loss = 0.05576583\n",
"Validation score: 0.015236\n",
"Iteration 461, loss = 0.05523671\n",
"Validation score: 0.021770\n",
"Iteration 462, loss = 0.05471725\n",
"Validation score: 0.028131\n",
"Iteration 463, loss = 0.05420735\n",
"Validation score: 0.034321\n",
"Iteration 464, loss = 0.05370691\n",
"Validation score: 0.040343\n",
"Iteration 465, loss = 0.05321583\n",
"Validation score: 0.046199\n",
"Iteration 466, loss = 0.05273402\n",
"Validation score: 0.051892\n",
"Iteration 467, loss = 0.05226138\n",
"Validation score: 0.057424\n",
"Iteration 468, loss = 0.05179779\n",
"Validation score: 0.062797\n",
"Iteration 469, loss = 0.05134316\n",
"Validation score: 0.068015\n",
"Iteration 470, loss = 0.05089739\n",
"Validation score: 0.073079\n",
"Iteration 471, loss = 0.05046037\n",
"Validation score: 0.077993\n",
"Iteration 472, loss = 0.05003198\n",
"Validation score: 0.082758\n",
"Iteration 473, loss = 0.04961213\n",
"Validation score: 0.087378\n",
"Iteration 474, loss = 0.04920071\n",
"Validation score: 0.091854\n",
"Iteration 475, loss = 0.04879761\n",
"Validation score: 0.096190\n",
"Iteration 476, loss = 0.04840271\n",
"Validation score: 0.100387\n",
"Iteration 477, loss = 0.04801591\n",
"Validation score: 0.104450\n",
"Iteration 478, loss = 0.04763709\n",
"Validation score: 0.108379\n",
"Iteration 479, loss = 0.04726616\n",
"Validation score: 0.112178\n",
"Iteration 480, loss = 0.04690298\n",
"Validation score: 0.115849\n",
"Iteration 481, loss = 0.04654746\n",
"Validation score: 0.119394\n",
"Iteration 482, loss = 0.04619948\n",
"Validation score: 0.122817\n",
"Iteration 483, loss = 0.04585893\n",
"Validation score: 0.126120\n",
"Iteration 484, loss = 0.04552569\n",
"Validation score: 0.129305\n",
"Iteration 485, loss = 0.04519965\n",
"Validation score: 0.132375\n",
"Iteration 486, loss = 0.04488070\n",
"Validation score: 0.135332\n",
"Iteration 487, loss = 0.04456873\n",
"Validation score: 0.138179\n",
"Iteration 488, loss = 0.04426362\n",
"Validation score: 0.140918\n",
"Iteration 489, loss = 0.04396527\n",
"Validation score: 0.143552\n",
"Iteration 490, loss = 0.04367356\n",
"Validation score: 0.146083\n",
"Iteration 491, loss = 0.04338837\n",
"Validation score: 0.148514\n",
"Iteration 492, loss = 0.04310960\n",
"Validation score: 0.150847\n",
"Iteration 493, loss = 0.04283713\n",
"Validation score: 0.153084\n",
"Iteration 494, loss = 0.04257086\n",
"Validation score: 0.155228\n",
"Iteration 495, loss = 0.04231067\n",
"Validation score: 0.157282\n",
"Iteration 496, loss = 0.04205646\n",
"Validation score: 0.159247\n",
"Iteration 497, loss = 0.04180817\n",
"Validation score: 0.161125\n",
"Iteration 498, loss = 0.04156565\n",
"Validation score: 0.162920\n",
"Iteration 499, loss = 0.04132879\n",
"Validation score: 0.164632\n",
"Iteration 500, loss = 0.04109748\n",
"Validation score: 0.166265\n",
"Iteration 501, loss = 0.04087160\n",
"Validation score: 0.167820\n",
"Iteration 502, loss = 0.04065106\n",
"Validation score: 0.169300\n",
"Iteration 503, loss = 0.04043574\n",
"Validation score: 0.170708\n",
"Iteration 504, loss = 0.04022555\n",
"Validation score: 0.172044\n",
"Iteration 505, loss = 0.04002038\n",
"Validation score: 0.173312\n",
"Iteration 506, loss = 0.03982012\n",
"Validation score: 0.174513\n",
"Iteration 507, loss = 0.03962468\n",
"Validation score: 0.175650\n",
"Iteration 508, loss = 0.03943396\n",
"Validation score: 0.176724\n",
"Iteration 509, loss = 0.03924785\n",
"Validation score: 0.177738\n",
"Iteration 510, loss = 0.03906626\n",
"Validation score: 0.178694\n",
"Iteration 511, loss = 0.03888909\n",
"Validation score: 0.179592\n",
"Iteration 512, loss = 0.03871624\n",
"Validation score: 0.180436\n",
"Iteration 513, loss = 0.03854763\n",
"Validation score: 0.181227\n",
"Iteration 514, loss = 0.03838315\n",
"Validation score: 0.181967\n",
"Iteration 515, loss = 0.03822272\n",
"Validation score: 0.182659\n",
"Iteration 516, loss = 0.03806624\n",
"Validation score: 0.183302\n",
"Iteration 517, loss = 0.03791362\n",
"Validation score: 0.183900\n",
"Iteration 518, loss = 0.03776478\n",
"Validation score: 0.184454\n",
"Iteration 519, loss = 0.03761962\n",
"Validation score: 0.184966\n",
"Iteration 520, loss = 0.03747806\n",
"Validation score: 0.185437\n",
"Iteration 521, loss = 0.03734002\n",
"Validation score: 0.185869\n",
"Iteration 522, loss = 0.03720541\n",
"Validation score: 0.186264\n",
"Iteration 523, loss = 0.03707416\n",
"Validation score: 0.186622\n",
"Iteration 524, loss = 0.03694616\n",
"Validation score: 0.186947\n",
"Iteration 525, loss = 0.03682136\n",
"Validation score: 0.187238\n",
"Iteration 526, loss = 0.03669967\n",
"Validation score: 0.187498\n",
"Iteration 527, loss = 0.03658101\n",
"Validation score: 0.187727\n",
"Iteration 528, loss = 0.03646531\n",
"Validation score: 0.187928\n",
"Iteration 529, loss = 0.03635249\n",
"Validation score: 0.188102\n",
"Iteration 530, loss = 0.03624248\n",
"Validation score: 0.188249\n",
"Iteration 531, loss = 0.03613521\n",
"Validation score: 0.188372\n",
"Iteration 532, loss = 0.03603060\n",
"Validation score: 0.188471\n",
"Iteration 533, loss = 0.03592859\n",
"Validation score: 0.188548\n",
"Iteration 534, loss = 0.03582911\n",
"Validation score: 0.188604\n",
"Iteration 535, loss = 0.03573210\n",
"Validation score: 0.188639\n",
"Iteration 536, loss = 0.03563748\n",
"Validation score: 0.188656\n",
"Iteration 537, loss = 0.03554519\n",
"Validation score: 0.188655\n",
"Iteration 538, loss = 0.03545518\n",
"Validation score: 0.188637\n",
"Iteration 539, loss = 0.03536737\n",
"Validation score: 0.188604\n",
"Iteration 540, loss = 0.03528172\n",
"Validation score: 0.188555\n",
"Iteration 541, loss = 0.03519815\n",
"Validation score: 0.188493\n",
"Iteration 542, loss = 0.03511662\n",
"Validation score: 0.188419\n",
"Iteration 543, loss = 0.03503706\n",
"Validation score: 0.188332\n",
"Iteration 544, loss = 0.03495942\n",
"Validation score: 0.188234\n",
"Iteration 545, loss = 0.03488365\n",
"Validation score: 0.188126\n",
"Iteration 546, loss = 0.03480969\n",
"Validation score: 0.188009\n",
"Iteration 547, loss = 0.03473750\n",
"Validation score: 0.187883\n",
"Validation score did not improve more than tol=0.000000 for 10 consecutive epochs. Stopping.\n"
],
"name": "stdout"
},
{
"output_type": "execute_result",
"data": {
"text/plain": [
"MLPRegressor(activation='relu', alpha=0.0001, batch_size='auto', beta_1=0.9,\n",
" beta_2=0.999, early_stopping=True, epsilon=1e-08,\n",
" hidden_layer_sizes=(4,), learning_rate='constant',\n",
" learning_rate_init=0.001, max_fun=15000, max_iter=20000,\n",
" momentum=0.9, n_iter_no_change=10, nesterovs_momentum=True,\n",
" power_t=0.5, random_state=1, shuffle=True, solver='adam',\n",
" tol=1e-07, validation_fraction=0.1, verbose=1, warm_start=False)"
]
},
"metadata": {
"tags": []
},
"execution_count": 13
}
]
},
{
"cell_type": "code",
"metadata": {
"id": "TnYNBflauIzH",
"colab_type": "code",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 1000
},
"outputId": "7852a3a4-f96f-4c63-fcba-46ece5724548"
},
"source": [
"print(\"Training set score: %f\" % reg.score(X_train, Y_train))\n",
"print(\"Test set score: %f\" % reg.score(X_test, Y_test))\n",
"\n",
"for idx, sample in enumerate(X_test):\n",
        "    print(f\"Label {Y_test[idx][0]:.1f} | Regression {reg.predict([sample])[0]:.1f} \\n DATA {list(map(lambda x: round(x,1), sample.tolist()))}\")"
],
"execution_count": 14,
"outputs": [
{
"output_type": "stream",
"text": [
"Training set score: 0.219207\n",
"Test set score: 0.170286\n",
"Label 0.6 | Regression 0.4 \n",
" DATA [0.3, 0.7, 0.4, 0.3, 0.4, 0.1, 0.4, 0.2, 0.3, 0.1]\n",
"Label 0.7 | Regression 0.4 \n",
" DATA [0.2, 0.6, 0.4, 0.4, 0.3, 0.1, 0.1, 0.3, 0.2, 0.3]\n",
"Label 0.2 | Regression -0.0 \n",
" DATA [0.0, 0.4, 0.1, 0.0, 0.1, 0.0, 0.0, 0.0, 0.0, 0.0]\n",
"Label 0.3 | Regression 0.5 \n",
" DATA [0.4, 0.7, 0.5, 0.4, 0.6, 0.1, 0.3, 0.3, 0.3, 0.3]\n",
"Label 0.1 | Regression 0.3 \n",
" DATA [0.1, 0.5, 0.2, 0.1, 0.2, 0.1, 0.1, 0.5, 0.4, 0.1]\n",
"Label 0.7 | Regression 0.3 \n",
" DATA [0.2, 0.4, 0.5, 0.5, 0.7, 0.0, 0.0, 0.0, 0.0, 0.0]\n",
"Label 0.7 | Regression 0.5 \n",
" DATA [0.4, 0.8, 0.4, 0.3, 0.4, 0.3, 0.5, 0.4, 0.3, 0.4]\n",
"Label 0.3 | Regression 0.2 \n",
" DATA [0.1, 0.2, 0.5, 0.4, 0.2, 0.0, 0.0, 0.0, 0.0, 0.0]\n",
"Label 1.0 | Regression 1.1 \n",
" DATA [1.0, 0.9, 1.0, 1.0, 0.9, 0.7, 0.7, 0.7, 0.7, 0.4]\n",
"Label 0.8 | Regression 0.4 \n",
" DATA [0.3, 0.6, 0.5, 0.6, 0.6, 0.0, 0.1, 0.3, 0.3, 0.0]\n",
"Label 0.7 | Regression 0.3 \n",
" DATA [0.2, 0.4, 0.5, 0.5, 0.7, 0.0, 0.0, 0.0, 0.0, 0.0]\n",
"Label 0.5 | Regression 0.3 \n",
" DATA [0.1, 0.5, 0.2, 0.1, 0.2, 0.1, 0.1, 0.5, 0.4, 0.1]\n",
"Label 0.8 | Regression 0.2 \n",
" DATA [0.2, 0.6, 0.4, 0.4, 0.3, 0.0, 0.0, 0.0, 0.0, 0.0]\n",
"Label 1.0 | Regression 1.2 \n",
" DATA [0.9, 0.9, 0.9, 0.6, 0.8, 1.0, 0.7, 1.0, 1.0, 0.6]\n",
"Label 0.9 | Regression 1.1 \n",
" DATA [0.9, 0.8, 1.0, 0.9, 0.9, 0.5, 0.4, 0.9, 0.6, 0.8]\n",
"Label 0.7 | Regression 0.2 \n",
" DATA [0.2, 0.6, 0.3, 0.4, 0.3, 0.0, 0.0, 0.0, 0.0, 0.0]\n",
"Label 0.9 | Regression 0.8 \n",
" DATA [0.4, 0.5, 0.6, 0.5, 0.6, 0.5, 0.4, 0.8, 0.8, 0.3]\n",
"Label 0.9 | Regression 1.1 \n",
" DATA [0.9, 0.8, 0.9, 0.9, 0.7, 0.6, 0.5, 0.8, 0.5, 1.0]\n",
"Label 0.1 | Regression -0.1 \n",
" DATA [0.0, 0.4, 0.0, 0.0, 0.1, 0.0, 0.3, 0.0, 0.0, 0.0]\n",
"Label 0.5 | Regression 0.5 \n",
" DATA [0.3, 0.7, 0.4, 0.3, 0.5, 0.3, 0.5, 0.4, 0.2, 0.4]\n",
"Label 0.5 | Regression 0.4 \n",
" DATA [0.3, 0.7, 0.3, 0.3, 0.5, 0.1, 0.1, 0.3, 0.2, 0.3]\n",
"Label 0.3 | Regression 0.2 \n",
" DATA [0.1, 0.5, 0.1, 0.1, 0.2, 0.0, 0.1, 0.3, 0.3, 0.0]\n",
"Label 0.7 | Regression 0.4 \n",
" DATA [0.3, 0.5, 0.5, 0.3, 0.6, 0.0, 0.1, 0.4, 0.3, 0.0]\n",
"Label 0.1 | Regression 0.1 \n",
" DATA [0.0, 0.1, 0.2, 0.2, 0.3, 0.0, 0.0, 0.0, 0.0, 0.0]\n",
"Label 1.0 | Regression 0.8 \n",
" DATA [0.5, 0.7, 0.6, 0.6, 0.8, 0.4, 0.7, 0.4, 0.3, 0.4]\n",
"Label 0.4 | Regression 0.0 \n",
" DATA [0.1, 0.3, 0.2, 0.2, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]\n",
"Label 0.3 | Regression 0.3 \n",
" DATA [0.2, 0.5, 0.3, 0.2, 0.6, 0.0, 0.1, 0.3, 0.3, 0.0]\n",
"Label 0.9 | Regression 0.2 \n",
" DATA [0.3, 0.5, 0.5, 0.4, 0.5, 0.0, 0.0, 0.0, 0.0, 0.0]\n",
"Label 0.5 | Regression 0.6 \n",
" DATA [0.4, 0.7, 0.5, 0.3, 0.6, 0.3, 0.3, 0.6, 0.7, 0.5]\n",
"Label 0.3 | Regression 0.4 \n",
" DATA [0.1, 0.5, 0.3, 0.3, 0.2, 0.2, 0.3, 0.4, 0.4, 0.3]\n",
"Label 0.3 | Regression 0.2 \n",
" DATA [0.1, 0.2, 0.5, 0.4, 0.2, 0.0, 0.0, 0.0, 0.0, 0.0]\n",
"Label 0.7 | Regression 0.2 \n",
" DATA [0.3, 0.5, 0.5, 0.4, 0.5, 0.0, 0.0, 0.0, 0.0, 0.0]\n",
"Label 1.0 | Regression 0.6 \n",
" DATA [0.4, 0.7, 0.5, 0.3, 0.5, 0.3, 0.2, 0.9, 0.9, 0.3]\n",
"Label 0.0 | Regression 0.2 \n",
" DATA [0.2, 0.4, 0.4, 0.4, 0.3, 0.0, 0.0, 0.0, 0.0, 0.0]\n",
"Label 0.2 | Regression 0.3 \n",
" DATA [0.1, 0.4, 0.3, 0.2, 0.4, 0.1, 0.1, 0.4, 0.3, 0.1]\n",
"Label 0.3 | Regression 0.5 \n",
" DATA [0.1, 0.3, 0.4, 0.1, 0.8, 0.1, 0.1, 0.6, 0.6, 0.6]\n",
"Label 0.9 | Regression 0.6 \n",
" DATA [0.4, 0.6, 0.6, 0.7, 0.6, 0.2, 0.4, 0.3, 0.2, 0.4]\n",
"Label 0.3 | Regression -0.0 \n",
" DATA [0.0, 0.4, 0.1, 0.0, 0.1, 0.0, 0.0, 0.0, 0.0, 0.0]\n",
"Label 0.3 | Regression 0.4 \n",
" DATA [0.2, 0.4, 0.4, 0.4, 0.3, 0.1, 0.2, 0.3, 0.3, 0.1]\n",
"Label 0.1 | Regression 0.2 \n",
" DATA [0.1, 0.3, 0.4, 0.3, 0.3, 0.0, 0.0, 0.0, 0.0, 0.0]\n",
"Label 0.2 | Regression 0.0 \n",
" DATA [0.0, 0.3, 0.1, 0.0, 0.1, 0.0, 0.1, 0.2, 0.2, 0.0]\n",
"Label 0.2 | Regression 0.0 \n",
" DATA [0.0, 0.4, 0.1, 0.1, 0.1, 0.0, 0.0, 0.0, 0.0, 0.0]\n",
"Label 0.2 | Regression -0.0 \n",
" DATA [0.0, 0.4, 0.1, 0.0, 0.1, 0.0, 0.2, 0.1, 0.1, 0.0]\n",
"Label 0.2 | Regression -0.0 \n",
" DATA [0.0, 0.3, 0.1, 0.0, 0.1, 0.0, 0.3, 0.1, 0.1, 0.1]\n",
"Label 1.0 | Regression 0.2 \n",
" DATA [0.3, 0.5, 0.5, 0.4, 0.4, 0.0, 0.0, 0.0, 0.0, 0.0]\n",
"Label 0.0 | Regression 0.2 \n",
" DATA [0.4, 0.9, 0.4, 0.3, 0.5, 0.0, 0.0, 0.0, 0.0, 0.0]\n",
"Label 0.3 | Regression 0.4 \n",
" DATA [0.2, 0.4, 0.4, 0.2, 0.6, 0.1, 0.1, 0.5, 0.5, 0.1]\n",
"Label 0.1 | Regression -0.1 \n",
" DATA [0.0, 0.2, 0.1, 0.1, 0.0, 0.0, 0.1, 0.1, 0.1, 0.0]\n",
"Label 0.5 | Regression 0.3 \n",
" DATA [0.1, 0.5, 0.2, 0.1, 0.2, 0.1, 0.1, 0.5, 0.4, 0.1]\n",
"Label 0.6 | Regression 0.8 \n",
" DATA [0.6, 0.9, 0.6, 0.4, 0.5, 0.6, 0.9, 0.5, 0.5, 0.4]\n",
"Label 0.6 | Regression 0.2 \n",
" DATA [0.4, 0.9, 0.4, 0.3, 0.5, 0.0, 0.0, 0.0, 0.0, 0.0]\n",
"Label 0.9 | Regression 0.5 \n",
" DATA [0.3, 0.7, 0.4, 0.3, 0.4, 0.2, 0.2, 0.6, 0.7, 0.3]\n",
"Label 0.7 | Regression 0.5 \n",
" DATA [0.3, 0.5, 0.5, 0.4, 0.5, 0.1, 0.2, 0.5, 0.4, 0.2]\n",
"Label 0.4 | Regression 0.2 \n",
" DATA [0.2, 0.6, 0.4, 0.4, 0.4, 0.0, 0.0, 0.0, 0.0, 0.0]\n",
"Label 0.5 | Regression 0.6 \n",
" DATA [0.4, 0.6, 0.5, 0.5, 0.7, 0.2, 0.2, 0.6, 0.6, 0.3]\n",
"Label 0.6 | Regression 0.6 \n",
" DATA [0.2, 0.4, 0.3, 0.2, 0.4, 0.2, 0.1, 0.8, 0.7, 0.9]\n",
"Label 0.8 | Regression 0.8 \n",
" DATA [0.6, 1.0, 0.5, 0.5, 0.5, 0.6, 0.5, 0.8, 0.8, 0.3]\n",
"Label 0.2 | Regression 0.2 \n",
" DATA [0.1, 0.3, 0.2, 0.2, 0.2, 0.0, 0.1, 0.3, 0.2, 0.0]\n",
"Label 0.4 | Regression 0.4 \n",
" DATA [0.4, 0.6, 0.5, 0.3, 0.7, 0.0, 0.1, 0.3, 0.3, 0.0]\n",
"Label 0.3 | Regression 0.2 \n",
" DATA [0.2, 0.4, 0.4, 0.4, 0.3, 0.0, 0.0, 0.0, 0.0, 0.0]\n",
"Label 0.1 | Regression -0.1 \n",
" DATA [0.0, 0.1, 0.2, 0.2, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]\n",
"Label 0.6 | Regression 0.4 \n",
" DATA [0.5, 0.5, 0.7, 0.7, 0.6, 0.0, 0.0, 0.0, 0.0, 0.0]\n",
"Label 0.1 | Regression 0.2 \n",
" DATA [0.1, 0.2, 0.5, 0.4, 0.2, 0.0, 0.0, 0.0, 0.0, 0.0]\n",
"Label 0.8 | Regression 0.4 \n",
" DATA [0.3, 0.7, 0.3, 0.3, 0.4, 0.1, 0.1, 0.3, 0.3, 0.2]\n"
],
"name": "stdout"
}
]
},
{
"cell_type": "code",
"metadata": {
"id": "soClf3UjB7q7",
"colab_type": "code",
"colab": {}
},
"source": [
        "import pickle\n",
        "# Bundle the fitted scaler with the network so inference can apply\n",
        "# the identical min-max normalization used during training.\n",
"model = {\n",
" 'network': reg,\n",
" 'norm': min_max_scaler\n",
"}\n",
"pickle.dump(model, open(\"model.p\", \"wb\"))"
],
"execution_count": 28,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "8JcGGl1CZJXz",
"colab_type": "code",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 69
},
"outputId": "925caf68-2180-48e5-b80c-db09b2b16540"
},
"source": [
        "# NOTE: pickle.load executes arbitrary code on load; only unpickle trusted files.\n",
        "model = pickle.load(open(\"model.p\", \"rb\"))\n",
"\n",
        "def predict(model, data_input):\n",
" norm_input = [min(1, max(0, x)) for x in model['norm'].transform([data_input])[0]]\n",
" return min(1, max(0, model['network'].predict([norm_input])[0]))\n",
"\n",
"data_input = X[9]\n",
"\n",
"print(f\"Regression {predict(model, data_input):.1f}\")"
],
"execution_count": 50,
"outputs": [
{
"output_type": "stream",
"text": [
"[ 0. 0. 500. 500. 633.14327783\n",
" 2836. 10. 283.6 300. 137.64543823]\n",
"Regression 0.7\n"
],
"name": "stdout"
}
]
}
]
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment