@ngopal
Created October 6, 2018 23:29
lstm toy
# Toy Example of LSTM
## Relevant Links
* https://www.kaggle.com/amirrezaeian/time-series-data-analysis-using-lstm-tutorial
* https://stackoverflow.com/questions/13703720/converting-between-datetime-timestamp-and-datetime64
* https://visualstudiomagazine.com/articles/2014/01/01/how-to-standardize-data-for-neural-networks.aspx
{
"cells": [
{
"cell_type": "code",
"execution_count": 25,
"metadata": {},
"outputs": [],
"source": [
"import pandas as pd\n",
"ibm = pd.read_csv(\"IBM_adjusted.txt\", header=None)"
]
},
{
"cell_type": "code",
"execution_count": 26,
"metadata": {},
"outputs": [],
"source": [
"# http://www.kibot.com/support.aspx#data_format\n",
"ibm.columns = [\"Date\",\"Time\",\"Open\",\"High\",\"Low\",\"Close\",\"Volume\"]"
]
},
{
"cell_type": "code",
"execution_count": 28,
"metadata": {},
"outputs": [],
"source": [
"# ibm['Datetime'] = pd.to_datetime(ibm['Date'])"
]
},
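{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Illustrative sketch (not part of the original run; names here are placeholders):\n",
"# the kibot file splits each timestamp across Date (MM/DD/YYYY) and Time (HH:MM),\n",
"# so the two columns can be combined into a single pandas datetime and used as the index:\n",
"timestamps = pd.to_datetime(ibm['Date'] + ' ' + ibm['Time'], format='%m/%d/%Y %H:%M')\n",
"ibm_dt = ibm.set_index(timestamps).drop(['Date', 'Time'], axis=1)"
]
},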
{
"cell_type": "code",
"execution_count": 30,
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"<div>\n",
"<style>\n",
" .dataframe thead tr:only-child th {\n",
" text-align: right;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: left;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>Date</th>\n",
" <th>Time</th>\n",
" <th>Open</th>\n",
" <th>High</th>\n",
" <th>Low</th>\n",
" <th>Close</th>\n",
" <th>Volume</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>2145764</th>\n",
" <td>08/17/2018</td>\n",
" <td>16:19</td>\n",
" <td>146.01</td>\n",
" <td>146.01</td>\n",
" <td>146.01</td>\n",
" <td>146.01</td>\n",
" <td>300</td>\n",
" </tr>\n",
" <tr>\n",
" <th>2145765</th>\n",
" <td>08/17/2018</td>\n",
" <td>16:21</td>\n",
" <td>146.07</td>\n",
" <td>146.07</td>\n",
" <td>146.07</td>\n",
" <td>146.07</td>\n",
" <td>2469</td>\n",
" </tr>\n",
" <tr>\n",
" <th>2145766</th>\n",
" <td>08/17/2018</td>\n",
" <td>16:23</td>\n",
" <td>146.01</td>\n",
" <td>146.01</td>\n",
" <td>146.01</td>\n",
" <td>146.01</td>\n",
" <td>200</td>\n",
" </tr>\n",
" <tr>\n",
" <th>2145767</th>\n",
" <td>08/17/2018</td>\n",
" <td>16:36</td>\n",
" <td>146.01</td>\n",
" <td>146.01</td>\n",
" <td>146.01</td>\n",
" <td>146.01</td>\n",
" <td>1800</td>\n",
" </tr>\n",
" <tr>\n",
" <th>2145768</th>\n",
" <td>08/17/2018</td>\n",
" <td>18:52</td>\n",
" <td>146.07</td>\n",
" <td>146.07</td>\n",
" <td>146.07</td>\n",
" <td>146.07</td>\n",
" <td>100</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"</div>"
],
"text/plain": [
" Date Time Open High Low Close Volume\n",
"2145764 08/17/2018 16:19 146.01 146.01 146.01 146.01 300\n",
"2145765 08/17/2018 16:21 146.07 146.07 146.07 146.07 2469\n",
"2145766 08/17/2018 16:23 146.01 146.01 146.01 146.01 200\n",
"2145767 08/17/2018 16:36 146.01 146.01 146.01 146.01 1800\n",
"2145768 08/17/2018 18:52 146.07 146.07 146.07 146.07 100"
]
},
"execution_count": 30,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"ibm.tail()"
]
},
{
"cell_type": "code",
"execution_count": 49,
"metadata": {},
"outputs": [],
"source": [
"import sys \n",
"import numpy as np # linear algebra\n",
"from scipy.stats import randint\n",
"import pandas as pd # data processing, CSV file I/O (e.g. pd.read_csv), data manipulation as in SQL\n",
"import matplotlib.pyplot as plt # this is used for the plot the graph \n",
"import seaborn as sns # used for plot interactive graph. \n",
"from sklearn.model_selection import train_test_split # to split the data into two parts\n",
"from sklearn.cross_validation import KFold # use for cross validation\n",
"from sklearn.preprocessing import StandardScaler # for normalization\n",
"from sklearn.preprocessing import MinMaxScaler\n",
"from sklearn.pipeline import Pipeline # pipeline making\n",
"from sklearn.model_selection import cross_val_score\n",
"from sklearn.feature_selection import SelectFromModel\n",
"from sklearn import metrics # for the check the error and accuracy of the model\n",
"from sklearn.metrics import mean_squared_error,r2_score\n",
"\n",
"## for Deep-learing:\n",
"import keras\n",
"from keras.layers import *\n",
"from keras.models import *\n",
"from keras.utils import to_categorical\n",
"from keras.optimizers import SGD \n",
"from keras.callbacks import EarlyStopping\n",
"from keras.utils import np_utils\n",
"import itertools\n",
"from keras.layers import LSTM\n",
"from keras.layers.convolutional import Conv1D\n",
"from keras.layers.convolutional import MaxPooling1D\n",
"from keras.layers import *"
]
},
{
"cell_type": "code",
"execution_count": 42,
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"<div>\n",
"<style>\n",
" .dataframe thead tr:only-child th {\n",
" text-align: right;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: left;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>Open</th>\n",
" <th>High</th>\n",
" <th>Low</th>\n",
" <th>Close</th>\n",
" <th>Volume</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>2145764</th>\n",
" <td>1.106044</td>\n",
" <td>1.105023</td>\n",
" <td>1.107076</td>\n",
" <td>1.106058</td>\n",
" <td>-0.396035</td>\n",
" </tr>\n",
" <tr>\n",
" <th>2145765</th>\n",
" <td>1.107538</td>\n",
" <td>1.106516</td>\n",
" <td>1.108569</td>\n",
" <td>1.107552</td>\n",
" <td>-0.351312</td>\n",
" </tr>\n",
" <tr>\n",
" <th>2145766</th>\n",
" <td>1.106044</td>\n",
" <td>1.105023</td>\n",
" <td>1.107076</td>\n",
" <td>1.106058</td>\n",
" <td>-0.398097</td>\n",
" </tr>\n",
" <tr>\n",
" <th>2145767</th>\n",
" <td>1.106044</td>\n",
" <td>1.105023</td>\n",
" <td>1.107076</td>\n",
" <td>1.106058</td>\n",
" <td>-0.365106</td>\n",
" </tr>\n",
" <tr>\n",
" <th>2145768</th>\n",
" <td>1.107538</td>\n",
" <td>1.106516</td>\n",
" <td>1.108569</td>\n",
" <td>1.107552</td>\n",
" <td>-0.400158</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"</div>"
],
"text/plain": [
" Open High Low Close Volume\n",
"2145764 1.106044 1.105023 1.107076 1.106058 -0.396035\n",
"2145765 1.107538 1.106516 1.108569 1.107552 -0.351312\n",
"2145766 1.106044 1.105023 1.107076 1.106058 -0.398097\n",
"2145767 1.106044 1.105023 1.107076 1.106058 -0.365106\n",
"2145768 1.107538 1.106516 1.108569 1.107552 -0.400158"
]
},
"execution_count": 42,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"ss = StandardScaler()\n",
"values = ss.fit_transform(ibm[[\"Open\",\"High\",\"Low\",\"Close\",\"Volume\"]])\n",
"df = pd.DataFrame(values)\n",
"df.columns = [\"Open\",\"High\",\"Low\",\"Close\",\"Volume\"]\n",
"df.tail()"
]
},
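{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Optional variant (illustrative sketch, not part of the run above; names are placeholders):\n",
"# fitting the scaler on every row lets test-set statistics leak into the standardization.\n",
"# One way to avoid that is to fit on the training portion only and reuse the fitted\n",
"# scaler on the remaining rows.\n",
"cols = [\"Open\", \"High\", \"Low\", \"Close\", \"Volume\"]\n",
"split_at = int(len(ibm) * 0.8)\n",
"ss_train_only = StandardScaler().fit(ibm[cols].iloc[:split_at])\n",
"values_no_leak = ss_train_only.transform(ibm[cols])"
]
},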
{
"cell_type": "code",
"execution_count": 44,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"(1716614, 1, 4) (1716614,) (429155, 1, 4) (429155,)\n"
]
}
],
"source": [
"# split into train and test sets\n",
"n_train_time = int(2145768*0.8)\n",
"n_test_time = 2145768 - n_train_time\n",
"train = values[:n_train_time, :]\n",
"test = values[n_train_time:, :]\n",
"##test = values[n_train_time:n_test_time, :]\n",
"\n",
"# split into input and outputs\n",
"train_X, train_y = train[:, :-1], train[:, -1]\n",
"test_X, test_y = test[:, :-1], test[:, -1]\n",
"\n",
"# reshape input to be 3D [samples, timesteps, features]\n",
"train_X = train_X.reshape((train_X.shape[0], 1, train_X.shape[1]))\n",
"test_X = test_X.reshape((test_X.shape[0], 1, test_X.shape[1]))\n",
"print(train_X.shape, train_y.shape, test_X.shape, test_y.shape) \n",
"# We reshaped the input into the 3D format as expected by LSTMs, namely [samples, timesteps, features]."
]
},
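{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Illustrative sketch (not part of the original run): the split above uses a single\n",
"# timestep per sample. To give the recurrent layer a longer history, one option is\n",
"# to slide a window of `n_steps` rows over the scaled features and predict the\n",
"# Volume of the row immediately after each window.\n",
"def make_windows(data, n_steps=10):\n",
"    X, y = [], []\n",
"    for i in range(n_steps, len(data)):\n",
"        X.append(data[i - n_steps:i, :-1])  # past n_steps rows of Open/High/Low/Close\n",
"        y.append(data[i, -1])               # scaled Volume of the next row\n",
"    return np.array(X), np.array(y)\n",
"\n",
"# X_w, y_w = make_windows(values, n_steps=10)  # X_w.shape -> (samples, 10, 4); memory-heavy at ~2.1M rows"
]
},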
{
"cell_type": "code",
"execution_count": 54,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"array([[[-1.61560232, -1.61632936, -1.6148801 , -1.61561293]],\n",
"\n",
" [[-1.61659808, -1.61632936, -1.61587597, -1.61660869]],\n",
"\n",
" [[-1.6161002 , -1.61632936, -1.61587597, -1.61561293]],\n",
"\n",
" ..., \n",
" [[ 1.49563339, 1.49481852, 1.49546416, 1.49465523]],\n",
"\n",
" [[ 1.49488658, 1.4943207 , 1.49496622, 1.49540205]],\n",
"\n",
" [[ 1.49463764, 1.4943207 , 1.49521519, 1.49465523]]])"
]
},
"execution_count": 54,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"train_X[]"
]
},
{
"cell_type": "code",
"execution_count": 50,
"metadata": {},
"outputs": [
{
"ename": "TypeError",
"evalue": "while_loop() got an unexpected keyword argument 'maximum_iterations'",
"output_type": "error",
"traceback": [
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[0;31mTypeError\u001b[0m Traceback (most recent call last)",
"\u001b[0;32m<ipython-input-50-ce8645f0ff21>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m()\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0mmodel\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mSequential\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 2\u001b[0;31m \u001b[0mmodel\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0madd\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mGRU\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m100\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0minput_shape\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtrain_X\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mshape\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtrain_X\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mshape\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m2\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 3\u001b[0m \u001b[0mmodel\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0madd\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mDropout\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m0.2\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 4\u001b[0m \u001b[0mmodel\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0madd\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mDense\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 5\u001b[0m \u001b[0mmodel\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcompile\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mloss\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m'mean_squared_error'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0moptimizer\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m'adam'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m/usr/local/anaconda3/envs/py36/lib/python3.6/site-packages/keras/engine/sequential.py\u001b[0m in \u001b[0;36madd\u001b[0;34m(self, layer)\u001b[0m\n\u001b[1;32m 163\u001b[0m \u001b[0;31m# and create the node connecting the current layer\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 164\u001b[0m \u001b[0;31m# to the input layer we just created.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 165\u001b[0;31m \u001b[0mlayer\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 166\u001b[0m \u001b[0mset_inputs\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mTrue\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 167\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m/usr/local/anaconda3/envs/py36/lib/python3.6/site-packages/keras/layers/recurrent.py\u001b[0m in \u001b[0;36m__call__\u001b[0;34m(self, inputs, initial_state, constants, **kwargs)\u001b[0m\n\u001b[1;32m 530\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 531\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0minitial_state\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mNone\u001b[0m \u001b[0;32mand\u001b[0m \u001b[0mconstants\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 532\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0msuper\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mRNN\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m__call__\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0minputs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 533\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 534\u001b[0m \u001b[0;31m# If any of `initial_state` or `constants` are specified and are Keras\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m/usr/local/anaconda3/envs/py36/lib/python3.6/site-packages/keras/engine/base_layer.py\u001b[0m in \u001b[0;36m__call__\u001b[0;34m(self, inputs, **kwargs)\u001b[0m\n\u001b[1;32m 455\u001b[0m \u001b[0;31m# Actually call the layer,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 456\u001b[0m \u001b[0;31m# collecting output(s), mask(s), and shape(s).\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 457\u001b[0;31m \u001b[0moutput\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcall\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0minputs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 458\u001b[0m \u001b[0moutput_mask\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcompute_mask\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0minputs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mprevious_mask\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 459\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m/usr/local/anaconda3/envs/py36/lib/python3.6/site-packages/keras/layers/recurrent.py\u001b[0m in \u001b[0;36mcall\u001b[0;34m(self, inputs, mask, training, initial_state)\u001b[0m\n\u001b[1;32m 1647\u001b[0m \u001b[0mmask\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mmask\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1648\u001b[0m \u001b[0mtraining\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mtraining\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1649\u001b[0;31m initial_state=initial_state)\n\u001b[0m\u001b[1;32m 1650\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1651\u001b[0m \u001b[0;34m@\u001b[0m\u001b[0mproperty\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m/usr/local/anaconda3/envs/py36/lib/python3.6/site-packages/keras/layers/recurrent.py\u001b[0m in \u001b[0;36mcall\u001b[0;34m(self, inputs, mask, training, initial_state, constants)\u001b[0m\n\u001b[1;32m 647\u001b[0m \u001b[0mmask\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mmask\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 648\u001b[0m \u001b[0munroll\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0munroll\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 649\u001b[0;31m input_length=timesteps)\n\u001b[0m\u001b[1;32m 650\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mstateful\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 651\u001b[0m \u001b[0mupdates\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m/usr/local/anaconda3/envs/py36/lib/python3.6/site-packages/keras/backend/tensorflow_backend.py\u001b[0m in \u001b[0;36mrnn\u001b[0;34m(step_function, inputs, initial_states, go_backwards, mask, constants, unroll, input_length)\u001b[0m\n\u001b[1;32m 3009\u001b[0m \u001b[0mparallel_iterations\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m32\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 3010\u001b[0m \u001b[0mswap_memory\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mTrue\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 3011\u001b[0;31m maximum_iterations=input_length)\n\u001b[0m\u001b[1;32m 3012\u001b[0m \u001b[0mlast_time\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mfinal_outputs\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 3013\u001b[0m \u001b[0moutput_ta\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mfinal_outputs\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;31mTypeError\u001b[0m: while_loop() got an unexpected keyword argument 'maximum_iterations'"
]
}
],
"source": [
"model = Sequential()\n",
"model.add(GRU(100, input_shape=(train_X.shape[1], train_X.shape[2])))\n",
"model.add(Dropout(0.2))\n",
"model.add(Dense(1))\n",
"model.compile(loss='mean_squared_error', optimizer='adam')\n",
"\n",
"\n",
"\n",
"# fit network\n",
"history = model.fit(train_X, train_y, epochs=20, batch_size=70, validation_data=(test_X, test_y), verbose=2, shuffle=False)\n",
"\n",
"# summarize history for loss\n",
"plt.plot(history.history['loss'])\n",
"plt.plot(history.history['val_loss'])\n",
"plt.title('model loss')\n",
"plt.ylabel('loss')\n",
"plt.xlabel('epoch')\n",
"plt.legend(['train', 'test'], loc='upper right')\n",
"plt.show()\n",
"\n",
"# make a prediction\n",
"yhat = model.predict(test_X)\n",
"test_X = test_X.reshape((test_X.shape[0], 7))\n",
"# invert scaling for forecast\n",
"inv_yhat = np.concatenate((yhat, test_X[:, -6:]), axis=1)\n",
"inv_yhat = scaler.inverse_transform(inv_yhat)\n",
"inv_yhat = inv_yhat[:,0]\n",
"# invert scaling for actual\n",
"test_y = test_y.reshape((len(test_y), 1))\n",
"inv_y = np.concatenate((test_y, test_X[:, -6:]), axis=1)\n",
"inv_y = scaler.inverse_transform(inv_y)\n",
"inv_y = inv_y[:,0]\n",
"# calculate RMSE\n",
"rmse = np.sqrt(mean_squared_error(inv_y, inv_yhat))\n",
"print('Test RMSE: %.3f' % rmse)"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.1"
}
},
"nbformat": 4,
"nbformat_minor": 2
}