@jamartinh
Created February 8, 2018 19:23
A Deep Stateful LSTM

import os
import keras
from keras import Input
from keras.layers import BatchNormalization, Dense, LSTM, concatenate, Dropout, TimeDistributed
from keras.optimizers import RMSprop
from keras.utils import plot_model
from metrics import in_top5, top5_accuracy
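
# `metrics` is a local module that is not included in this gist. A minimal
# sketch of what `in_top5` and `top5_accuracy` might look like, assuming they
# wrap Keras' built-in top-k categorical accuracy (an assumption, not the
# author's implementation):
#
#     from keras.metrics import top_k_categorical_accuracy
#
#     def in_top5(y_true, y_pred):
#         # hypothetical: fraction of samples whose true class is among
#         # the five highest-scoring predictions
#         return top_k_categorical_accuracy(y_true, y_pred, k = 5)
#
#     def top5_accuracy(y_true, y_pred):
#         return top_k_categorical_accuracy(y_true, y_pred, k = 5)
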
def gen_nn_model(hyperparameters, target_symbols):
    # - Input definitions:
    batch_size = hyperparameters['batch_size']
    n_cols = hyperparameters['n_cols']
    n_history = hyperparameters['n_history']

    # -- Daily Input: a fixed batch_shape is required because the first LSTM
    # below is stateful (a separate shape argument would be redundant, so
    # only batch_shape is passed).
    daily_input = Input(batch_shape = (batch_size, n_history, n_cols),
                        dtype = 'float32',
                        name = 'daily_input')
    normalized_daily_input = BatchNormalization()(daily_input)
    # normalized_daily_input = TimeDistributed(Dense(n_cols, activation='linear'))(normalized_daily_input)
    normalized_daily_input = TimeDistributed(Dense(n_cols, activation = 'relu'))(normalized_daily_input)
    normalized_daily_input = TimeDistributed(Dense(n_cols, activation = 'relu'))(normalized_daily_input)
    normalized_daily_input = TimeDistributed(Dense(n_cols, activation = 'relu'))(normalized_daily_input)
    normalized_daily_input = Dropout(0.2)(normalized_daily_input)

    # -- Deep LSTM layers: the first LSTM is stateful, so it carries its
    # hidden state across successive batches; the second consumes the full
    # sequence and emits a single feature vector.
    x = LSTM(n_cols // 7,
             return_state = False,
             return_sequences = True,
             stateful = True)(normalized_daily_input)
    x = LSTM(n_cols // 7,
             stateful = False)(x)

    # -- 'r' head: linear (regression) output, one value per target symbol
    r = Dense(n_cols - 1, activation = 'tanh')(x)
    r = Dense(n_cols - 1, activation = 'tanh')(r)
    r = Dense(len(target_symbols), activation = 'linear', name = 'r')(r)

    # The 'p' and 'l' heads see both the LSTM features and the 'r' output
    joint_flow = concatenate([x, r])

    # -- 'p' head: softmax distribution over the target symbols
    p = Dense(n_cols - 1, activation = 'sigmoid')(joint_flow)
    p = Dense(n_cols - 1, activation = 'sigmoid')(p)
    # p = keras.layers.core.Lambda(lambda x: x * 5)(p)
    p = Dense(len(target_symbols), activation = 'softmax', name = 'p')(p)

    # -- 'l' head: a second softmax distribution over the target symbols
    # l = keras.layers.core.Lambda(lambda x: negative(x))(p)
    l = Dense(n_cols - 1, activation = 'sigmoid')(joint_flow)
    l = Dense(n_cols - 1, activation = 'sigmoid')(l)
    # l = keras.layers.core.Lambda(lambda x: x / 2)(l)
    l = Dense(len(target_symbols), activation = 'softmax', name = 'l')(l)

    # -- Model creation
    inputs = [daily_input]
    outputs = [r, p, l]
    model = keras.models.Model(inputs = inputs, outputs = outputs)
    return batch_size, model

def compile_model(model):
    lr = 0.1
    # Per-head losses: absolute error for the regression head 'r',
    # KL divergence for the two distribution heads 'p' and 'l'.
    loss = { 'r': 'mean_absolute_error',
             'p': 'kullback_leibler_divergence',
             'l': 'kullback_leibler_divergence'
             }
    loss_weights = { 'r': .20,
                     'p': .40,
                     'l': .40,
                     }
    opt = RMSprop(lr = lr)
    # opt = 'adam'
    # Top-5 metrics only make sense for the two distribution heads
    metrics = { 'p': [in_top5, top5_accuracy],
                'l': [in_top5, top5_accuracy] }
    model.compile(optimizer = opt,
                  loss = loss,
                  loss_weights = loss_weights,
                  metrics = metrics
                  )
    return model

def show_model_details(EXP_NAME, EXP_PATH, model):
    model.summary()  # prints the layer table itself; returns None
    str_image_file_name = os.path.join(EXP_PATH, '{0}.png'.format(EXP_NAME))
    plot_model(model, to_file = str_image_file_name,
               show_shapes = True,
               show_layer_names = True,
               rankdir = 'TB')  # graphviz top-to-bottom layout
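

# ---------------------------------------------------------------------------
# Usage sketch (not part of the original gist). Everything below is an
# illustrative assumption: the hyperparameter values, the placeholder target
# symbols and the random data exist only to show how the pieces fit together
# and how a stateful LSTM has to be trained (fixed batch size, no shuffling,
# states reset between epochs).
# ---------------------------------------------------------------------------
if __name__ == '__main__':
    import numpy as np

    hyperparameters = { 'batch_size': 32, 'n_cols': 70, 'n_history': 30 }
    target_symbols = ['SYM{0}'.format(i) for i in range(10)]  # placeholders

    batch_size, model = gen_nn_model(hyperparameters, target_symbols)
    model = compile_model(model)

    # With a stateful LSTM the number of samples must be an exact multiple
    # of batch_size, because every batch must contain exactly batch_size rows.
    n_samples = batch_size * 4
    X = np.random.randn(n_samples,
                        hyperparameters['n_history'],
                        hyperparameters['n_cols']).astype('float32')
    y_r = np.random.randn(n_samples, len(target_symbols)).astype('float32')
    y_p = np.random.rand(n_samples, len(target_symbols)).astype('float32')
    y_p /= y_p.sum(axis = 1, keepdims = True)  # valid distributions for KL
    y_l = y_p.copy()

    for _ in range(3):
        # shuffle=False keeps batch order intact so the carried-over LSTM
        # state stays aligned with the sequence of batches
        model.fit(X, { 'r': y_r, 'p': y_p, 'l': y_l },
                  batch_size = batch_size, epochs = 1, shuffle = False)
        model.reset_states()  # clear the stateful LSTM between epochs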