-
-
Save wasifhameed/1f104a21f680eaa55bce2abf9250fde5 to your computer and use it in GitHub Desktop.
LSTNet Model
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
def LSTNetModel(init, input_shape):
    """Build an LSTNet forecasting model (Lai et al., "Modeling Long- and
    Short-Term Temporal Patterns with Deep Neural Networks").

    The network combines a Conv2D feature extractor, a GRU, an optional
    skip-recurrent (SkipGRU) branch and an optional autoregressive (AR)
    highway component whose output is added to the dense prediction.

    Args:
        init: Configuration object providing CNNFilters, CNNKernel, GRUUnits,
            SkipGRUUnits, skip, window, highway, dropout and initialiser.
        input_shape: Full input shape including the batch dimension,
            i.e. (batchsize, time, m) where m is the number of time-series.

    Returns:
        A Keras Model mapping (batch, time, m) -> (batch, m).
    """
    # m is the number of time-series
    m = input_shape[2]
    # Tensor shape without the batch dimension
    tensor_shape = input_shape[1:]
    X = Input(shape=tensor_shape)

    # --- CNN ---
    if init.CNNFilters > 0 and init.CNNKernel > 0:
        # Add a trailing channel dimension of size 1 for Conv2D.
        # NOTE(review): this hard-codes a channels-last layout — confirm the
        # backend is configured for 'channels_last'.
        C = Reshape((input_shape[1], input_shape[2], 1))(X)
        # The (CNNKernel, m) kernel spans all series at once, producing
        # data of shape (batch, time - CNNKernel + 1, 1, CNNFilters).
        C = Conv2D(filters=init.CNNFilters,
                   kernel_size=(init.CNNKernel, m),
                   kernel_initializer=init.initialiser)(C)
        C = Dropout(init.dropout)(C)
        # Drop the singleton axis=2 so downstream layers see (batch, time, filters)
        c_shape = K.int_shape(C)
        C = Reshape((c_shape[1], c_shape[3]))(C)
    else:
        # No CNN configured: pass the input through unchanged
        C = X

    # --- GRU ---
    # Activation set to 'relu' as per the paper; keep only the final state
    _, R = GRU(init.GRUUnits, activation="relu",
               return_sequences=False, return_state=True)(C)
    R = Dropout(init.dropout)(R)

    # --- SkipGRU ---
    if init.skip > 0:
        # Number of values per skip group: the window divided by the skip length
        pt = int(init.window / init.skip)
        # NOTE(review): (window - CNNKernel + 1) is the post-convolution time
        # length; if the CNN branch was skipped the actual length is `window`
        # instead — verify skip > 0 is never configured without the CNN.
        S = PreSkipTrans(pt, int((init.window - init.CNNKernel + 1) / pt))(C)
        _, S = GRU(init.SkipGRUUnits, activation="relu",
                   return_sequences=False, return_state=True)(S)
        S = PostSkipTrans(int((init.window - init.CNNKernel + 1) / pt))([S, X])
        # Concatenate the final states of GRU and SkipGRU
        R = Concatenate(axis=1)([R, S])

    # --- Dense layer ---
    Y = Flatten()(R)
    Y = Dense(m)(Y)

    # --- AR (highway) component ---
    if init.highway > 0:
        Z = PreARTrans(init.highway)(X)
        Z = Flatten()(Z)
        Z = Dense(1)(Z)
        Z = PostARTrans(m)([Z, X])
        # Output is the sum of the dense prediction and the AR prediction
        Y = Add()([Y, Z])

    # Generate the model
    model = Model(inputs=X, outputs=Y)
    return model
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment.