TimeDistributed with Dense does not mask bias
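Minimal reproduction: Keras' Masking layer zeroes the inputs of fully padded time steps, but a downstream TimeDistributed(Dense) still adds its bias at those steps, so padded time steps output the bias value instead of zero. Tests 1-4 below build up to this step by step.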
import numpy as np
from keras.models import Model
from keras.layers import Masking, Input, TimeDistributed, Dense
# Construct time series data set
pad_value = 0.1
t1 = [3, 1, 2, 0.1]
t2 = [1, 1, 1, 0.1]
t_pad = [pad_value, pad_value, pad_value, pad_value]
time_series = np.asarray([[t1, t2, t_pad]])
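# time_series has shape (1, 3, 4): one sample, three time steps, four features.
# The last time step consists entirely of pad_value, so Masking should skip it.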
###############################################################################################
def build_model(use_masking, add_constant, mask_value=pad_value):
    """Build a network that sums the values at each time step in a time series.

    Arguments:
    - use_masking: Boolean that activates a Masking layer masking mask_value
    - add_constant: Constant added to the sum; this becomes the bias of the network.

    Returns:
    - Keras model

    The network uses a linear activation, with all kernel weights manually set
    to 1 and the bias set to add_constant, so each time step's output is the
    sum of its inputs plus add_constant.
    """
    my_input = Input(shape=(None, 4))
    if use_masking:
        mask = Masking(mask_value=mask_value)(my_input)
        out = TimeDistributed(Dense(1, activation='linear'))(mask)
    else:
        out = TimeDistributed(Dense(1, activation='linear'))(my_input)
    model = Model(inputs=my_input, outputs=out)
    # Set all kernel weights to 1 and the bias to add_constant
    model.set_weights([np.ones_like(model.get_weights()[0]),
                       add_constant*np.ones_like(model.get_weights()[1])])
    return model
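# (Hypothetical helper, not in the original gist.) Keras' Masking layer
# replaces fully masked time steps with zeros before they reach Dense, which
# is why Test 3 below prints 0.0 for t_pad. A model that stops at the Masking
# layer makes this visible:
def build_mask_only_model(mask_value=pad_value):
    """Return a model whose output is just the Masking layer's output."""
    my_input = Input(shape=(None, 4))
    return Model(inputs=my_input, outputs=Masking(mask_value=mask_value)(my_input))
# e.g. build_mask_only_model().predict(time_series)[0] shows the t_pad row as zeros.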
def print_helper(time_series_sum):
    s1, s2, s3 = time_series_sum
    print("Sum t1 \t = ", s1[0])
    print("Sum t2 \t = ", s2[0])
    print("Sum t_pad = ", s3[0])
###############################################################################################
# Test model
###############################################################################################
print("Manuel sum of time series steps (without any masking)")
print_helper([[np.sum(t1)], [np.sum(t2)], [np.sum(t_pad)]])
print()
### Test 1
print("Test 1: ")
print("use_masking = False")
print("add_constant = 0")
time_series_sum_test_1 = build_model(use_masking=False, add_constant=0).predict(time_series)[0]
print_helper(time_series_sum_test_1)
print("This works as expected \n")
### Test 2
print("Test 2: ")
print("use_masking = False")
print("add_constant = 10")
time_series_sum_test_2 = build_model(use_masking=False, add_constant=10).predict(time_series)[0]
print_helper(time_series_sum_test_2)
print("This works as expected \n")
### Test 3
print("Test 3: ")
print("use_masking = True")
print("add_constant = 0")
time_series_sum_test_3 = build_model(use_masking=True, add_constant=0).predict(time_series)[0]
print_helper(time_series_sum_test_3)
print("This is a check that masking works as expected on the weights multiplying the inputs. \n(Note: The bias is now set to zero) \n")
### Test 4
print("Test 4: ")
print("use_masking = True")
print("add_constant = 10")
time_series_sum_test_4 = build_model(use_masking=True, add_constant=10).predict(time_series)[0]
print_helper(time_series_sum_test_4)
print("This fails! sum of t_pad is equal to the bias in the network \nThe masking layer is not masking the bias!")