Skip to content

Instantly share code, notes, and snippets.

@benman1
Last active May 1, 2019 13:53
Show Gist options
  • Save benman1/0424365833d46faac82ad625650d4926 to your computer and use it in GitHub Desktop.
Save benman1/0424365833d46faac82ad625650d4926 to your computer and use it in GitHub Desktop.
mini bert transformation
from tensorflow.python.keras.layers import (
    LSTM,
    BatchNormalization,
    Concatenate,
    Cropping1D,
    CuDNNGRU,
    Dropout,
    Masking,
    Reshape,
    TimeDistributed,
    ZeroPadding1D,
)
from tensorflow.python.keras.regularizers import l1
reg = l1(10e-5)
def mini_bert_block(
    input_layer, embedding_dim=300,
    drop_out=0.5, sequence_length=800, context_width=3,
):
    """Build a local-context recurrent block ("mini BERT") over a sequence.

    For every position in the input sequence, a window of ``context_width``
    neighbouring embeddings is assembled (via ``context_width`` shifted
    zero-paddings of the input), and a GRU summarises each window into a
    single vector, so the output keeps the original sequence length.

    Args:
        input_layer: Keras tensor — assumed shape
            (batch, sequence_length, embedding_dim); confirm against caller.
        embedding_dim: Size of the embedding (and GRU output) dimension.
        drop_out: Dropout rate applied to the block output.
        sequence_length: Length of the input sequence (time axis).
        context_width: Positive odd number of positions per context window.

    Returns:
        Keras tensor of shape (batch, sequence_length, embedding_dim).

    Raises:
        ValueError: If ``context_width`` is not a positive odd number.
    """
    # A non-positive width would produce zero paddings and fail later with
    # an opaque Concatenate error; reject it here alongside even widths.
    if context_width < 1 or context_width % 2 == 0:
        raise ValueError(
            'Please provide a positive odd number for the context!'
        )
    # Shift the sequence context_width times so that, after concatenation,
    # each time step carries its own embedding plus its neighbours'.
    paddings = []
    for i in range(context_width):
        paddings.append(
            ZeroPadding1D((i, context_width - 1 - i))(input_layer)
        )
    concat = Concatenate(axis=-1)(paddings)
    # Unfold the stacked shifts into an explicit window axis:
    # (batch, seq + cw - 1, cw, embedding_dim).
    reshape = Reshape(
        (sequence_length + context_width - 1, context_width, embedding_dim)
    )(concat)
    normed = BatchNormalization(axis=-1)(reshape)
    # Summarise each window of context_width embeddings with a GRU;
    # return_sequences=False collapses the window axis back out.
    recurrent = TimeDistributed(
        CuDNNGRU(  # optionally use a Bidirectional here
            units=embedding_dim,
            return_sequences=False,
            activity_regularizer=reg,
        )
    )(normed)
    # Padding inflated the time axis by context_width - 1; crop it back
    # symmetrically to restore the original sequence length.
    crop = (context_width - 1) // 2
    sliced = Cropping1D((crop, crop))(recurrent)
    return Dropout(drop_out)(sliced)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment