@NMZivkovic
Created August 17, 2019 14:27
from tensorflow.keras.layers import Layer

# Note: build_multi_head_attention_layers and build_feed_forward_layers are
# helper functions from the accompanying article (a sketch is given below).

class EncoderLayer(Layer):
    def __init__(self, num_neurons, num_hidden_neurons, num_heads):
        super(EncoderLayer, self).__init__()

        # Build multi-head attention layer and necessary additional layers
        self.multi_head_attention_layer, self.attention_dropout, self.attention_normalization = \
            build_multi_head_attention_layers(num_neurons, num_heads)

        # Build feed-forward neural network and necessary additional layers
        self.feed_forward_layer, self.feed_forward_dropout, self.feed_forward_normalization = \
            build_feed_forward_layers(num_neurons, num_hidden_neurons)

    def call(self, sequence, training, mask):
        # Calculate attention output, then apply dropout and the first
        # residual connection followed by layer normalization
        attention_output, _ = self.multi_head_attention_layer(sequence, sequence, sequence, mask)
        attention_output = self.attention_dropout(attention_output, training=training)
        attention_output = self.attention_normalization(sequence + attention_output)

        # Calculate output of the feed-forward network
        output = self.feed_forward_layer(attention_output)
        output = self.feed_forward_dropout(output, training=training)

        # Apply the second residual connection and layer normalization
        output = self.feed_forward_normalization(attention_output + output)
        return output
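
The two builder helpers referenced above are defined elsewhere in the accompanying article. Below is a minimal sketch of what they might look like, assuming Keras' built-in MultiHeadAttention wrapped to match the (query, key, value, mask) call convention used in EncoderLayer; the wrapper class name, dropout rate, and epsilon are illustrative assumptions, not the author's exact code:

import tensorflow as tf
from tensorflow.keras.layers import Dense, Dropout, LayerNormalization


class MultiHeadAttentionLayer(tf.keras.layers.Layer):
    # Hypothetical wrapper: adapts tf.keras.layers.MultiHeadAttention to the
    # (query, key, value, mask) -> (output, attention_weights) call used above.
    # Note: Keras' attention_mask convention is 1 = attend, 0 = ignore.
    def __init__(self, num_neurons, num_heads):
        super(MultiHeadAttentionLayer, self).__init__()
        self.attention = tf.keras.layers.MultiHeadAttention(
            num_heads=num_heads, key_dim=num_neurons // num_heads)

    def call(self, query, key, value, mask):
        output, weights = self.attention(
            query=query, value=value, key=key,
            attention_mask=mask, return_attention_scores=True)
        return output, weights


def build_multi_head_attention_layers(num_neurons, num_heads):
    # Attention sub-block: attention layer, dropout, and layer normalization
    multi_head_attention_layer = MultiHeadAttentionLayer(num_neurons, num_heads)
    dropout = Dropout(0.1)
    normalization = LayerNormalization(epsilon=1e-6)
    return multi_head_attention_layer, dropout, normalization


def build_feed_forward_layers(num_neurons, num_hidden_neurons):
    # Position-wise feed-forward sub-block: two dense layers projecting back
    # to the model dimension, plus dropout and layer normalization
    feed_forward_layer = tf.keras.Sequential([
        Dense(num_hidden_neurons, activation='relu'),
        Dense(num_neurons),
    ])
    dropout = Dropout(0.1)
    normalization = LayerNormalization(epsilon=1e-6)
    return feed_forward_layer, dropout, normalization

With these in place, the encoder layer can be exercised on a dummy batch (the shapes here are arbitrary):

# 2 sequences of length 10, model dimension 64, 8 heads, 128 hidden units
encoder_layer = EncoderLayer(num_neurons=64, num_hidden_neurons=128, num_heads=8)
dummy_sequence = tf.random.uniform((2, 10, 64))
output = encoder_layer(dummy_sequence, training=False, mask=None)
print(output.shape)  # (2, 10, 64)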