Skip to content

Instantly share code, notes, and snippets.

@NMZivkovic
Last active August 17, 2019 14:58
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save NMZivkovic/3c1a272d65d9b182f48d267199c4d879 to your computer and use it in GitHub Desktop.
class Decoder(Layer):
    """Transformer decoder: an input pre-processing layer followed by a
    stack of identical decoder layers.

    Each decoder layer is called with the running sequence, the encoder
    output, and both attention masks; its two attention-weight tensors are
    collected into a dict keyed by layer index.
    """

    def __init__(self, num_neurons, num_hidden_neurons, num_heads, vocabular_size, num_dec_layers=6):
        """Build the pre-processing layer and `num_dec_layers` decoder layers.

        Args:
            num_neurons: model/embedding dimension passed to the sub-layers.
            num_hidden_neurons: hidden size of each decoder layer's feed-forward net.
            num_heads: number of attention heads per decoder layer.
            vocabular_size: vocabulary size for the pre-processing (embedding) layer.
            num_dec_layers: number of stacked decoder layers (default 6).
        """
        super(Decoder, self).__init__()
        self.num_dec_layers = num_dec_layers
        self.pre_processing_layer = PreProcessingLayer(num_neurons, vocabular_size)
        self.decoder_layers = [DecoderLayer(num_neurons, num_hidden_neurons, num_heads) for _ in range(num_dec_layers)]

    def call(self, sequence, enconder_output, training, look_ahead_mask, padding_mask):
        """Run the decoder stack.

        Args:
            sequence: target-side token sequence fed to the pre-processing layer.
            enconder_output: encoder stack output attended to by each layer.
            training: training-mode flag forwarded to the sub-layers.
            look_ahead_mask: causal mask for the self-attention sub-layer.
            padding_mask: padding mask for the encoder-decoder attention.

        Returns:
            Tuple of (decoded sequence, dict of per-layer attention weights).
        """
        # BUG FIX: the weights dict was never initialized before being written to.
        attention_weights = {}
        # BUG FIX: the original passed an undefined name `mask` here; the
        # padding mask is the intended argument — TODO confirm against
        # PreProcessingLayer's signature.
        sequence = self.pre_processing_layer(sequence, training, padding_mask)
        # BUG FIX: the original iterated `self.dec_layers`, but __init__ stores
        # the stack as `self.decoder_layers` (AttributeError at runtime).
        for i in range(self.num_dec_layers):
            sequence, attention_weights1, attention_weights2 = self.decoder_layers[i](sequence, enconder_output, training, look_ahead_mask, padding_mask)
            attention_weights['decoder_layer{}_attention_weights1'.format(i+1)] = attention_weights1
            attention_weights['decoder_layer{}_attention_weights2'.format(i+1)] = attention_weights2
        return sequence, attention_weights
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment