@NMZivkovic
Created August 17, 2019 13:34
import tensorflow as tf
from tensorflow.keras.layers import Layer, Embedding, Dropout


# Pre-processing stage of a Transformer: token embedding plus positional
# encoding, followed by dropout. PositionalEncoding is a helper class
# defined elsewhere in the tutorial this gist belongs to.
class PreProcessingLayer(Layer):
    def __init__(self, num_neurons, vocabular_size):
        super(PreProcessingLayer, self).__init__()

        # Initialize
        self.num_neurons = num_neurons

        # Add embeddings and positional encoding
        self.embedding = Embedding(vocabular_size, self.num_neurons)
        positional_encoding_handler = PositionalEncoding(vocabular_size, self.num_neurons)
        self.positional_encoding = positional_encoding_handler.get_positional_encoding()

        # Regularize with dropout
        self.dropout = Dropout(0.1)

    def call(self, sequence, training, mask):
        sequence_length = tf.shape(sequence)[1]
        sequence = self.embedding(sequence)

        # Scale embeddings by sqrt(d_model), then add positional information
        sequence *= tf.math.sqrt(tf.cast(self.num_neurons, tf.float32))
        sequence += self.positional_encoding[:, :sequence_length, :]

        sequence = self.dropout(sequence, training=training)
        return sequence
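
The snippet assumes a PositionalEncoding helper that is not part of this gist (it comes from an earlier part of the same tutorial). Below is a minimal sketch of what it could look like, assuming the standard sinusoidal encoding from "Attention Is All You Need" and the constructor and get_positional_encoding() interface used above; the tutorial's actual class may differ in detail.

import numpy as np
import tensorflow as tf

# Hypothetical stand-in for the tutorial's PositionalEncoding class.
class PositionalEncoding:
    def __init__(self, max_positions, num_neurons):
        # max_positions: number of sequence positions to precompute
        self.max_positions = max_positions
        self.num_neurons = num_neurons

    def get_positional_encoding(self):
        # Angle rates follow 1 / 10000^(2i / d_model) for dimension index i
        positions = np.arange(self.max_positions)[:, np.newaxis]
        dimensions = np.arange(self.num_neurons)[np.newaxis, :]
        angle_radians = positions / np.power(10000, (2 * (dimensions // 2)) / np.float32(self.num_neurons))

        # Sine on even dimensions, cosine on odd dimensions
        angle_radians[:, 0::2] = np.sin(angle_radians[:, 0::2])
        angle_radians[:, 1::2] = np.cos(angle_radians[:, 1::2])

        # Add a batch axis so the result broadcasts over [batch, seq_len, d_model]
        return tf.cast(angle_radians[np.newaxis, ...], dtype=tf.float32)

With such a helper defined, the layer can be exercised end to end. The vocabulary size, model width, and batch shape below are placeholder values, not taken from the gist:

# Usage sketch with made-up hyperparameters.
layer = PreProcessingLayer(num_neurons=512, vocabular_size=8500)
tokens = tf.random.uniform((64, 40), minval=0, maxval=8500, dtype=tf.int32)
embedded = layer(tokens, training=False, mask=None)
print(embedded.shape)  # (64, 40, 512)

Note that the gist passes vocabular_size as the number of positions to encode, so the positional table accommodates any sequence shorter than the vocabulary size.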