Skip to content

Instantly share code, notes, and snippets.

View EmnamoR's full-sized avatar
:octocat:
working

Emna Amor EmnamoR

:octocat:
working
View GitHub Profile
def sample_gumbel(shape, eps=1e-20):
    """Draw a sample of the given shape from the Gumbel(0, 1) distribution.

    Uses the inverse-CDF trick: if U ~ Uniform(0, 1), then
    -log(-log(U)) ~ Gumbel(0, 1). `eps` guards both log calls
    against log(0).
    """
    uniform = tf.random_uniform(shape, minval=0, maxval=1)
    gumbel = -tf.log(-tf.log(uniform + eps) + eps)
    return gumbel
def gumbel_softmax_sample(logits, temperature):
    """Draw a differentiable sample from the Gumbel-Softmax distribution.

    Perturbs `logits` with Gumbel(0, 1) noise, then applies a softmax
    scaled by `temperature` (lower temperature -> closer to one-hot).
    """
    noisy_logits = logits + sample_gumbel(tf.shape(logits))
    return tf.nn.softmax(noisy_logits / temperature)
@dmmiller612
dmmiller612 / KerasAttention.py
Last active June 19, 2021 08:32
Keras Sequence to Sequence LSTM with Attention Mechanism
from keras.layers.core import Permute
from keras.layers import Dense, Activation, RepeatVector, merge,Flatten, TimeDistributed, Input
from keras.layers import Embedding, LSTM
from keras.models import Model
# Presumably the LSTM hidden-state size for the seq2seq model — the model
# construction is outside this excerpt, so confirm against the full gist.
hidden = 225
# Load model inputs and targets. get_features/get_outputs are defined
# elsewhere in the gist; their return shapes are not visible here — TODO
# confirm they match what the model below expects.
features = get_features()
outputs = get_outputs()