from keras.layers import Layer
from keras import activations


class AttentiveConv(Layer):
    def __init__(self, kernel_activation='tanh', filters=3, **kwargs):
        super(AttentiveConv, self).__init__(**kwargs)
        self.kernel_activation = activations.get(kernel_activation)
        # force an odd filter count so the convolution window has a centre position
        if filters % 2 == 0:
            self.filters = filters - 1
        else:
            self.filters = filters
stoney95 / Attention (created November 23, 2018)
Attentive Convolution with a custom Attention layer
from keras.layers import Lambda, Reshape, RepeatVector, Concatenate, Conv1D, Activation
from keras.layers import Layer
from keras import activations
from keras import backend as K


class Attention(Layer):
    def __init__(self, kernel_activation='hard_sigmoid', before=False, **kwargs):
        super(Attention, self).__init__(**kwargs)
        self.kernel_activation = activations.get(kernel_activation)
        # whether the attention is applied before the convolution
        self.before = before
        K.set_floatx('float32')
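
For orientation, below is a minimal, standalone sketch of the attentive-convolution idea built only from stock Keras layers: attention weights over the time axis form a context vector that is broadcast back and concatenated to every position before a Conv1D. This is an illustrative assumption of the overall data flow, not the gist's own build/call logic (which is not shown here); all shapes, activations, and filter counts are arbitrary.

    # Standalone sketch of attentive convolution with stock Keras layers
    # (assumed shapes and parameters, not the custom layers above).
    import numpy as np
    from keras.layers import Input, Dense, Softmax, Dot, Reshape, RepeatVector, Concatenate, Conv1D
    from keras.models import Model

    seq_len, dim = 10, 8

    inputs = Input(shape=(seq_len, dim))
    scores = Dense(1, activation='tanh')(inputs)                 # (batch, seq_len, 1) unnormalised scores
    weights = Softmax(axis=1)(scores)                            # normalise over the time axis
    context = Dot(axes=1)([weights, inputs])                     # (batch, 1, dim) attention-weighted sum
    context = RepeatVector(seq_len)(Reshape((dim,))(context))    # broadcast the context to every step
    enriched = Concatenate(axis=-1)([inputs, context])           # (batch, seq_len, 2*dim)
    features = Conv1D(filters=3, kernel_size=3, padding='same')(enriched)

    model = Model(inputs, features)
    x = np.random.rand(2, seq_len, dim).astype('float32')
    print(model.predict(x).shape)                                # (2, 10, 3)

Running the sketch prints (2, 10, 3): the convolution sees the raw sequence alongside the same attention context at every position, which is the basic effect the custom Attention and AttentiveConv layers are meant to learn end-to-end.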