@mercy0387
Created December 2, 2017 09:00
Tips for realizing the network architecture you have in mind with Keras ~ Lambda edition ~ ref: https://qiita.com/Mco7777/items/158296ed7f66aed2ffc3
# Naive attempt: applying a backend op directly to the input tensor.
from keras import backend as K
from keras.models import Model
from keras.layers import Input
model_input = Input(shape=(10,))
calculated = K.sqrt(model_input + 1.0)  # plain backend tensor, not the output of a Keras layer
model = Model(inputs=model_input, outputs=calculated)  # Keras rejects this: model outputs must come from a Layer
# Correct approach: wrap the backend op in a Lambda layer.
from keras import backend as K
from keras.models import Model
from keras.layers import Input, Lambda
model_input = Input(shape=(10,))
calculated = Lambda(lambda x: K.sqrt(x + 1.0), output_shape=(10,))(model_input)
model = Model(inputs=model_input, outputs=calculated)
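
A minimal check of the Lambda model above, a sketch assuming NumPy is available; each output element should equal sqrt(x + 1):

import numpy as np
x = np.random.rand(3, 10)
y = model.predict(x)
print(y.shape)                            # (3, 10)
print(np.allclose(y, np.sqrt(x + 1.0)))   # True (up to float32 precision)
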
# The NumPy version of concatenation, for comparison.
import numpy as np
x1 = np.random.rand(3, 5)
x2 = np.random.rand(3, 5)
x = np.concatenate((x1, x2), axis=1)
x.shape
# (3, 10)
# The Keras version: the Concatenate layer joins the two inputs along the last axis.
from keras.models import Model
from keras.layers import Concatenate, Input
x1 = Input(shape=(5,))
x2 = Input(shape=(5,))
x = Concatenate()([x1, x2])                # output shape: (None, 10)
model = Model(inputs=[x1, x2], outputs=x)
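
A quick sanity check of the Concatenate model, a sketch assuming NumPy; the result should match np.concatenate:

import numpy as np
a = np.random.rand(3, 5)
b = np.random.rand(3, 5)
y = model.predict([a, b])
print(y.shape)                                          # (3, 10)
print(np.allclose(y, np.concatenate((a, b), axis=1)))   # True
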
# A more involved graph: my_dense is applied twice, so its weights are shared
# between the first hidden layer and the "loop" branch; the model has two outputs.
from keras.models import Model
from keras.layers import Input, Dense, Activation, Multiply
my_dense = Dense(5)                       # a layer instance to be reused (shared weights)
model_input = Input(shape=(5,))
mid1 = my_dense(model_input)              # first use of my_dense
mid2 = Dense(5)(mid1)
mid3 = Multiply()([mid1, mid2])           # element-wise product of the two branches
loop = my_dense(mid3)                     # second use of my_dense: same weights as for mid1
output1 = Activation('relu')(loop)
output2 = Activation('relu')(mid2)
model = Model(inputs=model_input, outputs=[output1, output2])
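
A minimal sketch of running the two-output model; predict returns one array per output:

import numpy as np
x = np.random.rand(3, 5)
out1, out2 = model.predict(x)
print(out1.shape, out2.shape)   # (3, 5) (3, 5)
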
# The NumPy version of splitting: slice the 10 columns into two halves.
import numpy as np
x = np.random.rand(3, 10)
x1 = x[:, :5]   # first 5 columns
x2 = x[:, 5:]   # last 5 columns
# The Keras version: the split is expressed with Lambda layers and tensor slicing.
from keras.models import Model
from keras.layers import Input
from keras.layers.core import Lambda
model_input = Input(shape=(10,))
x1 = Lambda(lambda x: x[:, :5], output_shape=(5,))(model_input)   # first half
x2 = Lambda(lambda x: x[:, 5:], output_shape=(5,))(model_input)   # second half
model = Model(inputs=model_input, outputs=[x1, x2])
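
A quick check of the splitting model, a sketch assuming NumPy; each output should match the corresponding NumPy slice:

import numpy as np
x = np.random.rand(3, 10)
left, right = model.predict(x)
print(left.shape, right.shape)                                      # (3, 5) (3, 5)
print(np.allclose(left, x[:, :5]), np.allclose(right, x[:, 5:]))    # True True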