@ht93
Last active October 25, 2017 15:58
HT_Keras_issue_2017_10_25
import os
import time
import numpy as np
from keras import objectives
from keras import regularizers
from keras import backend as K
from keras.layers import Input, Dense, Activation
from keras.models import Model
from keras.optimizers import Adam
from keras.layers.normalization import BatchNormalization
def main():
    os.environ["CUDA_VISIBLE_DEVICES"] = '0'
    K.clear_session()
    # Random regression data: 100 inputs -> 16 targets.
    train_x = np.random.rand(1024, 100)
    train_y = np.random.rand(1024, 16)
    valid_x = np.random.rand(256, 100)
    valid_y = np.random.rand(256, 16)
    # Small MLP; in this first variant the L2 kernel regularizer stays commented out.
    init_method = 'glorot_uniform'
    model_in = Input(shape=(train_x.shape[1], ))
    # H = Dense(96, kernel_initializer=init_method, kernel_regularizer=regularizers.l2(0.02))(model_in)
    H = Dense(96, kernel_initializer=init_method)(model_in)
    H = Activation('relu')(H)
    model_out = Dense(16, kernel_initializer=init_method)(H)
    model = Model(model_in, model_out)
    model.summary()
    model.compile(loss='mean_squared_error', optimizer=Adam())
    epochs = 3
    for epoch in range(1, epochs + 1):
        h = model.fit(train_x, train_y, epochs=1,
                      batch_size=128, verbose=0,
                      validation_data=(valid_x, valid_y))
        print("Epoch: {:04d} ".format(epoch),
              "train_loss= {:.4f}".format(h.history['loss'][0]),
              " ",
              "valid_loss= {:.4f}".format(h.history['val_loss'][0]))
    # Compare the loss reported by evaluate() with an MSE recomputed directly in NumPy.
    score_train = model.evaluate(x=train_x, y=train_y, batch_size=128, verbose=0)
    score_valid = model.evaluate(x=valid_x, y=valid_y, batch_size=128, verbose=0)
    y_predict = model.predict(train_x, batch_size=128, verbose=0)
    score_numpy_train = np.mean(np.square(train_y - y_predict), axis=-1)
    y_predict = model.predict(valid_x, batch_size=128, verbose=0)
    score_numpy_valid = np.mean(np.square(valid_y - y_predict), axis=-1)
    print("After training:",
          "train_loss= {:.4f}".format(score_train),
          "train_loss_numpy= {:.4f}".format(np.mean(score_numpy_train)),
          "valid_loss= {:.4f}".format(score_valid),
          "valid_loss_numpy= {:.4f}".format(np.mean(score_numpy_valid)))
    return
main()
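
With the regularizer commented out, as above, the loss reported by model.evaluate and the MSE recomputed in NumPy from model.predict should agree up to floating-point and batch-averaging noise, which is what the final print compares. A small helper, not part of the original gist, that factors out this comparison for any compiled regression model could look like:

def compare_losses(model, x, y, batch_size=128):
    # Loss as reported by Keras (this includes any regularization penalties).
    keras_loss = model.evaluate(x=x, y=y, batch_size=batch_size, verbose=0)
    # Plain MSE recomputed from the model's predictions.
    y_pred = model.predict(x, batch_size=batch_size, verbose=0)
    numpy_loss = np.mean(np.square(y - y_pred))
    return keras_loss, numpy_loss

The second script below is identical except that the l2(0.02) kernel regularizer on the hidden Dense layer is enabled; there the two numbers will differ, because Keras folds the regularization penalty into the reported loss.
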
import os
import time
import numpy as np
from keras import objectives
from keras import regularizers
from keras import backend as K
from keras.layers import Input, Dense, Activation
from keras.models import Model
from keras.optimizers import Adam
from keras.layers.normalization import BatchNormalization
def main():
    os.environ["CUDA_VISIBLE_DEVICES"] = '0'
    K.clear_session()
    # Same random regression data as in the first script.
    train_x = np.random.rand(1024, 100)
    train_y = np.random.rand(1024, 16)
    valid_x = np.random.rand(256, 100)
    valid_y = np.random.rand(256, 16)
    # Same MLP, but now with an L2(0.02) kernel regularizer on the hidden layer.
    init_method = 'glorot_uniform'
    model_in = Input(shape=(train_x.shape[1], ))
    H = Dense(96, kernel_initializer=init_method, kernel_regularizer=regularizers.l2(0.02))(model_in)
    # H = Dense(96, kernel_initializer=init_method)(model_in)
    H = Activation('relu')(H)
    model_out = Dense(16, kernel_initializer=init_method)(H)
    model = Model(model_in, model_out)
    model.summary()
    model.compile(loss='mean_squared_error', optimizer=Adam())
    epochs = 3
    for epoch in range(1, epochs + 1):
        h = model.fit(train_x, train_y, epochs=1,
                      batch_size=128, verbose=0,
                      validation_data=(valid_x, valid_y))
        print("Epoch: {:04d} ".format(epoch),
              "train_loss= {:.4f}".format(h.history['loss'][0]),
              " ",
              "valid_loss= {:.4f}".format(h.history['val_loss'][0]))
    # Compare the loss reported by evaluate() with an MSE recomputed directly in NumPy.
    score_train = model.evaluate(x=train_x, y=train_y, batch_size=128, verbose=0)
    score_valid = model.evaluate(x=valid_x, y=valid_y, batch_size=128, verbose=0)
    y_predict = model.predict(train_x, batch_size=128, verbose=0)
    score_numpy_train = np.mean(np.square(train_y - y_predict), axis=-1)
    y_predict = model.predict(valid_x, batch_size=128, verbose=0)
    score_numpy_valid = np.mean(np.square(valid_y - y_predict), axis=-1)
    print("After training:",
          "train_loss= {:.4f}".format(score_train),
          "train_loss_numpy= {:.4f}".format(np.mean(score_numpy_train)),
          "valid_loss= {:.4f}".format(score_valid),
          "valid_loss_numpy= {:.4f}".format(np.mean(score_numpy_valid)))
    return
main()
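
In this second variant the gap between the Keras losses and the NumPy losses is the L2 penalty that Keras adds to the objective. As a sketch (not part of the original gist, and assuming only Dense kernels carry the l2(0.02) regularizer as in the script above), the penalty can be recomputed from the trained weights and added to the NumPy MSE to reconcile the two numbers:

def l2_penalty(model, l2_coeff=0.02):
    # Sum the L2 penalties Keras adds to the reported loss: regularizers.l2(c)
    # contributes c * sum(kernel ** 2) for every layer with a kernel_regularizer.
    penalty = 0.0
    for layer in model.layers:
        if getattr(layer, 'kernel_regularizer', None) is not None:
            kernel = layer.get_weights()[0]
            penalty += l2_coeff * np.sum(np.square(kernel))
    return penalty

Appended at the end of main(), np.mean(score_numpy_valid) + l2_penalty(model) should come out close to the score_valid returned by model.evaluate, and likewise for the training split.
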