Skip to content

Instantly share code, notes, and snippets.

@caisq
Created September 11, 2018 03:44
Show Gist options
  • Save caisq/ef44c6bcb9e96b035b68bdd72b9a0b1e to your computer and use it in GitHub Desktop.
Save caisq/ef44c6bcb9e96b035b68bdd72b9a0b1e to your computer and use it in GitHub Desktop.
import time
import tensorflow as tf
# TF1-style eager mode: ops below execute immediately instead of building a graph.
tf.enable_eager_execution()
# To benchmark on GPU instead, wrap the calls below in: with tf.device('gpu:0'):
def model(xs):
    """Run a small conv net over a batch of images and return class probabilities.

    Args:
        xs: float tensor of images; the driver code feeds shape
            [batch, 43, 232, 1] -- other heights/widths may not survive the
            fixed pooling stack. TODO confirm intended input shape.

    Returns:
        Tensor of shape [batch, 10] with softmax outputs.

    NOTE(review): every call constructs brand-new Keras layers with fresh
    random weights, so repeated calls share no parameters. That is fine for
    a pure op-dispatch timing benchmark, but no real training occurs across
    optimizer steps.
    """
    ys = tf.keras.layers.Conv2D(8, [2, 8], activation='relu')(xs)
    ys = tf.keras.layers.MaxPool2D([2, 2], strides=[2, 2])(ys)
    ys = tf.keras.layers.MaxPool2D([2, 2], strides=[2, 2])(ys)
    ys = tf.keras.layers.Conv2D(32, [2, 4], activation='relu')(ys)
    ys = tf.keras.layers.MaxPool2D([2, 2], strides=[2, 2])(ys)
    ys = tf.keras.layers.Conv2D(32, [2, 4], activation='relu')(ys)
    # Final pool uses stride [1, 2]: keeps height, halves width.
    ys = tf.keras.layers.MaxPool2D([2, 2], strides=[1, 2])(ys)
    ys = tf.keras.layers.Flatten()(ys)
    ys = tf.keras.layers.Dense(2000, activation='relu')(ys)
    ys = tf.keras.layers.Dense(10, activation='softmax')(ys)
    return ys
def loss(xs, ys):
    """Per-example categorical cross-entropy between labels and predictions.

    Args:
        xs: input batch fed to ``model``.
        ys: one-hot (or probability) target tensor of shape [batch, 10].

    Returns:
        Tensor of shape [batch] with per-example loss values.
    """
    # Bug fix: tf.keras.losses.categorical_crossentropy takes
    # (y_true, y_pred); the original passed predictions first.
    return tf.keras.losses.categorical_crossentropy(ys, model(xs))
# --- Benchmark driver: time 100 eager-mode SGD minimize steps on random data.
batch_size = 48
# Random inputs/targets only exercise op dispatch speed; values are meaningless.
xs = tf.random_normal([batch_size, 43, 232, 1])
ys = tf.random_uniform([batch_size, 10])
optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.001)
print('Calling minimize...')
t0 = time.time()
for _ in range(100):
    # In TF1 eager mode, minimize accepts a zero-arg loss callable.
    optimizer.minimize(lambda: loss(xs, ys))
print(time.time() - t0)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment