Created November 7, 2017 23:37
Huge second-derivative graph
""" | |
Generate some second-derivatives that make TF cry. | |
On my machine, this takes several minutes to run and uses | |
about 2GB of memory. | |
""" | |
import time | |
import tensorflow as tf | |
def build_graph(): | |
""" | |
Build a graph that TF hates. | |
""" | |
print('Building RNN...') | |
inputs = [tf.Variable(tf.random_normal((10, 5))) for _ in range(200)] | |
rnn_out, _, _ = tf.nn.static_bidirectional_rnn(tf.contrib.rnn.LSTMCell(128), | |
tf.contrib.rnn.LSTMCell(128), | |
inputs, | |
dtype=tf.float32) | |
rnn_out = tf.reduce_mean(tf.concat(rnn_out, 0)) | |
print('Building meta-loss...') | |
targets = [tf.placeholder(tf.float32, (10, 5)) for _ in inputs] | |
grads = tf.gradients(rnn_out, inputs) | |
meta_losses = [tf.reduce_mean(tf.square(target - grad)) | |
for target, grad in zip(targets, grads)] | |
meta_loss = tf.reduce_sum(tf.stack(meta_losses)) | |
print('Building gradient of meta-loss...') | |
tf.train.AdamOptimizer().minimize(meta_loss) | |
print('Done building graph.') | |
while True: | |
time.sleep(1) | |
if __name__ == '__main__': | |
build_graph() |
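Why the graph explodes: tf.gradients() adds a backward op for roughly every forward op between the loss and the variables, and the meta-loss above depends on those backward ops, so minimizing it forces TF to differentiate the backward graph of a 200-step bidirectional LSTM a second time. Below is a minimal sketch of the same double-backprop pattern on a toy graph (assuming TF 1.x graph mode, as in the gist; the num_ops helper is just an illustrative way to watch the graph grow):

import tensorflow as tf


def num_ops():
    # Count the ops currently in the default graph.
    return len(tf.get_default_graph().get_operations())


x = tf.Variable(tf.random_normal((10, 5)))
y = tf.reduce_mean(tf.tanh(x))
print('forward ops:', num_ops())

# First backward pass: adds backward ops for d y / d x.
dy_dx, = tf.gradients(y, [x])
print('after first tf.gradients:', num_ops())

# A loss on the gradient, then a second backward pass that
# differentiates the backward ops added above.
grad_loss = tf.reduce_mean(tf.square(dy_dx))
d2_dx2, = tf.gradients(grad_loss, [x])
print('after second tf.gradients:', num_ops())

Each differentiation adds ops roughly in proportion to the graph it differentiates, so with tanh swapped for the gist's 200-step bidirectional LSTM, the second pass is applied to a graph that is already enormous.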