@calincru
Last active July 9, 2019 13:04
tf.linalg.eigh is slower on GPU than on CPU
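
The script below builds a batch of 100,000 random symmetric 2x2 matrices, places one copy of the tf.linalg.eigh op on the CPU and one on the GPU (TensorFlow 1.x graph mode), warms each op up once, and then times 10 runs of each.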
import tensorflow as tf
import timeit

assert tf.test.is_gpu_available()

# See https://www.tensorflow.org/tutorials/using_gpu#allowing_gpu_memory_growth
config = tf.ConfigProto()
config.gpu_options.allow_growth = True


def sym(x):
    """Symmetrize x so that tf.linalg.eigh's Hermitian-input assumption holds."""
    return 0.5 * (x + tf.matrix_transpose(x))


with tf.device('/cpu:0'):
    wv_cpu = tf.linalg.eigh(sym(tf.random.uniform((100000, 2, 2))))
with tf.device('/device:GPU:0'):
    wv_gpu = tf.linalg.eigh(sym(tf.random.uniform((100000, 2, 2))))

with tf.Session(config=config) as sess:
    cpu = lambda: sess.run(wv_cpu)
    gpu = lambda: sess.run(wv_gpu)

    # Run each op once to warm up; see https://stackoverflow.com/a/45067900
    cpu()
    gpu()

    # Time 10 runs of each op.
    cpu_time = timeit.timeit('cpu()', number=10, setup='from __main__ import cpu')
    gpu_time = timeit.timeit('gpu()', number=10, setup='from __main__ import gpu')

    print('CPU (s): ', cpu_time)
    print('GPU (s): ', gpu_time)
    print('CPU speedup over GPU: {}x'.format(int(gpu_time / cpu_time)))
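
For reference, the same comparison can be reproduced in TensorFlow 2.x eager mode. The sketch below is not part of the original gist; it assumes a TF 2.x install with a visible GPU, uses tf.linalg.matrix_transpose in place of the deprecated tf.matrix_transpose, and calls .numpy() on the results so that any asynchronous GPU work is included in the timing.

import timeit
import tensorflow as tf

assert tf.config.list_physical_devices('GPU')


def sym(x):
    # Symmetrize so that eigh's Hermitian-input assumption holds.
    return 0.5 * (x + tf.linalg.matrix_transpose(x))


def eigh_on(device):
    # Build the batch and run eigh on the requested device, then pull the
    # eigenvalues/eigenvectors back to host to force synchronization.
    with tf.device(device):
        e, v = tf.linalg.eigh(sym(tf.random.uniform((100000, 2, 2))))
    return e.numpy(), v.numpy()


# Warm up once per device, then time 10 runs of each.
eigh_on('/CPU:0')
eigh_on('/GPU:0')
cpu_time = timeit.timeit(lambda: eigh_on('/CPU:0'), number=10)
gpu_time = timeit.timeit(lambda: eigh_on('/GPU:0'), number=10)
print('CPU (s): ', cpu_time)
print('GPU (s): ', gpu_time)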