@pmav99
Last active October 22, 2019 13:33
Simple tensorflow test
#!/usr/bin/env python3
import datetime
import numpy as np
import tensorflow as tf
# Log the device (CPU/GPU) on which each op is placed
log_device_placement = True
# Num of multiplications to perform
n = 10
'''
Example: compute A^n + B^n, timing the computation on the CPU and on a single GPU.
Reference results from the original two-GPU version of this example
(8 cores with 2 GTX-980):
* Single GPU computation time: 0:00:11.277449
* Multi GPU computation time: 0:00:07.131701
'''
# Create two random large matrices
A = np.random.rand(10000, 10000).astype('float32')
B = np.random.rand(10000, 10000).astype('float32')
# Lists that will hold the ops to be summed
c1 = []
c2 = []  # unused in this single-GPU version
def matpow(M, n):
    # Recursively multiply M with itself n times (element-wise, via tf.multiply)
    if n < 1:  # base case: stop the recursion
        return M
    else:
        return tf.multiply(M, matpow(M, n - 1))
# TF 2.x removed tf.placeholder, tf.Session and tf.ConfigProto; fall back to
# the v1 compatibility layer so the same graph-mode code runs on both versions.
if not tf.version.VERSION.startswith("1"):
    tf.compat.v1.disable_eager_execution()
    tf = tf.compat.v1

# Single CPU run: build A^n + B^n entirely on the CPU.
with tf.device('/cpu:0'):
    a = tf.placeholder(tf.float32, shape=A.shape)
    b = tf.placeholder(tf.float32, shape=B.shape)
    cpu_sum = tf.add_n([matpow(a, n), matpow(b, n)])

tcpu_0 = datetime.datetime.now()
with tf.Session(config=tf.ConfigProto(log_device_placement=log_device_placement)) as sess:
    cpu_result = sess.run(cpu_sum, {a: A, b: B})
tcpu_1 = datetime.datetime.now()
print("CPU sum: ", cpu_result)
print("Single CPU computation time: " + str(tcpu_1 - tcpu_0))

# Single GPU run: compute A^n and B^n on the GPU, then add them on the CPU.
with tf.device('/gpu:0'):
    a = tf.placeholder(tf.float32, shape=A.shape)
    b = tf.placeholder(tf.float32, shape=B.shape)
    # Compute A^n and B^n and store the resulting ops in c1
    c1.append(matpow(a, n))
    c1.append(matpow(b, n))

with tf.device('/cpu:0'):
    gpu_sum = tf.add_n(c1)  # Addition of all elements in c1, i.e. A^n + B^n

tgpu_0 = datetime.datetime.now()
with tf.Session(config=tf.ConfigProto(log_device_placement=log_device_placement)) as sess:
    # Run the op, feeding the numpy matrices into the placeholders.
    gpu_result = sess.run(gpu_sum, {a: A, b: B})
tgpu_1 = datetime.datetime.now()
print("GPU sum: ", gpu_result)
print("Single GPU computation time: " + str(tgpu_1 - tgpu_0))