Skip to content

Instantly share code, notes, and snippets.

@lzamparo
Created March 22, 2013 22:35
Show Gist options
  • Save lzamparo/5225288 to your computer and use it in GitHub Desktop.
Theano test script, but computing exp on the GPU using gpu_from_host to wrap T.exp()
"""Theano GPU benchmark script.

Times `iters` evaluations of exp() over a large shared vector, forcing the
result to stay on the GPU by wrapping T.exp() in gpu_from_host, and writes a
timestamped report (including whether the compiled graph actually ran on the
CPU or the GPU) into a file under the directory given with -d/--dir.

Ported from Python 2 (`print >>`, `xrange`) to Python 3 syntax; the original
loop/branch indentation, lost in transcription, is restored here.
"""
from theano import function, config, shared, sandbox
import theano.tensor as T
import numpy
import time
from datetime import datetime
from optparse import OptionParser
import os

parser = OptionParser()
parser.add_option("-d", "--dir", dest="dir", help="test output directory")
(options, args) = parser.parse_args()

vlen = 10 * 448 * 768  # 10 x #cores x # threads per core
iters = 1000

os.chdir(options.dir)

# Timestamped output file name, e.g. "from_host_test.2013-03-22.22:35:00".
today = datetime.today()
day = str(today.date())
hour = str(today.time())
output_filename = "from_host_test." + day + "." + hour

# Context manager guarantees the report file is closed even if the
# benchmark raises (e.g. no CUDA device available).
with open(output_filename, 'w') as output_file:
    print("Run on " + str(datetime.now()), file=output_file)

    # Large shared vector; gpu_from_host keeps the exp() result on the GPU
    # instead of transferring it back to the host inside the graph.
    rng = numpy.random.RandomState(22)
    x = shared(numpy.asarray(rng.rand(vlen), config.floatX))
    f = function([], sandbox.cuda.basic_ops.gpu_from_host(T.exp(x)))
    print(f.maker.fgraph.toposort())

    t0 = time.time()
    for i in range(iters):
        r = f()
    t1 = time.time()

    print('Looping %d times took' % iters, t1 - t0, 'seconds', file=output_file)
    print('Result is', r, file=output_file)
    print('Numpy result is', numpy.asarray(r), file=output_file)

    # If any node of the optimized graph is still a plain (CPU) Elemwise op,
    # the computation fell back to the CPU.  NOTE: loop variable renamed from
    # `x` to `node` — the original shadowed the shared variable above.
    if numpy.any([isinstance(node.op, T.Elemwise) for node in
                  f.maker.fgraph.toposort()]):
        print('Used the cpu', file=output_file)
    else:
        print('Used the gpu', file=output_file)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment