@unnonouno
Last active March 1, 2017 02:41
cuDNN benchmark

A micro-benchmark of Chainer functions on GPU: each function runs 100 forward and 100 backward passes, first with use_cudnn=False and then with use_cudnn=True, synchronizing the CUDA null stream around each timed region so that asynchronous kernel launches are included in the measurement.
import contextlib
import time

import chainer
import chainer.functions as F
import cupy


@contextlib.contextmanager
def timer(message):
    # Synchronize before and after the timed region so pending GPU work
    # does not leak into (or out of) the measurement.
    cupy.cuda.Stream.null.synchronize()
    start = time.time()
    yield
    cupy.cuda.Stream.null.synchronize()
    end = time.time()
    print('%s: %f sec' % (message, end - start))


def randf(*shape):
    # Random float32 input in [-1, 1).
    return cupy.random.uniform(-1, 1, shape).astype('f')
    # return cupy.asfortranarray(cupy.random.uniform(-1, 1, shape).astype('f'))


def randi(*shape):
    # Random int32 labels in {0, 1, 2}.
    return cupy.random.randint(0, 3, shape).astype('i')


def flatten(y):
    # Some functions (e.g. n_step_lstm) return nested tuples of variables.
    if isinstance(y, (tuple, list)):
        return sum([flatten(x) for x in y], [])
    else:
        return [y]


def run(func, args):
    print(func.__name__)
    with timer('  cudnn=False, forward '):
        for i in range(100):
            ys = func(*args, use_cudnn=False)
    ys = flatten(ys)
    y = sum([F.sum(y) for y in ys])
    with timer('  cudnn=False, backward'):
        for i in range(100):
            y.backward()
    with timer('  cudnn=True,  forward '):
        for i in range(100):
            ys = func(*args, use_cudnn=True)
    ys = flatten(ys)
    y = sum([F.sum(y) for y in ys])
    with timer('  cudnn=True,  backward'):
        for i in range(100):
            y.backward()


def main():
    # Disable dynamic type checking to reduce Python-side overhead.
    chainer.function.type_check_enable = False
    # Warm up both paths so one-time initialization costs are excluded.
    F.log_softmax(randf(10, 10), use_cudnn=False)
    F.log_softmax(randf(10, 10), use_cudnn=True)
    tests = [
        (F.log_softmax, [randf(1000, 1000)]),
        (F.relu, [randf(1000, 1000)]),
        (F.sigmoid, [randf(1000, 1000)]),
        (F.softmax, [randf(1000, 1000)]),
        (F.tanh, [randf(1000, 1000)]),
        (F.convolution_2d, [randf(32, 3, 256, 256), randf(3, 3, 5, 5)]),
        (F.convolution_nd, [randf(10, 3, 32, 32, 32),
                            randf(3, 3, 5, 5, 5)]),
        (F.deconvolution_2d, [randf(32, 3, 256, 256), randf(3, 3, 5, 5)]),
        (F.deconvolution_nd, [randf(10, 3, 32, 32, 32),
                              randf(3, 3, 5, 5, 5)]),
        (F.dilated_convolution_2d,
         [randf(32, 3, 256, 256), randf(3, 3, 5, 5)]),
        (F.n_step_lstm,
         [3, 0.1, randf(3, 32, 256), randf(3, 32, 256),
          [[randf(256, 256)] * 8] * 3, [[randf(256)] * 8] * 3,
          [randf(32, 256)] * 10]),
        (F.sigmoid_cross_entropy, [randf(1000, 1000), randi(1000, 1000)]),
        (F.softmax_cross_entropy, [randf(1000, 1000), randi(1000)]),
        (F.batch_normalization, [randf(1000, 1000), randf(1000), randf(1000)]),
        (F.average_pooling_2d, [randf(32, 3, 256, 256), 5]),
        (F.average_pooling_nd, [randf(32, 3, 256, 256), 5]),
        (F.max_pooling_2d, [randf(32, 3, 256, 256), 5]),
        (F.max_pooling_nd, [randf(32, 3, 256, 256), 5]),
    ]
    for fun, args in tests:
        run(fun, args)


if __name__ == '__main__':
    main()
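Note: this script targets the Chainer v1 API, where each function accepted a use_cudnn keyword argument. In Chainer v2 and later that keyword was removed in favor of the use_cudnn configuration key ('always', 'auto', or 'never'), so the same comparison would look roughly like the sketch below; timer, flatten, and the argument lists are as defined above.

def run_v2(func, args):
    # Sketch of the same measurement under the Chainer v2+ config API.
    # 'never' disables cuDNN; 'always' forces it wherever it is available.
    print(func.__name__)
    for use_cudnn in ('never', 'always'):
        with chainer.using_config('use_cudnn', use_cudnn):
            with timer('  cudnn=%s, forward ' % use_cudnn):
                for i in range(100):
                    ys = func(*args)
            y = sum([F.sum(v) for v in flatten(ys)])
            with timer('  cudnn=%s, backward' % use_cudnn):
                for i in range(100):
                    y.backward()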
log_softmax
  cudnn=False, forward : 0.032749 sec
  cudnn=False, backward: 0.039907 sec
  cudnn=True,  forward : 0.012376 sec
  cudnn=True,  backward: 0.317023 sec
relu
  cudnn=False, forward : 0.014981 sec
  cudnn=False, backward: 0.019866 sec
  cudnn=True,  forward : 0.011890 sec
  cudnn=True,  backward: 0.019084 sec
sigmoid
  cudnn=False, forward : 0.014846 sec
  cudnn=False, backward: 0.019715 sec
  cudnn=True,  forward : 0.011979 sec
  cudnn=True,  backward: 0.019054 sec
softmax
  cudnn=False, forward : 0.029846 sec
  cudnn=False, backward: 0.030203 sec
  cudnn=True,  forward : 0.012462 sec
  cudnn=True,  backward: 0.303333 sec
tanh
  cudnn=False, forward : 0.013843 sec
  cudnn=False, backward: 0.019770 sec
  cudnn=True,  forward : 0.011990 sec
  cudnn=True,  backward: 0.019057 sec
convolution_2d
  cudnn=False, forward : 2.110256 sec
  cudnn=False, backward: 19.123759 sec
  cudnn=True,  forward : 0.197926 sec
  cudnn=True,  backward: 0.596160 sec
convolution_nd
  cudnn=False, forward : 1.249454 sec
  cudnn=False, backward: 3.309162 sec
  cudnn=True,  forward : 0.123063 sec
  cudnn=True,  backward: 0.615278 sec
deconvolution_2d
  cudnn=False, forward : 1.577584 sec
  cudnn=False, backward: 20.116773 sec
  cudnn=True,  forward : 0.247199 sec
  cudnn=True,  backward: 0.572957 sec
deconvolution_nd
  cudnn=False, forward : 0.848029 sec
  cudnn=False, backward: 5.902399 sec
  cudnn=True,  forward : 0.655159 sec
  cudnn=True,  backward: 0.438626 sec
dilated_convolution_2d
  cudnn=False, forward : 2.006309 sec
  cudnn=False, backward: 19.166945 sec
  cudnn=True,  forward : 2.381277 sec
  cudnn=True,  backward: 13.502654 sec
n_step_lstm
  cudnn=False, forward : 5.154217 sec
  cudnn=False, backward: 2.588497 sec
  cudnn=True,  forward : 0.640765 sec
  cudnn=True,  backward: 0.572446 sec
sigmoid_cross_entropy
  cudnn=False, forward : 0.184929 sec
  cudnn=False, backward: 0.051874 sec
  cudnn=True,  forward : 0.150344 sec
  cudnn=True,  backward: 0.032857 sec
softmax_cross_entropy
  cudnn=False, forward : 0.066374 sec
  cudnn=False, backward: 0.022766 sec
  cudnn=True,  forward : 0.043783 sec
  cudnn=True,  backward: 0.016709 sec
batch_normalization
  cudnn=False, forward : 0.092315 sec
  cudnn=False, backward: 0.040535 sec
  cudnn=True,  forward : 0.025238 sec
  cudnn=True,  backward: 0.040831 sec
average_pooling_2d
  cudnn=False, forward : 0.019958 sec
  cudnn=False, backward: 0.078952 sec
  cudnn=True,  forward : 0.013500 sec
  cudnn=True,  backward: 0.055199 sec
average_pooling_nd
  cudnn=False, forward : 0.021122 sec
  cudnn=False, backward: 0.079859 sec
  cudnn=True,  forward : 0.015134 sec
  cudnn=True,  backward: 0.055067 sec
max_pooling_2d
  cudnn=False, forward : 0.020844 sec
  cudnn=False, backward: 0.079874 sec
  cudnn=True,  forward : 0.013794 sec
  cudnn=True,  backward: 0.064925 sec
max_pooling_nd
  cudnn=False, forward : 0.020956 sec
  cudnn=False, backward: 0.079999 sec
  cudnn=True,  forward : 0.013988 sec
  cudnn=True,  backward: 0.065005 sec
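A few observations from the run above: cuDNN wins by an order of magnitude on the convolutions (convolution_2d backward drops from 19.12 sec to 0.60 sec) and on n_step_lstm; the element-wise activations barely change; dilated_convolution_2d sees little to no benefit; and the cuDNN backward pass of softmax and log_softmax is roughly 10x slower than the non-cuDNN path (0.030 sec vs. 0.303 sec for softmax).

To compare the two paths quickly, a small helper like the following (hypothetical; it assumes only the log format printed above) pairs up the timings and prints the cudnn=False / cudnn=True speedup per function and pass:

import re
import sys
from collections import defaultdict

# Hypothetical log parser: read the benchmark output above from stdin and
# print how many times faster (or slower) the cuDNN path is per entry.
pattern = re.compile(
    r'cudnn=(True|False),\s*(forward|backward)\s*:\s*([\d.]+) sec')
times = defaultdict(dict)
name = None
for line in sys.stdin:
    m = pattern.search(line)
    if m:
        use_cudnn = m.group(1) == 'True'
        times[(name, m.group(2))][use_cudnn] = float(m.group(3))
    elif line.strip():
        name = line.strip()  # a bare line starts a new function's block

for (name, phase), t in sorted(times.items()):
    if True in t and False in t:
        print('%-24s %-8s %6.2fx' % (name, phase, t[False] / t[True]))

Usage: save the benchmark output to a file and run, e.g., python speedup.py < benchmark.log.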