Skip to content

Instantly share code, notes, and snippets.

@gngdb
Last active June 3, 2020 16:39
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save gngdb/4607b2a82fb95dcbb43f350e61e60722 to your computer and use it in GitHub Desktop.
Testing random seed setting in different threads
Tuples sampled in parallel or sequentially are equal:
0.496, 0.549, 0.844 == 0.496, 0.549, 0.844
0.758, 0.417, 0.134 == 0.758, 0.417, 0.134
0.615, 0.436, 0.956 == 0.615, 0.436, 0.956
0.004, 0.551, 0.238 == 0.004, 0.551, 0.238
0.560, 0.967, 0.236 == 0.560, 0.967, 0.236
0.830, 0.222, 0.623 == 0.830, 0.222, 0.623
0.572, 0.893, 0.793 == 0.572, 0.893, 0.793
0.535, 0.076, 0.324 == 0.535, 0.076, 0.324
0.598, 0.873, 0.227 == 0.598, 0.873, 0.227
0.656, 0.010, 0.463 == 0.656, 0.010, 0.463
0.458, 0.771, 0.571 == 0.458, 0.771, 0.571
0.149, 0.180, 0.452 == 0.149, 0.180, 0.452
0.466, 0.154, 0.475 == 0.466, 0.154, 0.475
0.092, 0.778, 0.259 == 0.092, 0.778, 0.259
0.569, 0.514, 0.107 == 0.569, 0.514, 0.107
0.297, 0.849, 0.965 == 0.297, 0.849, 0.965
0.163, 0.223, 0.362 == 0.163, 0.223, 0.362
0.434, 0.295, 0.522 == 0.434, 0.295, 0.522
0.496, 0.650, 0.181 == 0.496, 0.650, 0.181
0.969, 0.098, 0.677 == 0.969, 0.098, 0.677
0.561, 0.588, 0.906 == 0.561, 0.588, 0.906
0.474, 0.049, 0.165 == 0.474, 0.049, 0.165
0.366, 0.208, 0.958 == 0.366, 0.208, 0.958
0.428, 0.517, 0.925 == 0.428, 0.517, 0.925
0.764, 0.960, 0.712 == 0.764, 0.960, 0.712
0.752, 0.870, 0.377 == 0.752, 0.870, 0.377
0.831, 0.308, 0.747 == 0.831, 0.308, 0.747
0.985, 0.426, 0.648 == 0.985, 0.426, 0.648
0.628, 0.729, 0.113 == 0.628, 0.729, 0.113
0.123, 0.864, 0.548 == 0.123, 0.864, 0.548
0.901, 0.644, 0.539 == 0.901, 0.644, 0.539
0.230, 0.286, 0.012 == 0.230, 0.286, 0.012
0.876, 0.859, 0.077 == 0.876, 0.859, 0.077
0.619, 0.249, 0.570 == 0.619, 0.249, 0.570
0.872, 0.039, 0.529 == 0.872, 0.039, 0.529
0.262, 0.458, 0.549 == 0.262, 0.458, 0.549
0.498, 0.729, 0.329 == 0.498, 0.729, 0.329
0.791, 0.944, 0.682 == 0.791, 0.944, 0.682
0.502, 0.385, 0.639 == 0.502, 0.385, 0.639
0.004, 0.547, 0.210 == 0.004, 0.547, 0.210
0.368, 0.408, 0.459 == 0.368, 0.408, 0.459
0.236, 0.251, 0.381 == 0.236, 0.251, 0.381
0.882, 0.375, 0.639 == 0.882, 0.375, 0.639
0.454, 0.115, 0.039 == 0.454, 0.115, 0.039
0.720, 0.835, 0.409 == 0.720, 0.835, 0.409
0.187, 0.989, 0.272 == 0.187, 0.989, 0.272
0.661, 0.784, 0.888 == 0.661, 0.784, 0.888
0.053, 0.113, 0.352 == 0.053, 0.113, 0.352
0.478, 0.017, 0.548 == 0.478, 0.017, 0.548
0.047, 0.301, 0.067 == 0.047, 0.301, 0.067
0.618, 0.495, 0.498 == 0.618, 0.495, 0.498
0.987, 0.676, 0.244 == 0.987, 0.676, 0.244
0.716, 0.823, 0.978 == 0.716, 0.823, 0.978
0.746, 0.847, 0.617 == 0.746, 0.847, 0.617
0.567, 0.420, 0.914 == 0.567, 0.420, 0.914
0.836, 0.093, 0.090 == 0.836, 0.093, 0.090
0.953, 0.984, 0.966 == 0.953, 0.984, 0.966
0.362, 0.087, 0.043 == 0.362, 0.087, 0.043
0.467, 0.365, 0.581 == 0.467, 0.365, 0.581
0.553, 0.924, 0.224 == 0.553, 0.924, 0.224
0.024, 0.301, 0.308 == 0.024, 0.301, 0.308
0.064, 0.824, 0.494 == 0.064, 0.824, 0.494
0.641, 0.034, 0.928 == 0.641, 0.034, 0.928
0.809, 0.554, 0.445 == 0.809, 0.554, 0.445
0.049, 0.379, 0.476 == 0.049, 0.379, 0.476
0.973, 0.219, 0.415 == 0.973, 0.219, 0.415
0.498, 0.154, 0.071 == 0.498, 0.154, 0.071
0.738, 0.546, 0.075 == 0.738, 0.546, 0.075
0.399, 0.259, 0.742 == 0.399, 0.259, 0.742
0.840, 0.296, 0.684 == 0.840, 0.296, 0.684
0.435, 0.927, 0.910 == 0.435, 0.927, 0.910
0.507, 0.186, 0.324 == 0.507, 0.186, 0.324
0.325, 0.107, 0.073 == 0.325, 0.107, 0.073
0.529, 0.643, 0.280 == 0.529, 0.643, 0.280
0.764, 0.202, 0.860 == 0.764, 0.202, 0.860
0.649, 0.569, 0.451 == 0.649, 0.569, 0.451
0.572, 0.311, 0.371 == 0.572, 0.311, 0.371
0.292, 0.919, 0.799 == 0.292, 0.919, 0.799
0.334, 0.048, 0.814 == 0.334, 0.048, 0.814
0.171, 0.501, 0.145 == 0.171, 0.501, 0.145
0.610, 0.522, 0.271 == 0.610, 0.522, 0.271
0.987, 0.355, 0.506 == 0.987, 0.355, 0.506
0.458, 0.275, 0.145 == 0.458, 0.275, 0.145
0.803, 0.257, 0.496 == 0.803, 0.257, 0.496
0.786, 0.046, 0.732 == 0.786, 0.046, 0.732
0.816, 0.620, 0.200 == 0.816, 0.620, 0.200
0.867, 0.203, 0.792 == 0.867, 0.203, 0.792
0.004, 0.656, 0.146 == 0.004, 0.656, 0.146
0.773, 0.648, 0.397 == 0.773, 0.648, 0.397
0.922, 0.500, 0.081 == 0.922, 0.500, 0.081
0.182, 0.153, 0.204 == 0.182, 0.153, 0.204
0.457, 0.201, 0.084 == 0.457, 0.201, 0.084
0.785, 0.886, 0.421 == 0.785, 0.886, 0.421
0.227, 0.606, 0.912 == 0.227, 0.606, 0.912
0.163, 0.719, 0.547 == 0.163, 0.719, 0.547
0.574, 0.229, 0.760 == 0.574, 0.229, 0.760
0.960, 0.215, 0.371 == 0.960, 0.215, 0.371
0.258, 0.837, 0.195 == 0.258, 0.837, 0.195
0.460, 0.732, 0.356 == 0.460, 0.732, 0.356
0.103, 0.672, 0.404 == 0.103, 0.672, 0.404
# became nervous that results might be different if using seeds in this way
# in different threads
import torch
import numpy as np
import random
from joblib import Parallel, delayed
def run_experiment(seed):
    """Seed every RNG we use, then draw one sample from each library.

    Args:
        seed: integer seed applied to torch, numpy and the stdlib `random`.

    Returns:
        A `(float, float, float)` tuple of one sample from torch, numpy and
        `random.random()` respectively, so runs with equal seeds can be
        compared for reproducibility.
    """
    # Seed all three RNG sources before sampling anything.
    torch.manual_seed(seed)
    np.random.seed(seed)
    random.seed(seed)
    # One draw per library; torch gives a 1-element tensor, unwrap it.
    torch_sample = torch.rand((1,)).item()
    numpy_sample = np.random.rand()
    stdlib_sample = random.random()
    return torch_sample, numpy_sample, stdlib_sample
if __name__ == "__main__":
results = []
for seed in range(100):
results.append(run_experiment(seed))
parallel_results = Parallel(n_jobs=-2)(delayed(run_experiment)(seed)
for seed in range(100))
# approx equality for tuples
approx_equal = lambda x,y: all(abs(a-b)<1e-6 for a,b in zip(x,y))
assert all(approx_equal(x,y) for x,y in zip(results, parallel_results))
print("Tuples sampled in parallel or sequentially are equal:")
for x,y in zip(results, parallel_results):
x, y = ", ".join("%.3f"%a for a in x), ", ".join("%.3f"%b for b in y)
print(f"{x} == {y}")
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment