import multiprocessing

import numpy as np


def __process__(q_in, q_out):
    # Worker loop: re-seed so every process draws different random numbers,
    # then keep preprocessing items until a None sentinel arrives.
    np.random.seed()
    while True:
        data = q_in.get()
        if data is None:
            break
        q_out.put((preprocess(data[0]), data[1]))
def iterate_minibatches(inputs, targets, batchsize, shuffle=True):
    # Start a pool of worker processes connected by two queues.
    nprocs = multiprocessing.cpu_count() - 1
    q_in = multiprocessing.Queue()
    q_out = multiprocessing.Queue()
    proc = [multiprocessing.Process(target=__process__, args=(q_in, q_out))
            for _ in range(nprocs)]
    for p in proc:
        p.daemon = True
        p.start()

    # Feed the first batch into the input queue.
    instance = minibatches(inputs, targets, batchsize, shuffle)
    batch = next(instance)
    for i in zip(*batch):
        q_in.put(i)
    last_size = len(batch[0])

    # Enqueue each following batch while collecting the previous one.
    for batch in instance:
        for i in zip(*batch):
            q_in.put(i)
        data = [q_out.get() for _ in range(last_size)]
        yield list(zip(*data))
        last_size = len(batch[0])

    # Collect the last batch.
    data = [q_out.get() for _ in range(last_size)]
    yield list(zip(*data))

    # Send a stop sentinel to every worker and wait for them to exit.
    for i in range(nprocs):
        q_in.put(None)
    for p in proc:
        p.join()
# My fkn slow function
def preprocess(x):
    # ...
    return x


# my regular iterator
def minibatches(inputs, targets, batchsize, shuffle=True):
    # ...
    yield some_stuff
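
Usage is the same as for the plain minibatches generator; the targets travel through the queues alongside the inputs because the workers may return items in a different order than they were enqueued. Below is a minimal sketch of how the parallel iterator could be consumed in a training loop, assuming preprocess and minibatches have been filled in; X_train, y_train and train_step are hypothetical placeholders, not part of the gist.

# Hypothetical toy data and training step, just to exercise the iterator.
X_train = np.random.rand(1000, 32)
y_train = np.random.randint(0, 10, size=1000)


def train_step(x_batch, y_batch):
    # Stand-in for a real optimization step.
    return float(np.mean(x_batch))


if __name__ == '__main__':  # needed on platforms that spawn processes
    for x_batch, y_batch in iterate_minibatches(X_train, y_train, batchsize=64):
        loss = train_step(np.asarray(x_batch), np.asarray(y_batch))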