Python multiprocessing hello world: split a list into chunks and process each chunk in a separate job.
import multiprocessing
import os


# Split a list into chunks of length n (the last chunk may be shorter).
def chunks(l, n):
    return [l[i:i + n] for i in range(0, len(l), n)]


# Worker: process one slice of the data and push results onto the shared queue.
def do_job(job_id, data_slice, queue):
    for item in data_slice:
        print("job", job_id, item)
        queue.put(item + ' done by job ' + str(job_id))


def dispatch_jobs(data, job_number):
    total = len(data)
    # Avoid a zero chunk size when there are more jobs than items.
    chunk_size = max(1, total // job_number)
    slices = chunks(data, chunk_size)
    jobs = []
    queue = multiprocessing.Queue()
    for i, s in enumerate(slices):
        j = multiprocessing.Process(target=do_job, args=(i, s, queue))
        jobs.append(j)
    for j in jobs:
        j.start()
    for j in jobs:
        j.join()
    # Drain the results. Note: for large result sets, drain the queue before or
    # while joining, since a child blocked on a full queue never exits.
    while not queue.empty():
        print(queue.get())


if __name__ == "__main__":
    data = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p']
    dispatch_jobs(data, os.cpu_count())
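
For comparison, the same split-and-process pattern can be expressed with multiprocessing.Pool, which handles chunking, scheduling, and result collection itself. This is a minimal sketch, not part of the original gist; the label_item helper is a hypothetical stand-in for the per-item work done in do_job.

import multiprocessing
import os


# Hypothetical helper (not in the original gist): tag one item with the worker's PID.
def label_item(item):
    return item + ' done by worker ' + str(os.getpid())


if __name__ == "__main__":
    data = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p']
    # Pool.map splits the iterable into chunks, distributes them across the
    # worker processes, and returns the results in order.
    with multiprocessing.Pool(processes=os.cpu_count()) as pool:
        for result in pool.map(label_item, data):
            print(result)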