Skip to content

Instantly share code, notes, and snippets.

@josePhoenix
Created November 17, 2015 18:51
Show Gist options
  • Save josePhoenix/2cab6892c3d893df1d3a to your computer and use it in GitHub Desktop.
import sys
import multiprocessing
import logging
import logging.handlers
import datetime
import time
import random
N_PROCESSES = 32
def _worker_logging_setup(queue_instance):
queue_handler = logging.handlers.QueueHandler(queue_instance)
root = logging.getLogger()
root.addHandler(queue_handler)
root.setLevel(logging.DEBUG)
def test_function(i):
log = logging.getLogger("worker." + multiprocessing.current_process().name) # note: logger instances are not GCed, so creating many programmatically could result in unbounded memory use
time.sleep(random.random())
log.warning("Slept a random amount! My number is {}".format(i))
log.debug("DEBUGGING STUFF My number is {}".format(i))
return i
if __name__ == "__main__":
    # Workers must start fresh (not fork mid-state) so their logging config
    # comes only from the initializer.
    multiprocessing.set_start_method('forkserver')

    # Records from all worker processes funnel through this queue to a single
    # listener in the parent, avoiding interleaved writes to the same file.
    q = multiprocessing.Queue()

    stdout_handler = logging.StreamHandler(stream=sys.stdout)
    # strftime gives zero-padded, lexically-sortable filenames.  Local time is
    # used deliberately: the original called utctimetuple() on a *naive*
    # datetime, which performs no UTC conversion and returned local time too.
    logging_filename = datetime.datetime.now().strftime(
        'demo_run_%Y-%m-%d-%H-%M-%S.log')
    file_handler = logging.FileHandler(logging_filename)

    # The listener is the ONLY owner of both handlers: each worker record
    # reaches the file and stdout exactly once.  (The original also attached
    # file_handler to the root logger, which would have written any
    # main-process record to the file twice.)
    listener = logging.handlers.QueueListener(
        q, file_handler, stdout_handler, respect_handler_level=True)
    listener.start()

    pool = multiprocessing.Pool(
        processes=N_PROCESSES,
        initializer=_worker_logging_setup,
        initargs=(q,),
    )
    try:
        # Consume the iterator: imap_unordered yields lazily, and iterating
        # re-raises any exception a worker hit (the original discarded the
        # iterator, silently swallowing worker failures).
        for _ in pool.imap_unordered(test_function, range(N_PROCESSES)):
            pass
    finally:
        pool.close()
        pool.join()
        listener.stop()
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment