Skip to content

Instantly share code, notes, and snippets.

Embed
What would you like to do?
Testing MultiProcessingLog on both Windows 7 Enterprise and Ubuntu Developers Version 10.04_20121120.
---
# dictConfig-style logging setup (schema of logging.config.dictConfig, version 1).
version: 1
disable_existing_loggers: False

formatters:
  simple:
    format: "%(name)-20s%(levelname)-8s%(message)s"

handlers:
  # Plain console output on stdout.
  console:
    class: logging.StreamHandler
    level: DEBUG
    formatter: simple
    stream: ext://sys.stdout
  # Queue-backed, multiprocess-safe rotating file handler; the remaining
  # keys (name, mode, maxsize, rotate) are passed through to
  # mplog.MultiProcessingLog.__init__.
  mplog:
    class: mplog.MultiProcessingLog
    level: DEBUG
    formatter: simple
    name: mplog.log
    mode: a
    maxsize: 1024
    rotate: 0

root:
  level: DEBUG
  handlers: [console, mplog]
from logging.handlers import RotatingFileHandler
import multiprocessing, threading, logging, sys, traceback
import os
class MultiProcessingLog(logging.Handler):
    """Logging handler that funnels records from many processes into one file.

    Each process pickles LogRecords onto a shared ``multiprocessing.Queue``;
    a daemon thread in the process that created the handler drains the queue
    and writes every record through a single ``RotatingFileHandler``, so
    concurrent processes never interleave partial lines in the file.
    """

    def __init__(self, name, mode, maxsize, rotate):
        """Open the underlying rotating file and start the receiver thread.

        Parameters mirror ``RotatingFileHandler``: *name* is the file path,
        *mode* the open mode, *maxsize* the rotation threshold in bytes
        (0 disables rotation) and *rotate* the backup count.
        """
        logging.Handler.__init__(self)
        self._handler = RotatingFileHandler(name, mode, maxsize, rotate)
        self.queue = multiprocessing.Queue(-1)  # -1: unbounded
        self._receiver = threading.Thread(target=self.receive)
        self._receiver.daemon = True
        self._receiver.start()

    def setFormatter(self, fmt):
        """Apply *fmt* both to this handler and to the wrapped file handler."""
        logging.Handler.setFormatter(self, fmt)
        self._handler.setFormatter(fmt)

    def receive(self):
        """Drain the queue, writing each record to the file.

        Runs in a daemon thread.  Exits on the ``None`` sentinel queued by
        :meth:`close` — which guarantees every record sent before close()
        has been written — or on ``EOFError`` if the queue's pipe is gone.
        """
        while True:
            try:
                record = self.queue.get()
                if record is None:
                    # Sentinel from close(): the queue is FIFO, so everything
                    # queued earlier has already been emitted.
                    break
                self._handler.emit(record)
            except (KeyboardInterrupt, SystemExit):
                raise
            except EOFError:
                break
            except Exception:
                # A single malformed record must not kill the receiver thread.
                traceback.print_exc(file=sys.stderr)

    def send(self, s):
        """Enqueue an already-formatted record without blocking."""
        self.queue.put_nowait(s)

    def _format_record(self, record):
        # Stringify args and exc_info up front: removes any chance of
        # unpicklable objects travelling over the pipe and usually shrinks
        # the payload.
        if record.args:
            record.msg = record.msg % record.args
            record.args = None
        if record.exc_info:
            self.format(record)  # side effect: caches exc_text on the record
            record.exc_info = None
        return record

    def emit(self, record):
        """Serialize *record* and hand it to the receiver thread's queue."""
        try:
            self.send(self._format_record(record))
        except (KeyboardInterrupt, SystemExit):
            raise
        except Exception:
            self.handleError(record)

    def close(self):
        """Flush pending records, stop the receiver and close the file.

        Queues a ``None`` sentinel so the receiver drains everything sent
        so far before the file handler is closed; previously close() could
        race the daemon thread and drop queued records at interpreter exit.
        """
        self.queue.put_nowait(None)
        self._receiver.join(timeout=5.0)
        self._handler.close()
        logging.Handler.close(self)
subproc INFO [20728] value begin
subproc INFO [20731] value 1
subproc INFO [20732] value 2
subproc INFO [20733] value 3
subproc INFO [20734] value 4
subproc INFO [20728] value end
subproc INFO [5432] value begin
subproc INFO [5432] value end
import logging
import os
from multiprocessing import Pool

from subproc import test

logger = logging.getLogger(__name__)

if __name__ == '__main__':
    import logging.config

    import yaml

    # Load the dictConfig-style setup if the YAML file is present; logging
    # falls back to the default configuration otherwise.
    path = 'logging.yaml'
    if os.path.exists(path):
        with open(path, 'rt') as f:
            # safe_load: the config file must not be able to construct
            # arbitrary Python objects (yaml.load without a Loader is unsafe
            # and deprecated).
            config = yaml.safe_load(f)
        logging.config.dictConfig(config)

    test('begin')
    # Fan the calls out over four worker processes; the context manager
    # terminates the pool once map() has returned.
    with Pool(4) as p:
        p.map(test, [1, 2, 3, 4])
    test('end')
import logging
import os
logger = logging.getLogger(__name__)
def test(value):
msg = '[{}] value {}'.format(os.getpid(), value)
logger.info(msg)
@mattgathu

This comment has been minimized.

Copy link

mattgathu commented Apr 17, 2015

Thank you for this. Made a great difference for me 👍

@bijancn

This comment has been minimized.

Copy link

bijancn commented Jun 9, 2015

Helped a lot!!!

@DanRamage

This comment has been minimized.

Copy link

DanRamage commented Jan 2, 2016

Looking at output-windows.txt, the logging does not work on Windows?

@ShukujiNeel13

This comment has been minimized.

Copy link

ShukujiNeel13 commented Apr 5, 2018

Thank you for this approach. Helpful!

@yongsheng1992

This comment has been minimized.

Copy link

yongsheng1992 commented Apr 11, 2018

If the iterable argument to the map function has a large size, e.g. 1000+, the daemon thread in MultiProcessingLog will not process all of the items in the queue before the main process exits.

@neilpryde1984

This comment has been minimized.

Copy link

neilpryde1984 commented Jul 31, 2018

cool stuff.
could you probably solve the strange windows behaviour?

@satyaaditya

This comment has been minimized.

Copy link

satyaaditya commented Apr 1, 2019

I implemented this with a TimedRotatingFileHandler, but I am facing an issue:
I am using logging.getLogger('loggername') to get the logger, and the log statements emitted after creating forks are not being written to the log file.
for more details, please go through this question https://stackoverflow.com/questions/55411291/python-logging-failure-with-multiprocessing

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment
You can’t perform that action at this time.