@vsajip
Last active June 25, 2018 01:53
Supervisor on Python 3: no-text-socket approach. Diff (including fixture data) and results of manual testing. When applied to fa7e3f5, all tests pass on 2.x and 3.x.
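
The patch keeps all process and socket I/O as bytes and converts to text only at the edges, using the small compat helpers as_bytes/as_string plus the new is_text_stream. A minimal sketch of the intended usage pattern, not taken from the diff itself; the helper name handle_child_output is hypothetical and stands in for the dispatcher code patched below:

    from supervisor.compat import as_string

    def handle_child_output(logger, data):
        # data read from the child pipe is bytes; buffers stay bytes end to end
        # and decoding happens only where text is genuinely required (logging).
        try:
            text = as_string(data)            # bytes -> str, assuming UTF-8
        except UnicodeDecodeError:
            text = 'Undecodable: %r' % data   # same fallback the diff uses
        logger.info(text)

The rest of the diff applies the same rule mechanically: buffer initialisers become b'', protocol tokens such as READY\n and RESULT become byte literals, and log files are opened in binary mode ('ab'/'wb').
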
diff --git a/.gitignore b/.gitignore
index d656399..4fff496 100644
--- a/.gitignore
+++ b/.gitignore
@@ -4,6 +4,7 @@
*.pyc
*.pyo
*.swp
+*.pss
.DS_Store
.coverage
.eggs/
diff --git a/supervisor/childutils.py b/supervisor/childutils.py
index 1311061..c0e5aa2 100644
--- a/supervisor/childutils.py
+++ b/supervisor/childutils.py
@@ -3,6 +3,7 @@ import time
from supervisor.compat import xmlrpclib
from supervisor.compat import long
+from supervisor.compat import as_string
from supervisor.xmlrpc import SupervisorTransport
from supervisor.events import ProcessCommunicationEvent
@@ -58,7 +59,7 @@ class EventListenerProtocol:
return headers, payload
def ready(self, stdout=sys.stdout):
- stdout.write(PEventListenerDispatcher.READY_FOR_EVENTS_TOKEN)
+ stdout.write(as_string(PEventListenerDispatcher.READY_FOR_EVENTS_TOKEN))
stdout.flush()
def ok(self, stdout=sys.stdout):
@@ -69,7 +70,7 @@ class EventListenerProtocol:
def send(self, data, stdout=sys.stdout):
resultlen = len(data)
- result = '%s%s\n%s' % (PEventListenerDispatcher.RESULT_TOKEN_START,
+ result = '%s%s\n%s' % (as_string(PEventListenerDispatcher.RESULT_TOKEN_START),
str(resultlen),
data)
stdout.write(result)
diff --git a/supervisor/compat.py b/supervisor/compat.py
index 7e606aa..03935bc 100644
--- a/supervisor/compat.py
+++ b/supervisor/compat.py
@@ -14,6 +14,10 @@ if PY3: # pragma: no cover
def as_bytes(s): return s if isinstance(s,bytes) else s.encode('utf8')
def as_string(s): return s if isinstance(s,str) else s.decode('utf8')
+ def is_text_stream(stream):
+ import _io
+ return isinstance(stream, _io._TextIOBase)
+
else: # pragma: no cover
long = long
raw_input = raw_input
@@ -22,6 +26,18 @@ else: # pragma: no cover
def as_bytes(s): return s if isinstance(s, str) else s.encode('utf-8')
def as_string(s): return s if isinstance(s, unicode) else s.decode('utf-8')
+ def is_text_stream(stream):
+ # TODO sort out for Python 2.4, 2.5 and 2.6 when a stream is
+ # opened using plain open() or codecs.open() rather than io.open()
+ if isinstance(stream, file):
+ return 'b' not in stream.mode
+ try:
+ import _io
+ return isinstance(stream, _io._TextIOBase)
+ except ImportError:
+ import io
+ return isinstance(stream, io.TextIOWrapper)
+
def total_ordering(cls): # pragma: no cover
"""Class decorator that fills in missing ordering methods"""
convert = {
diff --git a/supervisor/dispatchers.py b/supervisor/dispatchers.py
index 69e81f3..5a96199 100644
--- a/supervisor/dispatchers.py
+++ b/supervisor/dispatchers.py
@@ -2,6 +2,7 @@ import warnings
import errno
from supervisor.medusa.asyncore_25 import compact_traceback
+from supervisor.compat import as_string
from supervisor.events import notify
from supervisor.events import EventRejectedEvent
from supervisor.events import ProcessLogStderrEvent
@@ -83,7 +84,7 @@ class POutputDispatcher(PDispatcher):
mainlog = None # the process' "normal" logger
capturelog = None # the logger while we're in capturemode
childlog = None # the current logger (event or main)
- output_buffer = '' # data waiting to be logged
+ output_buffer = b'' # data waiting to be logged
def __init__(self, process, event_type, fd):
"""
@@ -171,10 +172,17 @@ class POutputDispatcher(PDispatcher):
if self.childlog:
self.childlog.info(data)
if self.log_to_mainlog:
+ if not isinstance(data, bytes):
+ text = data
+ else:
+ try:
+ text = data.decode('utf-8')
+ except UnicodeDecodeError:
+ text = 'Undecodable: %r' % data
msg = '%(name)r %(channel)s output:\n%(data)s'
config.options.logger.log(
self.mainlog_level, msg, name=config.name,
- channel=self.channel, data=data)
+ channel=self.channel, data=text)
if self.channel == 'stdout':
if self.stdout_events_enabled:
notify(
@@ -192,7 +200,7 @@ class POutputDispatcher(PDispatcher):
if self.capturelog is None:
# shortcut trying to find capture data
data = self.output_buffer
- self.output_buffer = ''
+ self.output_buffer = b''
self._log(data)
return
@@ -205,7 +213,7 @@ class POutputDispatcher(PDispatcher):
return # not enough data
data = self.output_buffer
- self.output_buffer = ''
+ self.output_buffer = b''
try:
before, after = data.split(token, 1)
@@ -270,10 +278,10 @@ class PEventListenerDispatcher(PDispatcher):
""" An output dispatcher that monitors and changes a process'
listener_state """
childlog = None # the logger
- state_buffer = '' # data waiting to be reviewed for state changes
+ state_buffer = b'' # data waiting to be reviewed for state changes
- READY_FOR_EVENTS_TOKEN = 'READY\n'
- RESULT_TOKEN_START = 'RESULT '
+ READY_FOR_EVENTS_TOKEN = b'READY\n'
+ RESULT_TOKEN_START = b'RESULT '
READY_FOR_EVENTS_LEN = len(READY_FOR_EVENTS_TOKEN)
RESULT_TOKEN_START_LEN = len(RESULT_TOKEN_START)
@@ -283,7 +291,7 @@ class PEventListenerDispatcher(PDispatcher):
# "busy" state that implies we're awaiting a READY_FOR_EVENTS_TOKEN
self.process.listener_state = EventListenerStates.ACKNOWLEDGED
self.process.event = None
- self.result = ''
+ self.result = b''
self.resultlen = None
logfile = getattr(process.config, '%s_logfile' % channel)
@@ -352,7 +360,7 @@ class PEventListenerDispatcher(PDispatcher):
if state == EventListenerStates.UNKNOWN:
# this is a fatal state
- self.state_buffer = ''
+ self.state_buffer = b''
return
if state == EventListenerStates.ACKNOWLEDGED:
@@ -366,7 +374,7 @@ class PEventListenerDispatcher(PDispatcher):
process.event = None
else:
self._change_listener_state(EventListenerStates.UNKNOWN)
- self.state_buffer = ''
+ self.state_buffer = b''
process.event = None
if self.state_buffer:
# keep going til its too short
@@ -377,14 +385,14 @@ class PEventListenerDispatcher(PDispatcher):
elif state == EventListenerStates.READY:
# the process sent some spurious data, be strict about it
self._change_listener_state(EventListenerStates.UNKNOWN)
- self.state_buffer = ''
+ self.state_buffer = b''
process.event = None
return
elif state == EventListenerStates.BUSY:
if self.resultlen is None:
# we haven't begun gathering result data yet
- pos = data.find('\n')
+ pos = data.find(b'\n')
if pos == -1:
# we can't make a determination yet, we dont have a full
# results line
@@ -396,11 +404,15 @@ class PEventListenerDispatcher(PDispatcher):
try:
self.resultlen = int(resultlen)
except ValueError:
+ try:
+ result_line = as_string(result_line)
+ except UnicodeDecodeError:
+ result_line = 'Undecodable: %r' % result_line
process.config.options.logger.warn(
- '%s: bad result line: %r' % (procname, result_line)
+ '%s: bad result line: \'%s\'' % (procname, result_line)
)
self._change_listener_state(EventListenerStates.UNKNOWN)
- self.state_buffer = ''
+ self.state_buffer = b''
notify(EventRejectedEvent(process, process.event))
process.event = None
return
@@ -416,7 +428,7 @@ class PEventListenerDispatcher(PDispatcher):
if not needed:
self.handle_result(self.result)
self.process.event = None
- self.result = ''
+ self.result = b''
self.resultlen = None
if self.state_buffer:
@@ -465,7 +477,7 @@ class PInputDispatcher(PDispatcher):
def __init__(self, process, channel, fd):
PDispatcher.__init__(self, process, channel, fd)
- self.input_buffer = ''
+ self.input_buffer = b''
def writable(self):
if self.input_buffer and not self.closed:
@@ -487,25 +499,25 @@ class PInputDispatcher(PDispatcher):
self.flush()
except OSError as why:
if why.args[0] == errno.EPIPE:
- self.input_buffer = ''
+ self.input_buffer = b''
self.close()
else:
raise
-ANSI_ESCAPE_BEGIN = '\x1b['
-ANSI_TERMINATORS = ('H', 'f', 'A', 'B', 'C', 'D', 'R', 's', 'u', 'J',
- 'K', 'h', 'l', 'p', 'm')
+ANSI_ESCAPE_BEGIN = b'\x1b['
+ANSI_TERMINATORS = (b'H', b'f', b'A', b'B', b'C', b'D', b'R', b's', b'u', b'J',
+ b'K', b'h', b'l', b'p', b'm')
def stripEscapes(s):
"""
Remove all ANSI color escapes from the given string.
"""
- result = ''
+ result = b''
show = 1
i = 0
L = len(s)
while i < L:
- if show == 0 and s[i] in ANSI_TERMINATORS:
+ if show == 0 and s[i:i + 1] in ANSI_TERMINATORS:
show = 1
elif show:
n = s.find(ANSI_ESCAPE_BEGIN, i)
@@ -523,5 +535,5 @@ class RejectEvent(Exception):
to reject an event """
def default_handler(event, response):
- if response != 'OK':
+ if response != b'OK':
raise RejectEvent(response)
diff --git a/supervisor/events.py b/supervisor/events.py
index 4c7011f..b6dc86c 100644
--- a/supervisor/events.py
+++ b/supervisor/events.py
@@ -1,10 +1,11 @@
from supervisor.states import getProcessStateDescription
+from supervisor.compat import as_string
callbacks = []
def subscribe(type, callback):
callbacks.append((type, callback))
-
+
def notify(event):
for type, callback in callbacks:
if isinstance(event, type):
@@ -25,16 +26,22 @@ class ProcessLogEvent(Event):
self.pid = pid
self.data = data
- def __str__(self):
+ def payload(self):
groupname = ''
if self.process.group is not None:
groupname = self.process.group.config.name
- return 'processname:%s groupname:%s pid:%s channel:%s\n%s' % (
- self.process.config.name,
- groupname,
- self.pid,
- self.channel,
- self.data)
+ try:
+ data = as_string(self.data)
+ except UnicodeDecodeError:
+ data = 'Undecodable: %r' % self.data
+ # On Python 2, stuff needs to be in Unicode before invoking the
+ # % operator, otherwise implicit encodings to ASCII can cause
+ # failures
+ fmt = as_string('processname:%s groupname:%s pid:%s channel:%s\n%s')
+ result = fmt % (as_string(self.process.config.name),
+ as_string(groupname), self.pid,
+ as_string(self.channel), data)
+ return result
class ProcessLogStdoutEvent(ProcessLogEvent):
channel = 'stdout'
@@ -45,23 +52,27 @@ class ProcessLogStderrEvent(ProcessLogEvent):
class ProcessCommunicationEvent(Event):
""" Abstract """
# event mode tokens
- BEGIN_TOKEN = '<!--XSUPERVISOR:BEGIN-->'
- END_TOKEN = '<!--XSUPERVISOR:END-->'
+ BEGIN_TOKEN = b'<!--XSUPERVISOR:BEGIN-->'
+ END_TOKEN = b'<!--XSUPERVISOR:END-->'
def __init__(self, process, pid, data):
self.process = process
self.pid = pid
self.data = data
- def __str__(self):
+ def payload(self):
groupname = ''
if self.process.group is not None:
groupname = self.process.group.config.name
+ try:
+ data = as_string(self.data)
+ except UnicodeDecodeError:
+ data = 'Undecodable: %r' % self.data
return 'processname:%s groupname:%s pid:%s\n%s' % (
self.process.config.name,
groupname,
self.pid,
- self.data)
+ data)
class ProcessCommunicationStdoutEvent(ProcessCommunicationEvent):
channel = 'stdout'
@@ -74,12 +85,12 @@ class RemoteCommunicationEvent(Event):
self.type = type
self.data = data
- def __str__(self):
+ def payload(self):
return 'type:%s\n%s' % (self.type, self.data)
class SupervisorStateChangeEvent(Event):
""" Abstract class """
- def __str__(self):
+ def payload(self):
return ''
class SupervisorRunningEvent(SupervisorStateChangeEvent):
@@ -88,7 +99,7 @@ class SupervisorRunningEvent(SupervisorStateChangeEvent):
class SupervisorStoppingEvent(SupervisorStateChangeEvent):
pass
-class EventRejectedEvent: # purposely does not subclass Event
+class EventRejectedEvent: # purposely does not subclass Event
def __init__(self, process, event):
self.process = process
self.event = event
@@ -105,7 +116,7 @@ class ProcessStateEvent(Event):
# us, we stash the values at the time the event was sent
self.extra_values = self.get_extra_values()
- def __str__(self):
+ def payload(self):
groupname = ''
if self.process.group is not None:
groupname = self.process.group.config.name
@@ -153,7 +164,8 @@ class ProcessStateStoppedEvent(ProcessStateEvent):
class ProcessGroupEvent(Event):
def __init__(self, group):
self.group = group
- def __str__(self):
+
+ def payload(self):
return 'groupname:%s\n' % self.group
class ProcessGroupAddedEvent(ProcessGroupEvent):
@@ -168,7 +180,7 @@ class TickEvent(Event):
self.when = when
self.supervisord = supervisord
- def __str__(self):
+ def payload(self):
return 'when:%s' % self.when
class Tick5Event(TickEvent):
@@ -198,7 +210,7 @@ class EventTypes:
PROCESS_COMMUNICATION_STDERR = ProcessCommunicationStderrEvent
PROCESS_LOG = ProcessLogEvent
PROCESS_LOG_STDOUT = ProcessLogStdoutEvent
- PROCESS_LOG_STDERR = ProcessLogStderrEvent
+ PROCESS_LOG_STDERR = ProcessLogStderrEvent
REMOTE_COMMUNICATION = RemoteCommunicationEvent
SUPERVISOR_STATE_CHANGE = SupervisorStateChangeEvent # abstract
SUPERVISOR_STATE_CHANGE_RUNNING = SupervisorRunningEvent
diff --git a/supervisor/http.py b/supervisor/http.py
index 2e260cd..ee30fc9 100644
--- a/supervisor/http.py
+++ b/supervisor/http.py
@@ -15,6 +15,7 @@ except ImportError: # Windows
from supervisor.compat import urllib
from supervisor.compat import sha1
from supervisor.compat import as_bytes
+from supervisor.compat import as_string
from supervisor.medusa import asyncore_25 as asyncore
from supervisor.medusa import http_date
from supervisor.medusa import http_server
@@ -49,15 +50,16 @@ class deferring_chunked_producer:
if data is NOT_DONE_YET:
return NOT_DONE_YET
elif data:
- return '%x\r\n%s\r\n' % (len(data), data)
+ s = '%x' % len(data)
+ return as_bytes(s) + b'\r\n' + data + b'\r\n'
else:
self.producer = None
if self.footers:
- return '\r\n'.join(['0'] + self.footers) + '\r\n\r\n'
+ return b'\r\n'.join([b'0'] + self.footers) + b'\r\n\r\n'
else:
- return '0\r\n\r\n'
+ return b'0\r\n\r\n'
else:
- return ''
+ return b''
class deferring_composite_producer:
"""combine a fifo of producers into one"""
@@ -76,7 +78,7 @@ class deferring_composite_producer:
else:
self.producers.pop(0)
else:
- return ''
+ return b''
class deferring_globbing_producer:
@@ -88,7 +90,7 @@ class deferring_globbing_producer:
def __init__ (self, producer, buffer_size=1<<16):
self.producer = producer
- self.buffer = ''
+ self.buffer = b''
self.buffer_size = buffer_size
self.delay = 0.1
@@ -105,7 +107,7 @@ class deferring_globbing_producer:
else:
break
r = self.buffer
- self.buffer = ''
+ self.buffer = b''
return r
@@ -134,7 +136,7 @@ class deferring_hooked_producer:
self.bytes += len(result)
return result
else:
- return ''
+ return b''
class deferring_http_request(http_server.http_request):
@@ -350,13 +352,6 @@ class deferring_http_channel(http_server.http_channel):
else:
return False
- # It is possible that self.ac_out_buffer is equal b''
- # and in Python3 b'' is not equal ''. This cause
- # http_server.http_channel.writable(self) is always True.
- # To avoid this case, we need to force self.ac_out_buffer = ''
- if len(self.ac_out_buffer) == 0:
- self.ac_out_buffer = ''
-
return http_server.http_channel.writable(self)
def refill_buffer (self):
@@ -371,7 +366,7 @@ class deferring_http_channel(http_server.http_channel):
self.producer_fifo.pop()
self.close()
return
- elif isinstance(p, str):
+ elif isinstance(p, bytes):
self.producer_fifo.pop()
self.ac_out_buffer += p
return
@@ -383,11 +378,7 @@ class deferring_http_channel(http_server.http_channel):
return
elif data:
- try:
- self.ac_out_buffer = self.ac_out_buffer + data
- except TypeError:
- self.ac_out_buffer = as_bytes(self.ac_out_buffer) + as_bytes(data)
-
+ self.ac_out_buffer = self.ac_out_buffer + data
self.delay = False
return
else:
@@ -402,8 +393,11 @@ class deferring_http_channel(http_server.http_channel):
if self.current_request:
self.current_request.found_terminator()
else:
- header = self.in_buffer
- self.in_buffer = ''
+ # we convert the header to text to facilitate processing.
+ # some of the underlying APIs (such as splitquery)
+ # expect text rather than bytes.
+ header = as_string(self.in_buffer)
+ self.in_buffer = b''
lines = header.split('\r\n')
# --------------------------------------------------
@@ -431,12 +425,12 @@ class deferring_http_channel(http_server.http_channel):
rpath, rquery = http_server.splitquery(uri)
if '%' in rpath:
if rquery:
- uri = http_server.unquote (rpath) + '?' + rquery
+ uri = http_server.unquote(rpath) + '?' + rquery
else:
- uri = http_server.unquote (rpath)
+ uri = http_server.unquote(rpath)
- r = deferring_http_request (self, request, command, uri, version,
- header)
+ r = deferring_http_request(self, request, command, uri, version,
+ header)
self.request_counter.increment()
self.server.total_requests.increment()
@@ -661,7 +655,7 @@ class tail_f_producer:
newsz = self._fsize()
except (OSError, ValueError):
# file descriptor was closed
- return ''
+ return b''
bytes_added = newsz - self.sz
if bytes_added < 0:
self.sz = 0
diff --git a/supervisor/http_client.py b/supervisor/http_client.py
index afb7d89..917fa3a 100644
--- a/supervisor/http_client.py
+++ b/supervisor/http_client.py
@@ -9,9 +9,9 @@ from supervisor.compat import as_string
from supervisor.compat import encodestring
from supervisor.medusa import asynchat_25 as asynchat
-CR="\x0d"
-LF="\x0a"
-CRLF=CR+LF
+CR = b'\x0d'
+LF = b'\x0a'
+CRLF = CR+LF
class Listener(object):
@@ -28,6 +28,10 @@ class Listener(object):
pass
def feed(self, url, data):
+ try:
+ data = as_string(data)
+ except UnicodeDecodeError:
+ data = 'Undecodable: %r' % data
sys.stdout.write(data)
sys.stdout.flush()
@@ -46,7 +50,7 @@ class HTTPHandler(asynchat.async_chat):
asynchat.async_chat.__init__(self, conn, map)
self.listener = listener
self.user_agent = 'Supervisor HTTP Client'
- self.buffer = ''
+ self.buffer = b''
self.set_terminator(CRLF)
self.connected = 0
self.part = self.status_line
@@ -139,21 +143,21 @@ class HTTPHandler(asynchat.async_chat):
self.buffer = self.buffer + bytes
if self.part==self.body:
self.feed(self.buffer)
- self.buffer = ''
+ self.buffer = b''
def found_terminator(self):
self.part()
- self.buffer = ''
+ self.buffer = b''
def ignore(self):
- self.buffer = ''
+ self.buffer = b''
def status_line(self):
line = self.buffer
version, status, reason = line.split(None, 2)
status = int(status)
- if not version.startswith('HTTP/'):
+ if not version.startswith(b'HTTP/'):
raise ValueError(line)
self.listener.status(self.url, status)
@@ -170,19 +174,19 @@ class HTTPHandler(asynchat.async_chat):
def headers(self):
line = self.buffer
if not line:
- if self.encoding=="chunked":
+ if self.encoding == b'chunked':
self.part = self.chunked_size
else:
self.part = self.body
self.set_terminator(self.length)
else:
- name, value = line.split(":", 1)
+ name, value = line.split(b':', 1)
if name and value:
name = name.lower()
value = value.strip()
- if name=="transfer-encoding":
+ if name == b'transfer-encoding':
self.encoding = value
- elif name=="content-length":
+ elif name == b'content-length':
self.length = int(value)
self.response_header(name, value)
@@ -218,6 +222,6 @@ class HTTPHandler(asynchat.async_chat):
# http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.6.1
# trailer = *(entity-header CRLF)
line = self.buffer
- if line==CRLF:
+ if line == CRLF:
self.done()
self.close()
diff --git a/supervisor/loggers.py b/supervisor/loggers.py
index fd700a4..5793698 100644
--- a/supervisor/loggers.py
+++ b/supervisor/loggers.py
@@ -15,6 +15,7 @@ import traceback
from supervisor.compat import syslog
from supervisor.compat import long
+from supervisor.compat import is_text_stream
class LevelsByName:
CRIT = 50 # messages that probably require immediate user attention
@@ -86,11 +87,24 @@ class Handler:
def emit(self, record):
try:
- msg = self.fmt % record.asdict()
+ binary = (self.fmt == '%(message)s' and
+ isinstance(record.msg, bytes) and
+ (not record.kw or record.kw == {'exc_info': None}))
+ binary_stream = not is_text_stream(self.stream)
+ if binary:
+ msg = record.msg
+ else:
+ msg = self.fmt % record.asdict()
+ if binary_stream:
+ msg = msg.encode('utf-8')
try:
self.stream.write(msg)
except UnicodeError:
- self.stream.write(msg.encode("UTF-8"))
+ # TODO sort out later
+ # this only occurs because of a test stream type
+ # which deliberately raises an exception the first
+ # time it's called. So just do it again
+ self.stream.write(msg)
self.flush()
except:
self.handleError()
@@ -112,7 +126,7 @@ class StreamHandler(Handler):
pass
class BoundIO:
- def __init__(self, maxbytes, buf=''):
+ def __init__(self, maxbytes, buf=b''):
self.maxbytes = maxbytes
self.buf = buf
@@ -122,34 +136,34 @@ class BoundIO:
def close(self):
self.clear()
- def write(self, s):
- slen = len(s)
- if len(self.buf) + slen > self.maxbytes:
- self.buf = self.buf[slen:]
- self.buf += s
+ def write(self, b):
+ blen = len(b)
+ if len(self.buf) + blen > self.maxbytes:
+ self.buf = self.buf[blen:]
+ self.buf += b
def getvalue(self):
return self.buf
def clear(self):
- self.buf = ''
+ self.buf = b''
class FileHandler(Handler):
"""File handler which supports reopening of logs.
"""
- def __init__(self, filename, mode='a'):
+ def __init__(self, filename, mode='ab'):
Handler.__init__(self)
try:
self.stream = open(filename, mode)
except OSError as e:
- if mode == 'a' and e.errno == errno.ESPIPE:
+ if mode == 'ab' and e.errno == errno.ESPIPE:
# Python 3 can't open special files like
# /dev/stdout in 'a' mode due to an implicit seek call
# that fails with ESPIPE. Retry in 'w' mode.
# See: http://bugs.python.org/issue27805
- mode = 'w'
+ mode = 'wb'
self.stream = open(filename, mode)
else:
raise
@@ -171,7 +185,7 @@ class FileHandler(Handler):
raise
class RotatingFileHandler(FileHandler):
- def __init__(self, filename, mode='a', maxBytes=512*1024*1024,
+ def __init__(self, filename, mode='ab', maxBytes=512*1024*1024,
backupCount=10):
"""
Open the specified file and use it as the stream for logging.
@@ -194,7 +208,7 @@ class RotatingFileHandler(FileHandler):
If maxBytes is zero, rollover never occurs.
"""
if maxBytes > 0:
- mode = 'a' # doesn't make sense otherwise!
+ mode = 'ab' # doesn't make sense otherwise!
FileHandler.__init__(self, filename, mode)
self.maxBytes = maxBytes
self.backupCount = backupCount
@@ -258,7 +272,7 @@ class RotatingFileHandler(FileHandler):
self.removeAndRename(sfn, dfn)
dfn = self.baseFilename + ".1"
self.removeAndRename(self.baseFilename, dfn)
- self.stream = open(self.baseFilename, 'w')
+ self.stream = open(self.baseFilename, 'wb')
class LogRecord:
def __init__(self, level, msg, **kw):
diff --git a/supervisor/medusa/asynchat_25.py b/supervisor/medusa/asynchat_25.py
index bb8976d..5db792a 100644
--- a/supervisor/medusa/asynchat_25.py
+++ b/supervisor/medusa/asynchat_25.py
@@ -49,6 +49,7 @@ you - by calling your self.found_terminator() method.
import socket
from supervisor.medusa import asyncore_25 as asyncore
from supervisor.compat import long
+from supervisor.compat import as_bytes
class async_chat (asyncore.dispatcher):
"""This is an abstract class. You must derive from this class, and add
@@ -60,8 +61,8 @@ class async_chat (asyncore.dispatcher):
ac_out_buffer_size = 4096
def __init__ (self, conn=None, map=None):
- self.ac_in_buffer = ''
- self.ac_out_buffer = ''
+ self.ac_in_buffer = b''
+ self.ac_out_buffer = b''
self.producer_fifo = fifo()
asyncore.dispatcher.__init__ (self, conn, map)
@@ -84,7 +85,6 @@ class async_chat (asyncore.dispatcher):
# if found, transition to the next state.
def handle_read (self):
-
try:
data = self.recv (self.ac_in_buffer_size)
except socket.error:
@@ -104,13 +104,13 @@ class async_chat (asyncore.dispatcher):
if not terminator:
# no terminator, collect it all
self.collect_incoming_data (self.ac_in_buffer)
- self.ac_in_buffer = ''
+ self.ac_in_buffer = b''
elif isinstance(terminator, int) or isinstance(terminator, long):
# numeric terminator
n = terminator
if lb < n:
self.collect_incoming_data (self.ac_in_buffer)
- self.ac_in_buffer = ''
+ self.ac_in_buffer = b''
self.terminator -= lb
else:
self.collect_incoming_data (self.ac_in_buffer[:n])
@@ -147,7 +147,7 @@ class async_chat (asyncore.dispatcher):
else:
# no prefix, collect it all
self.collect_incoming_data (self.ac_in_buffer)
- self.ac_in_buffer = ''
+ self.ac_in_buffer = b''
def handle_write (self):
self.initiate_send ()
@@ -156,7 +156,8 @@ class async_chat (asyncore.dispatcher):
self.close()
def push (self, data):
- self.producer_fifo.push (simple_producer (data))
+ data = as_bytes(data)
+ self.producer_fifo.push(simple_producer(data))
self.initiate_send()
def push_with_producer (self, producer):
@@ -172,7 +173,7 @@ class async_chat (asyncore.dispatcher):
# return len(self.ac_out_buffer) or len(self.producer_fifo) or (not self.connected)
# this is about twice as fast, though not as clear.
return not (
- (self.ac_out_buffer == '') and
+ (self.ac_out_buffer == b'') and
self.producer_fifo.is_empty() and
self.connected
)
@@ -194,7 +195,7 @@ class async_chat (asyncore.dispatcher):
self.producer_fifo.pop()
self.close()
return
- elif isinstance(p, str):
+ elif isinstance(p, bytes):
self.producer_fifo.pop()
self.ac_out_buffer += p
return
@@ -226,8 +227,8 @@ class async_chat (asyncore.dispatcher):
def discard_buffers (self):
# Emergencies only!
- self.ac_in_buffer = ''
- self.ac_out_buffer = ''
+ self.ac_in_buffer = b''
+ self.ac_out_buffer = b''
while self.producer_fifo:
self.producer_fifo.pop()
@@ -245,7 +246,7 @@ class simple_producer:
return result
else:
result = self.data
- self.data = ''
+ self.data = b''
return result
class fifo:
diff --git a/supervisor/medusa/asyncore_25.py b/supervisor/medusa/asyncore_25.py
index 457715c..250c780 100644
--- a/supervisor/medusa/asyncore_25.py
+++ b/supervisor/medusa/asyncore_25.py
@@ -346,7 +346,7 @@ class dispatcher:
# a closed connection is indicated by signaling
# a read condition, and having recv() return 0.
self.handle_close()
- return ''
+ return b''
else:
return data
except socket.error as why:
@@ -449,7 +449,7 @@ class dispatcher_with_send(dispatcher):
def __init__(self, sock=None, map=None):
dispatcher.__init__(self, sock, map)
- self.out_buffer = ''
+ self.out_buffer = b''
def initiate_send(self):
num_sent = dispatcher.send(self, self.out_buffer[:512])
diff --git a/supervisor/medusa/http_server.py b/supervisor/medusa/http_server.py
index b036a42..eee8ee2 100644
--- a/supervisor/medusa/http_server.py
+++ b/supervisor/medusa/http_server.py
@@ -12,6 +12,8 @@ import socket
import sys
import time
+from supervisor.compat import as_bytes
+
# async modules
import supervisor.medusa.asyncore_25 as asyncore
import supervisor.medusa.asynchat_25 as asynchat
@@ -89,8 +91,9 @@ class http_request:
def build_reply_header (self):
header_items = ['%s: %s' % item for item in self.reply_headers.items()]
- return '\r\n'.join (
+ result = '\r\n'.join (
[self.response(self.reply_code)] + header_items) + '\r\n\r\n'
+ return as_bytes(result)
####################################################
# multiple reply header management
@@ -260,11 +263,13 @@ class http_request:
)
def push (self, thing):
- if type(thing) == type(''):
- self.outgoing.append(producers.simple_producer(thing,
- buffer_size=len(thing)))
- else:
- self.outgoing.append(thing)
+ # Sometimes, text gets pushed by XMLRPC logic for later
+ # processing.
+ if isinstance(thing, str):
+ thing = as_bytes(thing)
+ if isinstance(thing, bytes):
+ thing = producers.simple_producer(thing, buffer_size=len(thing))
+ self.outgoing.append(thing)
def response (self, code=200):
message = self.responses[code]
@@ -278,10 +283,11 @@ class http_request:
'code': code,
'message': message,
}
+ s = as_bytes(s)
self['Content-Length'] = len(s)
self['Content-Type'] = 'text/html'
# make an error reply
- self.push (s)
+ self.push(s)
self.done()
# can also be used for empty replies
@@ -471,8 +477,8 @@ class http_channel (asynchat.async_chat):
asynchat.async_chat.__init__ (self, conn)
self.server = server
self.addr = addr
- self.set_terminator ('\r\n\r\n')
- self.in_buffer = ''
+ self.set_terminator (b'\r\n\r\n')
+ self.in_buffer = b''
self.creation_time = int (time.time())
self.last_used = self.creation_time
self.check_maintenance()
@@ -559,8 +565,8 @@ class http_channel (asynchat.async_chat):
self.current_request.found_terminator()
else:
header = self.in_buffer
- self.in_buffer = ''
- lines = header.split('\r\n')
+ self.in_buffer = b''
+ lines = header.split(b'\r\n')
# --------------------------------------------------
# crack the request header
diff --git a/supervisor/medusa/producers.py b/supervisor/medusa/producers.py
index c65a93f..d7ab7ec 100644
--- a/supervisor/medusa/producers.py
+++ b/supervisor/medusa/producers.py
@@ -26,7 +26,7 @@ class simple_producer:
return result
else:
result = self.data
- self.data = ''
+ self.data = b''
return result
class scanning_producer:
@@ -47,7 +47,7 @@ class scanning_producer:
self.pos += len(result)
return result
else:
- return ''
+ return b''
class lines_producer:
"""producer for a list of lines"""
@@ -74,7 +74,7 @@ class buffer_list_producer:
def more (self):
if self.index >= len(self.buffers):
- return ''
+ return b''
else:
data = self.buffers[self.index]
self.index += 1
@@ -92,14 +92,14 @@ class file_producer:
def more (self):
if self.done:
- return ''
+ return b''
else:
data = self.file.read (self.out_buffer_size)
if not data:
self.file.close()
del self.file
self.done = 1
- return ''
+ return b''
else:
return data
@@ -113,7 +113,7 @@ class file_producer:
class output_producer:
"""Acts like an output file; suitable for capturing sys.stdout"""
def __init__ (self):
- self.data = ''
+ self.data = b''
def write (self, data):
lines = data.split('\n')
@@ -154,7 +154,7 @@ class composite_producer:
else:
self.producers.pop(0)
else:
- return ''
+ return b''
class globbing_producer:
@@ -166,7 +166,7 @@ class globbing_producer:
def __init__ (self, producer, buffer_size=1<<16):
self.producer = producer
- self.buffer = ''
+ self.buffer = b''
self.buffer_size = buffer_size
def more (self):
@@ -177,7 +177,7 @@ class globbing_producer:
else:
break
r = self.buffer
- self.buffer = ''
+ self.buffer = b''
return r
@@ -231,15 +231,16 @@ class chunked_producer:
if self.producer:
data = self.producer.more()
if data:
- return '%x\r\n%s\r\n' % (len(data), data)
+ s = '%x' % len(data)
+ return as_bytes(s) + b'\r\n' + data + b'\r\n'
else:
self.producer = None
if self.footers:
- return '\r\n'.join(['0'] + self.footers) + '\r\n\r\n'
+ return b'\r\n'.join([b'0'] + self.footers) + b'\r\n\r\n'
else:
- return '0\r\n\r\n'
+ return b'0\r\n\r\n'
else:
- return ''
+ return b''
try:
import zlib
@@ -265,7 +266,7 @@ class compressed_producer:
def more (self):
if self.producer:
- cdata = ''
+ cdata = b''
# feed until we get some output
while not cdata:
data = self.producer.more()
@@ -276,7 +277,7 @@ class compressed_producer:
cdata = self.compressor.compress (data)
return cdata
else:
- return ''
+ return b''
class escaping_producer:
@@ -287,7 +288,7 @@ class escaping_producer:
self.producer = producer
self.esc_from = esc_from
self.esc_to = esc_to
- self.buffer = ''
+ self.buffer = b''
self.find_prefix_at_end = find_prefix_at_end
def more (self):
@@ -305,7 +306,7 @@ class escaping_producer:
return buffer[:-i]
else:
# no prefix, return it all
- self.buffer = ''
+ self.buffer = b''
return buffer
else:
return buffer
diff --git a/supervisor/medusa/text_socket.py b/supervisor/medusa/text_socket.py
index 2bae802..4fba848 100644
--- a/supervisor/medusa/text_socket.py
+++ b/supervisor/medusa/text_socket.py
@@ -40,6 +40,6 @@ if PY3:
return sock, addr
text_socket.__init__.__doc__ = bin_socket.__init__.__doc__
-
+ text_socket = bin_socket
else:
text_socket = bin_socket
diff --git a/supervisor/medusa/xmlrpc_handler.py b/supervisor/medusa/xmlrpc_handler.py
index f51e686..fecb237 100644
--- a/supervisor/medusa/xmlrpc_handler.py
+++ b/supervisor/medusa/xmlrpc_handler.py
@@ -7,6 +7,8 @@
VERSION = "$Id: xmlrpc_handler.py,v 1.6 2004/04/21 14:09:24 akuchling Exp $"
+from supervisor.compat import as_string
+
import supervisor.medusa.http_server as http_server
try:
import xmlrpclib
@@ -83,8 +85,10 @@ class collector:
def found_terminator (self):
# set the terminator back to the default
- self.request.channel.set_terminator ('\r\n\r\n')
- self.handler.continue_request ("".join(self.data), self.request)
+ self.request.channel.set_terminator (b'\r\n\r\n')
+ # convert the data back to text for processing
+ data = as_string(b''.join(self.data))
+ self.handler.continue_request (data, self.request)
if __name__ == '__main__':
diff --git a/supervisor/options.py b/supervisor/options.py
index 87d83e4..520b3e4 100644
--- a/supervisor/options.py
+++ b/supervisor/options.py
@@ -1521,8 +1521,8 @@ class ServerOptions(Options):
except OSError as why:
if why.args[0] not in (errno.EWOULDBLOCK, errno.EBADF, errno.EINTR):
raise
- data = ''
- return as_string(data)
+ data = b''
+ return data
def process_environment(self):
os.environ.update(self.environment or {})
@@ -2052,7 +2052,7 @@ def tailFile(filename, offset, length):
length = 0
if length == 0:
- data = ''
+ data = b''
else:
f.seek(offset)
data = f.read(length)
diff --git a/supervisor/process.py b/supervisor/process.py
index e2fef5b..10af3bd 100644
--- a/supervisor/process.py
+++ b/supervisor/process.py
@@ -8,6 +8,8 @@ import signal
from supervisor.compat import maxint
from supervisor.compat import total_ordering
from supervisor.compat import as_bytes
+from supervisor.compat import as_string
+from supervisor.compat import PY3
from supervisor.medusa import asyncore_25 as asyncore
@@ -179,8 +181,9 @@ class Subprocess(object):
if self.state not in states:
current_state = getProcessStateDescription(self.state)
allowable_states = ' '.join(map(getProcessStateDescription, states))
+ processname = as_string(self.config.name)
raise AssertionError('Assertion failed for %s: %s not in %s' % (
- self.config.name, current_state, allowable_states))
+ processname, current_state, allowable_states))
def record_spawnerr(self, msg):
self.spawnerr = msg
@@ -192,9 +195,10 @@ class Subprocess(object):
Return the process id. If the fork() call fails, return None.
"""
options = self.config.options
+ processname = as_string(self.config.name)
if self.pid:
- msg = 'process %r already running' % self.config.name
+ msg = 'process \'%s\' already running' % processname
options.logger.warn(msg)
return
@@ -225,10 +229,10 @@ class Subprocess(object):
code = why.args[0]
if code == errno.EMFILE:
# too many file descriptors open
- msg = 'too many open files to spawn %r' % self.config.name
+ msg = 'too many open files to spawn \'%s\'' % processname
else:
- msg = 'unknown error making dispatchers for %r: %s' % (
- self.config.name, errno.errorcode.get(code, code))
+ msg = 'unknown error making dispatchers for \'%s\': %s' % (
+ processname, errno.errorcode.get(code, code))
self.record_spawnerr(msg)
self._assertInState(ProcessStates.STARTING)
self.change_state(ProcessStates.BACKOFF)
@@ -240,11 +244,11 @@ class Subprocess(object):
code = why.args[0]
if code == errno.EAGAIN:
# process table full
- msg = ('Too many processes in process table to spawn %r' %
- self.config.name)
+ msg = ('Too many processes in process table to spawn \'%s\'' %
+ processname)
else:
- msg = 'unknown error during fork for %r: %s' % (
- self.config.name, errno.errorcode.get(code, code))
+ msg = 'unknown error during fork for \'%s\': %s' % (
+ processname, errno.errorcode.get(code, code))
self.record_spawnerr(msg)
self._assertInState(ProcessStates.STARTING)
self.change_state(ProcessStates.BACKOFF)
@@ -263,7 +267,7 @@ class Subprocess(object):
self.pid = pid
options = self.config.options
options.close_child_pipes(self.pipes)
- options.logger.info('spawned: %r with pid %s' % (self.config.name, pid))
+ options.logger.info('spawned: \'%s\' with pid %s' % (as_string(self.config.name), pid))
self.spawnerr = None
self.delay = time.time() + self.config.startsecs
options.pidhistory[pid] = self
@@ -363,7 +367,7 @@ class Subprocess(object):
now = time.time()
if now > (self.laststopreport + 2): # every 2 seconds
self.config.options.logger.info(
- 'waiting for %s to stop' % self.config.name)
+ 'waiting for %s to stop' % as_string(self.config.name))
self.laststopreport = now
def give_up(self):
@@ -382,20 +386,21 @@ class Subprocess(object):
now = time.time()
options = self.config.options
+ processname = as_string(self.config.name)
# If the process is in BACKOFF and we want to stop or kill it, then
# BACKOFF -> STOPPED. This is needed because if startretries is a
# large number and the process isn't starting successfully, the stop
# request would be blocked for a long time waiting for the retries.
if self.state == ProcessStates.BACKOFF:
msg = ("Attempted to kill %s, which is in BACKOFF state." %
- (self.config.name,))
+ processname)
options.logger.debug(msg)
self.change_state(ProcessStates.STOPPED)
return None
if not self.pid:
msg = ("attempted to kill %s with sig %s but it wasn't running" %
- (self.config.name, signame(sig)))
+ (processname, signame(sig)))
options.logger.debug(msg)
return msg
@@ -411,7 +416,7 @@ class Subprocess(object):
as_group = "process group "
options.logger.debug('killing %s (pid %s) %swith signal %s'
- % (self.config.name,
+ % (processname,
self.pid,
as_group,
signame(sig))
@@ -436,7 +441,7 @@ class Subprocess(object):
options.kill(pid, sig)
except:
tb = traceback.format_exc()
- msg = 'unknown problem killing %s (%s):%s' % (self.config.name,
+ msg = 'unknown problem killing %s (%s):%s' % (processname,
self.pid, tb)
options.logger.critical(msg)
self.change_state(ProcessStates.UNKNOWN)
@@ -454,14 +459,15 @@ class Subprocess(object):
if an error occurred or if the subprocess is not running.
"""
options = self.config.options
+ processname = as_string(self.config.name)
if not self.pid:
msg = ("attempted to send %s sig %s but it wasn't running" %
- (self.config.name, signame(sig)))
+ (processname, signame(sig)))
options.logger.debug(msg)
return msg
options.logger.debug('sending %s (pid %s) sig %s'
- % (self.config.name,
+ % (processname,
self.pid,
signame(sig))
)
@@ -475,7 +481,7 @@ class Subprocess(object):
except:
tb = traceback.format_exc()
msg = 'unknown problem sending sig %s (%s):%s' % (
- self.config.name, self.pid, tb)
+ processname, self.pid, tb)
options.logger.critical(msg)
self.change_state(ProcessStates.UNKNOWN)
self.pid = 0
@@ -492,16 +498,16 @@ class Subprocess(object):
now = time.time()
self.laststop = now
- processname = self.config.name
+ processname = as_string(self.config.name)
if now > self.laststart:
too_quickly = now - self.laststart < self.config.startsecs
else:
too_quickly = False
self.config.options.logger.warn(
- "process %r (%s) laststart time is in the future, don't "
+ "process \'%s\' (%s) laststart time is in the future, don't "
"know how long process was running so assuming it did "
- "not exit too quickly" % (self.config.name, self.pid))
+ "not exit too quickly" % (processname, self.pid))
exit_expected = es in self.config.exitcodes
@@ -581,9 +587,14 @@ class Subprocess(object):
return self.config.priority == other.config.priority
def __repr__(self):
+ # repr can't return anything other than a native string,
+ # but the name might be unicode - a problem on Python 2.
+ name = self.config.name
+ if not PY3:
+ name = as_string(name).encode('unicode-escape')
return '<Subprocess at %s with name %s in state %s>' % (
id(self),
- self.config.name,
+ name,
getProcessStateDescription(self.get_state()))
def get_state(self):
@@ -616,6 +627,7 @@ class Subprocess(object):
# BACKOFF -> STARTING
self.spawn()
+ processname = as_string(self.config.name)
if state == ProcessStates.STARTING:
if now - self.laststart > self.config.startsecs:
# STARTING -> RUNNING if the proc has started
@@ -628,7 +640,7 @@ class Subprocess(object):
msg = (
'entered RUNNING state, process has stayed up for '
'> than %s seconds (startsecs)' % self.config.startsecs)
- logger.info('success: %s %s' % (self.config.name, msg))
+ logger.info('success: %s %s' % (processname, msg))
if state == ProcessStates.BACKOFF:
if self.backoff > self.config.startretries:
@@ -637,7 +649,7 @@ class Subprocess(object):
self.give_up()
msg = ('entered FATAL state, too many start retries too '
'quickly')
- logger.info('gave up: %s %s' % (self.config.name, msg))
+ logger.info('gave up: %s %s' % (processname, msg))
elif state == ProcessStates.STOPPING:
time_left = self.delay - now
@@ -646,8 +658,8 @@ class Subprocess(object):
# sigkill. if this doesn't kill it, the process will be stuck
# in the STOPPING state forever.
self.config.options.logger.warn(
- 'killing %r (%s) with SIGKILL' % (self.config.name,
- self.pid))
+ 'killing \'%s\' (%s) with SIGKILL' % (processname,
+ self.pid))
self.kill(signal.SIGKILL)
class FastCGISubprocess(Subprocess):
@@ -727,8 +739,13 @@ class ProcessGroupBase(object):
return self.config.priority == other.config.priority
def __repr__(self):
+ # repr can't return anything other than a native string,
+ # but the name might be unicode - a problem on Python 2.
+ name = self.config.name
+ if not PY3:
+ name = as_string(name).encode('unicode-escape')
return '<%s instance at %s named %s>' % (self.__class__, id(self),
- self.config.name)
+ name)
def removelogs(self):
for process in self.processes.values():
@@ -839,6 +856,7 @@ class EventListenerPool(ProcessGroupBase):
# events are required to be instances
# this has a side effect to fail with an attribute error on 'old style'
# classes
+ processname = as_string(self.config.name)
if not hasattr(event, 'serial'):
event.serial = new_serial(GlobalSerial)
if not hasattr(event, 'pool_serials'):
@@ -848,7 +866,7 @@ class EventListenerPool(ProcessGroupBase):
else:
self.config.options.logger.debug(
'rebuffering event %s for pool %s (bufsize %s)' % (
- (event.serial, self.config.name, len(self.event_buffer))))
+ (event.serial, processname, len(self.event_buffer))))
if len(self.event_buffer) >= self.config.buffer_size:
if self.event_buffer:
@@ -856,7 +874,7 @@ class EventListenerPool(ProcessGroupBase):
discarded_event = self.event_buffer.pop(0)
self.config.options.logger.error(
'pool %s event buffer overflowed, discarding event %s' % (
- (self.config.name, discarded_event.serial)))
+ (processname, discarded_event.serial)))
if head:
self.event_buffer.insert(0, event)
else:
@@ -869,7 +887,8 @@ class EventListenerPool(ProcessGroupBase):
if process.state != ProcessStates.RUNNING:
continue
if process.listener_state == EventListenerStates.READY:
- payload = str(event)
+ processname = as_string(process.config.name)
+ payload = event.payload()
try:
event_type = event.__class__
serial = event.serial
@@ -883,14 +902,14 @@ class EventListenerPool(ProcessGroupBase):
self.config.options.logger.debug(
'epipe occurred while sending event %s '
'to listener %s, listener state unchanged' % (
- event.serial, process.config.name))
+ event.serial, processname))
continue
process.listener_state = EventListenerStates.BUSY
process.event = event
self.config.options.logger.debug(
'event %s sent to listener %s' % (
- event.serial, process.config.name))
+ event.serial, processname))
return True
return False
diff --git a/supervisor/supervisorctl.py b/supervisor/supervisorctl.py
index c2fd5da..aefe841 100755
--- a/supervisor/supervisorctl.py
+++ b/supervisor/supervisorctl.py
@@ -34,6 +34,7 @@ from supervisor.compat import xmlrpclib
from supervisor.compat import urlparse
from supervisor.compat import unicode
from supervisor.compat import raw_input
+from supervisor.compat import as_string
from supervisor.medusa import asyncore_25 as asyncore
@@ -661,7 +662,7 @@ class DefaultControllerPlugin(ControllerPluginBase):
supervisor = self.ctl.get_supervisor()
all_infos = supervisor.getAllProcessInfo()
- names = arg.split()
+ names = as_string(arg).split()
if not names or "all" in names:
matching_infos = all_infos
else:
diff --git a/supervisor/supervisord.py b/supervisor/supervisord.py
index 96fef32..d895e7c 100755
--- a/supervisor/supervisord.py
+++ b/supervisor/supervisord.py
@@ -36,6 +36,7 @@ import signal
from supervisor.medusa import asyncore_25 as asyncore
+from supervisor.compat import as_string
from supervisor.options import ServerOptions
from supervisor.options import signame
from supervisor import events
@@ -148,7 +149,7 @@ class Supervisor:
# throttle 'waiting for x to die' reports
now = time.time()
if now > (self.lastshutdownreport + 3): # every 3 secs
- names = [ p.config.name for p in unstopped ]
+ names = [ as_string(p.config.name) for p in unstopped ]
namestr = ', '.join(names)
self.options.logger.info('waiting for %s to die' % namestr)
self.lastshutdownreport = now
diff --git a/supervisor/tests/base.py b/supervisor/tests/base.py
index eb2e3fe..f2c4979 100644
--- a/supervisor/tests/base.py
+++ b/supervisor/tests/base.py
@@ -1135,7 +1135,7 @@ class DummyStream:
self.error = error
self.closed = False
self.flushed = False
- self.written = ''
+ self.written = b''
self._fileno = fileno
def close(self):
if self.error:
@@ -1150,7 +1150,7 @@ class DummyStream:
error = self.error
self.error = None
raise error
- self.written += as_string(msg)
+ self.written += as_bytes(msg)
def seek(self, num, whence=0):
pass
def tell(self):
@@ -1163,7 +1163,7 @@ class DummyEvent:
if serial is not None:
self.serial = serial
- def __str__(self):
+ def payload(self):
return 'dummy event'
class DummyPoller:
diff --git a/supervisor/tests/fixtures/hello.sh b/supervisor/tests/fixtures/hello.sh
new file mode 100644
index 0000000..4f3aafd
--- /dev/null
+++ b/supervisor/tests/fixtures/hello.sh
@@ -0,0 +1,7 @@
+#!/bin/bash
+n=0
+while :; do
+ let n=n+1
+ echo "The Øresund bridge ends in Malmö - $n"
+ sleep 1;
+done
diff --git a/supervisor/tests/fixtures/issue-565.conf b/supervisor/tests/fixtures/issue-565.conf
new file mode 100644
index 0000000..4ad290a
--- /dev/null
+++ b/supervisor/tests/fixtures/issue-565.conf
@@ -0,0 +1,22 @@
+[supervisord]
+loglevel=info ; log level; default info; others: debug,warn,trace
+logfile=/tmp/supervisord.log ; main log file; default $CWD/supervisord.log
+pidfile=/tmp/supervisord.pid ; supervisord pidfile; default supervisord.pid
+nodaemon=true ; start in foreground if true; default false
+
+[rpcinterface:supervisor]
+supervisor.rpcinterface_factory = supervisor.rpcinterface:make_main_rpcinterface
+
+[unix_http_server]
+file=/tmp/supervisor.sock ; the path to the socket file
+
+[supervisorctl]
+serverurl=unix:///tmp/supervisor.sock ; use a unix:// URL for a unix socket
+
+[program:hello]
+command=bash supervisor/tests/fixtures/hello.sh
+stdout_events_enabled=true
+
+[eventlistener:listener]
+command=python supervisor/tests/fixtures/listener.py
+events=PROCESS_LOG
diff --git a/supervisor/tests/fixtures/issue-638.conf b/supervisor/tests/fixtures/issue-638.conf
new file mode 100644
index 0000000..eaaca8b
--- /dev/null
+++ b/supervisor/tests/fixtures/issue-638.conf
@@ -0,0 +1,17 @@
+[supervisord]
+loglevel=debug ; log level; default info; others: debug,warn,trace
+logfile=/tmp/supervisord.log ; main log file; default $CWD/supervisord.log
+pidfile=/tmp/supervisord.pid ; supervisord pidfile; default supervisord.pid
+nodaemon=true ; start in foreground if true; default false
+
+[rpcinterface:supervisor]
+supervisor.rpcinterface_factory = supervisor.rpcinterface:make_main_rpcinterface
+
+[unix_http_server]
+file=/tmp/supervisor.sock ; the path to the socket file
+
+[supervisorctl]
+serverurl=unix:///tmp/supervisor.sock ; use a unix:// URL for a unix socket
+
+[program:produce-unicode-error]
+command=echo -e "\x88"
diff --git a/supervisor/tests/fixtures/issue-663.conf b/supervisor/tests/fixtures/issue-663.conf
new file mode 100644
index 0000000..2e979c9
--- /dev/null
+++ b/supervisor/tests/fixtures/issue-663.conf
@@ -0,0 +1,18 @@
+[supervisord]
+loglevel=debug
+logfile=/tmp/supervisord.log
+pidfile=/tmp/supervisord.pid
+nodaemon=true
+
+[rpcinterface:supervisor]
+supervisor.rpcinterface_factory = supervisor.rpcinterface:make_main_rpcinterface
+
+[inet_http_server]
+port = 127.0.0.1:9001
+
+[supervisorctl]
+serverurl = http://127.0.0.1:9001
+
+[eventlistener:listener]
+command=python /home/vinay/Documents/listener.py
+events=TICK_5
diff --git a/supervisor/tests/fixtures/issue-664.conf b/supervisor/tests/fixtures/issue-664.conf
new file mode 100644
index 0000000..a9361ff
--- /dev/null
+++ b/supervisor/tests/fixtures/issue-664.conf
@@ -0,0 +1,17 @@
+[supervisord]
+loglevel=debug
+logfile=/tmp/supervisord.log
+pidfile=/tmp/supervisord.pid
+nodaemon=true
+
+[rpcinterface:supervisor]
+supervisor.rpcinterface_factory = supervisor.rpcinterface:make_main_rpcinterface
+
+[inet_http_server]
+port = 127.0.0.1:9001
+
+[supervisorctl]
+serverurl = http://127.0.0.1:9001
+
+[program:test_öäü]
+command = /bin/cat
diff --git a/supervisor/tests/fixtures/issue-836.conf b/supervisor/tests/fixtures/issue-836.conf
new file mode 100644
index 0000000..f5b6d17
--- /dev/null
+++ b/supervisor/tests/fixtures/issue-836.conf
@@ -0,0 +1,17 @@
+[supervisord]
+loglevel = debug
+logfile=/tmp/supervisord.log
+pidfile=/tmp/supervisord.pid
+nodaemon = true
+
+[rpcinterface:supervisor]
+supervisor.rpcinterface_factory = supervisor.rpcinterface:make_main_rpcinterface
+
+[inet_http_server]
+port = 127.0.0.1:9001
+
+[supervisorctl]
+serverurl = http://127.0.0.1:9001
+
+[program:cat]
+command = /bin/cat
diff --git a/supervisor/tests/fixtures/listener.py b/supervisor/tests/fixtures/listener.py
new file mode 100644
index 0000000..f69915c
--- /dev/null
+++ b/supervisor/tests/fixtures/listener.py
@@ -0,0 +1,38 @@
+
+import sys
+
+def write_and_flush(stream, s):
+ stream.write(s)
+ stream.flush()
+
+def write_stdout(s):
+ # only eventlistener protocol messages may be sent to stdout
+ sys.stdout.write(s)
+ sys.stdout.flush()
+
+def write_stderr(s):
+ sys.stderr.write(s)
+ sys.stderr.flush()
+
+def main():
+ stdin = sys.stdin
+ stdout = sys.stdout
+ stderr = sys.stderr
+ while True:
+ # transition from ACKNOWLEDGED to READY
+ write_and_flush(stdout, 'READY\n')
+
+ # read header line and print it to stderr
+ line = stdin.readline()
+ write_and_flush(stderr, line)
+
+ # read event payload and print it to stderr
+ headers = dict([ x.split(':') for x in line.split() ])
+ data = stdin.read(int(headers['len']))
+ write_and_flush(stderr, data)
+
+ # transition from READY to ACKNOWLEDGED
+ write_and_flush(stdout, 'RESULT 2\nOK')
+
+if __name__ == '__main__':
+ main()
diff --git a/supervisor/tests/test_childutils.py b/supervisor/tests/test_childutils.py
index c3ffd09..f2b39d8 100644
--- a/supervisor/tests/test_childutils.py
+++ b/supervisor/tests/test_childutils.py
@@ -1,7 +1,9 @@
+from io import BytesIO
import sys
import time
import unittest
from supervisor.compat import StringIO
+from supervisor.compat import as_string
class ChildUtilsTests(unittest.TestCase):
def test_getRPCInterface(self):
@@ -49,23 +51,23 @@ class ChildUtilsTests(unittest.TestCase):
class TestProcessCommunicationsProtocol(unittest.TestCase):
def test_send(self):
from supervisor.childutils import pcomm
- stdout = StringIO()
- pcomm.send('hello', stdout)
+ stdout = BytesIO()
+ pcomm.send(b'hello', stdout)
from supervisor.events import ProcessCommunicationEvent
begin = ProcessCommunicationEvent.BEGIN_TOKEN
end = ProcessCommunicationEvent.END_TOKEN
- self.assertEqual(stdout.getvalue(), '%s%s%s' % (begin, 'hello', end))
+ self.assertEqual(stdout.getvalue(), begin + b'hello' + end)
def test_stdout(self):
from supervisor.childutils import pcomm
old = sys.stdout
try:
- io = sys.stdout = StringIO()
- pcomm.stdout('hello')
+ io = sys.stdout = BytesIO()
+ pcomm.stdout(b'hello')
from supervisor.events import ProcessCommunicationEvent
begin = ProcessCommunicationEvent.BEGIN_TOKEN
end = ProcessCommunicationEvent.END_TOKEN
- self.assertEqual(io.getvalue(), '%s%s%s' % (begin, 'hello', end))
+ self.assertEqual(io.getvalue(), begin + b'hello' + end)
finally:
sys.stdout = old
@@ -73,12 +75,12 @@ class TestProcessCommunicationsProtocol(unittest.TestCase):
from supervisor.childutils import pcomm
old = sys.stderr
try:
- io = sys.stderr = StringIO()
- pcomm.stderr('hello')
+ io = sys.stderr = BytesIO()
+ pcomm.stderr(b'hello')
from supervisor.events import ProcessCommunicationEvent
begin = ProcessCommunicationEvent.BEGIN_TOKEN
end = ProcessCommunicationEvent.END_TOKEN
- self.assertEqual(io.getvalue(), '%s%s%s' % (begin, 'hello', end))
+ self.assertEqual(io.getvalue(), begin + b'hello' + end)
finally:
sys.stderr = old
@@ -100,7 +102,7 @@ class TestEventListenerProtocol(unittest.TestCase):
def test_token(self):
from supervisor.childutils import listener
from supervisor.dispatchers import PEventListenerDispatcher
- token = PEventListenerDispatcher.READY_FOR_EVENTS_TOKEN
+ token = as_string(PEventListenerDispatcher.READY_FOR_EVENTS_TOKEN)
stdout = StringIO()
listener.ready(stdout)
self.assertEqual(stdout.getvalue(), token)
@@ -108,7 +110,7 @@ class TestEventListenerProtocol(unittest.TestCase):
def test_ok(self):
from supervisor.childutils import listener
from supervisor.dispatchers import PEventListenerDispatcher
- begin = PEventListenerDispatcher.RESULT_TOKEN_START
+ begin = as_string(PEventListenerDispatcher.RESULT_TOKEN_START)
stdout = StringIO()
listener.ok(stdout)
self.assertEqual(stdout.getvalue(), begin + '2\nOK')
@@ -116,7 +118,7 @@ class TestEventListenerProtocol(unittest.TestCase):
def test_fail(self):
from supervisor.childutils import listener
from supervisor.dispatchers import PEventListenerDispatcher
- begin = PEventListenerDispatcher.RESULT_TOKEN_START
+ begin = as_string(PEventListenerDispatcher.RESULT_TOKEN_START)
stdout = StringIO()
listener.fail(stdout)
self.assertEqual(stdout.getvalue(), begin + '4\nFAIL')
@@ -124,7 +126,7 @@ class TestEventListenerProtocol(unittest.TestCase):
def test_send(self):
from supervisor.childutils import listener
from supervisor.dispatchers import PEventListenerDispatcher
- begin = PEventListenerDispatcher.RESULT_TOKEN_START
+ begin = as_string(PEventListenerDispatcher.RESULT_TOKEN_START)
stdout = StringIO()
msg = 'the body data ya fool\n'
listener.send(msg, stdout)
diff --git a/supervisor/tests/test_dispatchers.py b/supervisor/tests/test_dispatchers.py
index 52a00f3..45af0f2 100644
--- a/supervisor/tests/test_dispatchers.py
+++ b/supervisor/tests/test_dispatchers.py
@@ -2,6 +2,8 @@ import unittest
import os
import sys
+from supervisor.compat import as_bytes
+
from supervisor.tests.base import DummyOptions
from supervisor.tests.base import DummyProcess
from supervisor.tests.base import DummyPConfig
@@ -88,24 +90,24 @@ class POutputDispatcherTests(unittest.TestCase):
def test_handle_read_event(self):
options = DummyOptions()
- options.readfd_result = 'abc'
+ options.readfd_result = b'abc'
config = DummyPConfig(options, 'process1', '/bin/process1',
stdout_capture_maxbytes=100)
process = DummyProcess(config)
dispatcher = self._makeOne(process)
self.assertEqual(dispatcher.handle_read_event(), None)
- self.assertEqual(dispatcher.output_buffer, 'abc')
+ self.assertEqual(dispatcher.output_buffer, b'abc')
def test_handle_read_event_no_data_closes(self):
options = DummyOptions()
- options.readfd_result = ''
+ options.readfd_result = b''
config = DummyPConfig(options, 'process1', '/bin/process1',
stdout_capture_maxbytes=100)
process = DummyProcess(config)
dispatcher = self._makeOne(process)
self.assertFalse(dispatcher.closed)
self.assertEqual(dispatcher.handle_read_event(), None)
- self.assertEqual(dispatcher.output_buffer, '')
+ self.assertEqual(dispatcher.output_buffer, b'')
self.assertTrue(dispatcher.closed)
def test_handle_error(self):
@@ -180,7 +182,7 @@ class POutputDispatcherTests(unittest.TestCase):
self.assertEqual(dispatcher.childlog.data, ['a'])
self.assertEqual(options.logger.data[0],
"'process1' stdout output:\na")
- self.assertEqual(dispatcher.output_buffer, '')
+ self.assertEqual(dispatcher.output_buffer, b'')
def test_record_output_emits_stdout_event_when_enabled(self):
options = DummyOptions()
@@ -188,7 +190,7 @@ class POutputDispatcherTests(unittest.TestCase):
stdout_events_enabled=True)
process = DummyProcess(config)
dispatcher = self._makeOne(process, 'stdout')
- dispatcher.output_buffer = 'hello from stdout'
+ dispatcher.output_buffer = b'hello from stdout'
L = []
def doit(event):
@@ -200,7 +202,7 @@ class POutputDispatcherTests(unittest.TestCase):
self.assertEqual(len(L), 1)
event = L[0]
self.assertEqual(event.process, process)
- self.assertEqual(event.data, 'hello from stdout')
+ self.assertEqual(event.data, b'hello from stdout')
def test_record_output_does_not_emit_stdout_event_when_disabled(self):
options = DummyOptions()
@@ -208,7 +210,7 @@ class POutputDispatcherTests(unittest.TestCase):
stdout_events_enabled=False)
process = DummyProcess(config)
dispatcher = self._makeOne(process, 'stdout')
- dispatcher.output_buffer = 'hello from stdout'
+ dispatcher.output_buffer = b'hello from stdout'
L = []
def doit(event):
@@ -225,7 +227,7 @@ class POutputDispatcherTests(unittest.TestCase):
stderr_events_enabled=True)
process = DummyProcess(config)
dispatcher = self._makeOne(process, 'stderr')
- dispatcher.output_buffer = 'hello from stderr'
+ dispatcher.output_buffer = b'hello from stderr'
L = []
def doit(event):
@@ -237,7 +239,7 @@ class POutputDispatcherTests(unittest.TestCase):
self.assertEqual(len(L), 1)
event = L[0]
self.assertEqual(event.process, process)
- self.assertEqual(event.data, 'hello from stderr')
+ self.assertEqual(event.data, b'hello from stderr')
def test_record_output_does_not_emit_stderr_event_when_disabled(self):
options = DummyOptions()
@@ -245,7 +247,7 @@ class POutputDispatcherTests(unittest.TestCase):
stderr_events_enabled=False)
process = DummyProcess(config)
dispatcher = self._makeOne(process, 'stderr')
- dispatcher.output_buffer = 'hello from stderr'
+ dispatcher.output_buffer = b'hello from stderr'
L = []
def doit(event):
@@ -268,10 +270,10 @@ class POutputDispatcherTests(unittest.TestCase):
stdout_capture_maxbytes=100)
process = DummyProcess(config)
dispatcher = self._makeOne(process)
- dispatcher.output_buffer = 'stdout string longer than a token'
+ dispatcher.output_buffer = b'stdout string longer than a token'
dispatcher.record_output()
self.assertEqual(dispatcher.childlog.data,
- ['stdout string longer than a token'])
+ [b'stdout string longer than a token'])
self.assertEqual(options.logger.data[0],
"'process1' stdout output:\nstdout string longer than a token")
@@ -301,7 +303,7 @@ class POutputDispatcherTests(unittest.TestCase):
subscribe(ProcessCommunicationEvent, doit)
BEGIN_TOKEN = ProcessCommunicationEvent.BEGIN_TOKEN
END_TOKEN = ProcessCommunicationEvent.END_TOKEN
- data = BEGIN_TOKEN + 'hello' + END_TOKEN
+ data = BEGIN_TOKEN + b'hello' + END_TOKEN
options = DummyOptions()
from supervisor.loggers import getLogger
options.getLogger = getLogger # actually use real logger
@@ -324,7 +326,7 @@ class POutputDispatcherTests(unittest.TestCase):
self.assertEqual(event.__class__, ProcessCommunicationStdoutEvent)
self.assertEqual(event.process, process)
self.assertEqual(event.channel, 'stdout')
- self.assertEqual(event.data, 'hello')
+ self.assertEqual(event.data, b'hello')
finally:
try:
@@ -343,16 +345,17 @@ class POutputDispatcherTests(unittest.TestCase):
subscribe(ProcessCommunicationEvent, doit)
import string
# ascii_letters for python 3
- letters = getattr(string, "letters", string.ascii_letters)
- digits = string.digits * 4
+ letters = as_bytes(getattr(string, "letters", string.ascii_letters))
+ digits = as_bytes(string.digits) * 4
BEGIN_TOKEN = ProcessCommunicationEvent.BEGIN_TOKEN
END_TOKEN = ProcessCommunicationEvent.END_TOKEN
data = (letters + BEGIN_TOKEN + digits + END_TOKEN + letters)
# boundaries that split tokens
- broken = data.split(':')
- first = broken[0] + ':'
- second = broken[1] + ':'
+ colon = b':'
+ broken = data.split(colon)
+ first = broken[0] + colon
+ second = broken[1] + colon
third = broken[2]
options = DummyOptions()
@@ -368,7 +371,7 @@ class POutputDispatcherTests(unittest.TestCase):
dispatcher.output_buffer = first
dispatcher.record_output()
[ x.flush() for x in dispatcher.childlog.handlers ]
- with open(logfile, 'r') as f:
+ with open(logfile, 'rb') as f:
self.assertEqual(f.read(), letters)
self.assertEqual(dispatcher.output_buffer, first[len(letters):])
self.assertEqual(len(events), 0)
@@ -377,7 +380,7 @@ class POutputDispatcherTests(unittest.TestCase):
dispatcher.record_output()
self.assertEqual(len(events), 0)
[ x.flush() for x in dispatcher.childlog.handlers ]
- with open(logfile, 'r') as f:
+ with open(logfile, 'rb') as f:
self.assertEqual(f.read(), letters)
self.assertEqual(dispatcher.output_buffer, first[len(letters):])
self.assertEqual(len(events), 0)
@@ -385,7 +388,7 @@ class POutputDispatcherTests(unittest.TestCase):
dispatcher.output_buffer += third
dispatcher.record_output()
[ x.flush() for x in dispatcher.childlog.handlers ]
- with open(logfile, 'r') as f:
+ with open(logfile, 'rb') as f:
self.assertEqual(f.read(), letters * 2)
self.assertEqual(len(events), 1)
event = events[0]
@@ -410,8 +413,8 @@ class POutputDispatcherTests(unittest.TestCase):
stdout_logfile='/tmp/foo')
process = DummyProcess(config)
dispatcher = self._makeOne(process)
- ansi = '\x1b[34mHello world... this is longer than a token!\x1b[0m'
- noansi = 'Hello world... this is longer than a token!'
+ ansi = b'\x1b[34mHello world... this is longer than a token!\x1b[0m'
+ noansi = b'Hello world... this is longer than a token!'
dispatcher.output_buffer = ansi
dispatcher.record_output()
@@ -568,9 +571,9 @@ class PInputDispatcherTests(unittest.TestCase):
config = DummyPConfig(options, 'test', '/test')
process = DummyProcess(config)
dispatcher = self._makeOne(process)
- self.assertEqual(dispatcher.input_buffer, '')
+ self.assertEqual(dispatcher.input_buffer, b'')
dispatcher.handle_write_event()
- self.assertEqual(dispatcher.input_buffer, '')
+ self.assertEqual(dispatcher.input_buffer, b'')
self.assertEqual(options.written, {})
def test_handle_write_event_epipe_raised(self):
@@ -582,7 +585,7 @@ class PInputDispatcherTests(unittest.TestCase):
import errno
options.write_error = errno.EPIPE
dispatcher.handle_write_event()
- self.assertEqual(dispatcher.input_buffer, '')
+ self.assertEqual(dispatcher.input_buffer, b'')
self.assertTrue(options.logger.data[0].startswith(
'fd 0 closed, stopped monitoring'))
self.assertTrue(options.logger.data[0].endswith('(stdin)>'))
@@ -706,7 +709,7 @@ class PEventListenerDispatcherTests(unittest.TestCase):
options.readfd_result = dispatcher.READY_FOR_EVENTS_TOKEN
self.assertEqual(dispatcher.handle_read_event(), None)
self.assertEqual(process.listener_state, EventListenerStates.READY)
- self.assertEqual(dispatcher.state_buffer, '')
+ self.assertEqual(dispatcher.state_buffer, b'')
self.assertEqual(len(dispatcher.childlog.data), 1)
self.assertEqual(dispatcher.childlog.data[0],
dispatcher.READY_FOR_EVENTS_TOKEN)
@@ -718,14 +721,14 @@ class PEventListenerDispatcherTests(unittest.TestCase):
process = DummyProcess(config)
dispatcher = self._makeOne(process)
self.assertEqual(dispatcher.handle_read_event(), None)
- self.assertEqual(dispatcher.state_buffer, '')
+ self.assertEqual(dispatcher.state_buffer, b'')
from supervisor.dispatchers import EventListenerStates
self.assertEqual(dispatcher.process.listener_state,
EventListenerStates.ACKNOWLEDGED)
def test_handle_read_event_logging_nologs(self):
options = DummyOptions()
- options.readfd_result = 'supercalifragilisticexpialidocious'
+ options.readfd_result = b'supercalifragilisticexpialidocious'
config = DummyPConfig(options, 'process1', '/bin/process1')
process = DummyProcess(config)
dispatcher = self._makeOne(process)
@@ -736,7 +739,7 @@ class PEventListenerDispatcherTests(unittest.TestCase):
def test_handle_read_event_logging_childlog(self):
options = DummyOptions()
- options.readfd_result = 'supercalifragilisticexpialidocious'
+ options.readfd_result = b'supercalifragilisticexpialidocious'
config = DummyPConfig(options, 'process1', '/bin/process1',
stdout_logfile='/tmp/foo')
process = DummyProcess(config)
@@ -744,7 +747,7 @@ class PEventListenerDispatcherTests(unittest.TestCase):
self.assertEqual(dispatcher.handle_read_event(), None)
self.assertEqual(len(dispatcher.childlog.data), 1)
self.assertEqual(dispatcher.childlog.data[0],
- 'supercalifragilisticexpialidocious')
+ b'supercalifragilisticexpialidocious')
def test_handle_listener_state_change_from_unknown(self):
options = DummyOptions()
@@ -753,9 +756,9 @@ class PEventListenerDispatcherTests(unittest.TestCase):
from supervisor.dispatchers import EventListenerStates
dispatcher = self._makeOne(process)
process.listener_state = EventListenerStates.UNKNOWN
- dispatcher.state_buffer = 'whatever'
+ dispatcher.state_buffer = b'whatever'
self.assertEqual(dispatcher.handle_listener_state_change(), None)
- self.assertEqual(dispatcher.state_buffer, '')
+ self.assertEqual(dispatcher.state_buffer, b'')
self.assertEqual(options.logger.data, [])
self.assertEqual(process.listener_state, EventListenerStates.UNKNOWN)
@@ -766,9 +769,9 @@ class PEventListenerDispatcherTests(unittest.TestCase):
from supervisor.dispatchers import EventListenerStates
dispatcher = self._makeOne(process)
process.listener_state = EventListenerStates.ACKNOWLEDGED
- dispatcher.state_buffer = 'READY\n'
+ dispatcher.state_buffer = b'READY\n'
self.assertEqual(dispatcher.handle_listener_state_change(), None)
- self.assertEqual(dispatcher.state_buffer, '')
+ self.assertEqual(dispatcher.state_buffer, b'')
self.assertEqual(options.logger.data[0],
'process1: ACKNOWLEDGED -> READY')
self.assertEqual(process.listener_state, EventListenerStates.READY)
@@ -780,9 +783,9 @@ class PEventListenerDispatcherTests(unittest.TestCase):
from supervisor.dispatchers import EventListenerStates
dispatcher = self._makeOne(process)
process.listener_state = EventListenerStates.ACKNOWLEDGED
- dispatcher.state_buffer = 'READY\ngarbage\n'
+ dispatcher.state_buffer = b'READY\ngarbage\n'
self.assertEqual(dispatcher.handle_listener_state_change(), None)
- self.assertEqual(dispatcher.state_buffer, '')
+ self.assertEqual(dispatcher.state_buffer, b'')
self.assertEqual(options.logger.data[0],
'process1: ACKNOWLEDGED -> READY')
self.assertEqual(options.logger.data[1],
@@ -796,9 +799,9 @@ class PEventListenerDispatcherTests(unittest.TestCase):
from supervisor.dispatchers import EventListenerStates
dispatcher = self._makeOne(process)
process.listener_state = EventListenerStates.ACKNOWLEDGED
- dispatcher.state_buffer = 'RE'
+ dispatcher.state_buffer = b'RE'
self.assertEqual(dispatcher.handle_listener_state_change(), None)
- self.assertEqual(dispatcher.state_buffer, 'RE')
+ self.assertEqual(dispatcher.state_buffer, b'RE')
self.assertEqual(options.logger.data, [])
self.assertEqual(process.listener_state,
EventListenerStates.ACKNOWLEDGED)
@@ -810,9 +813,9 @@ class PEventListenerDispatcherTests(unittest.TestCase):
from supervisor.dispatchers import EventListenerStates
dispatcher = self._makeOne(process)
process.listener_state = EventListenerStates.ACKNOWLEDGED
- dispatcher.state_buffer = 'bogus data yo'
+ dispatcher.state_buffer = b'bogus data yo'
self.assertEqual(dispatcher.handle_listener_state_change(), None)
- self.assertEqual(dispatcher.state_buffer, '')
+ self.assertEqual(dispatcher.state_buffer, b'')
self.assertEqual(options.logger.data[0],
'process1: ACKNOWLEDGED -> UNKNOWN')
self.assertEqual(options.logger.data[1],
@@ -828,9 +831,9 @@ class PEventListenerDispatcherTests(unittest.TestCase):
from supervisor.dispatchers import EventListenerStates
dispatcher = self._makeOne(process)
process.listener_state = EventListenerStates.READY
- dispatcher.state_buffer = 'bogus data yo'
+ dispatcher.state_buffer = b'bogus data yo'
self.assertEqual(dispatcher.handle_listener_state_change(), None)
- self.assertEqual(dispatcher.state_buffer, '')
+ self.assertEqual(dispatcher.state_buffer, b'')
self.assertEqual(options.logger.data[0],
'process1: READY -> UNKNOWN')
self.assertEqual(options.logger.data[1],
@@ -846,9 +849,9 @@ class PEventListenerDispatcherTests(unittest.TestCase):
from supervisor.dispatchers import EventListenerStates
dispatcher = self._makeOne(process)
process.listener_state = EventListenerStates.BUSY
- dispatcher.state_buffer = 'bogus data yo'
+ dispatcher.state_buffer = b'bogus data yo'
self.assertEqual(dispatcher.handle_listener_state_change(), None)
- self.assertEqual(dispatcher.state_buffer, 'bogus data yo')
+ self.assertEqual(dispatcher.state_buffer, b'bogus data yo')
self.assertEqual(process.listener_state, EventListenerStates.BUSY)
def test_handle_listener_state_change_busy_to_acknowledged_procd(self):
@@ -864,9 +867,9 @@ class PEventListenerDispatcherTests(unittest.TestCase):
process.group.config = Dummy()
from supervisor.dispatchers import default_handler
process.group.config.result_handler = default_handler
- dispatcher.state_buffer = 'RESULT 2\nOKabc'
+ dispatcher.state_buffer = b'RESULT 2\nOKabc'
self.assertEqual(dispatcher.handle_listener_state_change(), None)
- self.assertEqual(dispatcher.state_buffer, 'abc')
+ self.assertEqual(dispatcher.state_buffer, b'abc')
self.assertEqual(options.logger.data[0],
'process1: event was processed')
self.assertEqual(options.logger.data[1],
@@ -887,9 +890,9 @@ class PEventListenerDispatcherTests(unittest.TestCase):
process.group.config = Dummy()
from supervisor.dispatchers import default_handler
process.group.config.result_handler = default_handler
- dispatcher.state_buffer = 'RESULT 4\nFAILabc'
+ dispatcher.state_buffer = b'RESULT 4\nFAILabc'
self.assertEqual(dispatcher.handle_listener_state_change(), None)
- self.assertEqual(dispatcher.state_buffer, 'abc')
+ self.assertEqual(dispatcher.state_buffer, b'abc')
self.assertEqual(options.logger.data[0],
'process1: event was rejected')
self.assertEqual(options.logger.data[1],
@@ -912,9 +915,9 @@ class PEventListenerDispatcherTests(unittest.TestCase):
process.listener_state = EventListenerStates.BUSY
current_event = DummyEvent()
process.event = current_event
- dispatcher.state_buffer = 'bogus data\n'
+ dispatcher.state_buffer = b'bogus data\n'
self.assertEqual(dispatcher.handle_listener_state_change(), None)
- self.assertEqual(dispatcher.state_buffer, '')
+ self.assertEqual(dispatcher.state_buffer, b'')
self.assertEqual(options.logger.data[0],
"process1: bad result line: 'bogus data'")
self.assertEqual(options.logger.data[1],
@@ -941,9 +944,9 @@ class PEventListenerDispatcherTests(unittest.TestCase):
process.group.config = Dummy()
from supervisor.dispatchers import default_handler
process.group.config.result_handler = default_handler
- dispatcher.state_buffer = 'RESULT 2\nOKbogus data\n'
+ dispatcher.state_buffer = b'RESULT 2\nOKbogus data\n'
self.assertEqual(dispatcher.handle_listener_state_change(), None)
- self.assertEqual(dispatcher.state_buffer, '')
+ self.assertEqual(dispatcher.state_buffer, b'')
self.assertEqual(options.logger.data[0],
'process1: event was processed')
self.assertEqual(options.logger.data[1],
@@ -1091,8 +1094,8 @@ class PEventListenerDispatcherTests(unittest.TestCase):
stdout_logfile='/tmp/foo')
process = DummyProcess(config)
dispatcher = self._makeOne(process)
- ansi = '\x1b[34mHello world... this is longer than a token!\x1b[0m'
- noansi = 'Hello world... this is longer than a token!'
+ ansi = b'\x1b[34mHello world... this is longer than a token!\x1b[0m'
+ noansi = b'Hello world... this is longer than a token!'
options.readfd_result = ansi
dispatcher.handle_read_event()
@@ -1156,15 +1159,15 @@ class stripEscapeTests(unittest.TestCase):
return stripEscapes(s)
def test_zero_length_string(self):
- self.assertEqual(self._callFUT(''), '')
+ self.assertEqual(self._callFUT(b''), b'')
def test_ansi(self):
- ansi = '\x1b[34mHello world... this is longer than a token!\x1b[0m'
- noansi = 'Hello world... this is longer than a token!'
+ ansi = b'\x1b[34mHello world... this is longer than a token!\x1b[0m'
+ noansi = b'Hello world... this is longer than a token!'
self.assertEqual(self._callFUT(ansi), noansi)
def test_noansi(self):
- noansi = 'Hello world... this is longer than a token!'
+ noansi = b'Hello world... this is longer than a token!'
self.assertEqual(self._callFUT(noansi), noansi)
def test_suite():
diff --git a/supervisor/tests/test_end_to_end.py b/supervisor/tests/test_end_to_end.py
new file mode 100644
index 0000000..8953894
--- /dev/null
+++ b/supervisor/tests/test_end_to_end.py
@@ -0,0 +1,114 @@
+# ~*~ coding: utf-8 ~*~
+from __future__ import unicode_literals
+
+import sys
+import signal
+import unittest
+
+try:
+ from xmlrpc.client import ServerProxy
+except ImportError:
+ from xmlrpclib import ServerProxy
+
+try:
+ import pexpect
+except ImportError:
+ pexpect = None
+
+
+class TestEndToEnd(unittest.TestCase):
+
+ @unittest.skipUnless(pexpect, 'This test needs the pexpect library')
+ def test_issue_565(self):
+ args = '-m supervisor.supervisord -c supervisor/tests/fixtures/issue-565.conf'.split()
+ supervisord = pexpect.spawn(sys.executable, args, encoding='utf-8')
+ self.addCleanup(supervisord.kill, signal.SIGINT)
+ supervisord.expect_exact('success: hello entered RUNNING state')
+
+ args = '-m supervisor.supervisorctl -c supervisor/tests/fixtures/issue-565.conf tail -f hello'.split()
+ supervisorctl = pexpect.spawn(sys.executable, args, encoding='utf-8')
+ self.addCleanup(supervisorctl.kill, signal.SIGINT)
+
+ for i in range(1, 4):
+ line = 'The Øresund bridge ends in Malmö - %d' % i
+ supervisorctl.expect_exact(line, timeout=2)
+
+ @unittest.skipUnless(pexpect, 'This test needs the pexpect library')
+ def test_issue_638(self):
+ args = '-m supervisor.supervisord -c supervisor/tests/fixtures/issue-638.conf'.split()
+ supervisord = pexpect.spawn(sys.executable, args, encoding='utf-8')
+ self.addCleanup(supervisord.kill, signal.SIGINT)
+ is_py2 = sys.version_info[0] < 3
+ if is_py2:
+ b_prefix = ''
+ else:
+ b_prefix = 'b'
+ supervisord.expect_exact(r"Undecodable: %s'\x88\n'" % b_prefix, timeout=2)
+ supervisord.expect('received SIGCH?LD indicating a child quit', timeout=5)
+ if is_py2:
+ # need to investigate why this message is only printed under 2.x
+ supervisord.expect_exact('gave up: produce-unicode-error entered FATAL state, '
+ 'too many start retries too quickly', timeout=10)
+
+ @unittest.skipUnless(pexpect, 'This test needs the pexpect library')
+ def test_issue_663(self):
+ args = '-m supervisor.supervisord -c supervisor/tests/fixtures/issue-663.conf'.split()
+ supervisord = pexpect.spawn(sys.executable, args, encoding='utf-8')
+ self.addCleanup(supervisord.kill, signal.SIGINT)
+ for i in range(2):
+ supervisord.expect_exact('OKREADY', timeout=10)
+ supervisord.expect_exact('BUSY -> ACKNOWLEDGED', timeout=2)
+
+ @unittest.skipUnless(pexpect, 'This test needs the pexpect library')
+ def test_issue_664(self):
+ args = '-m supervisor.supervisord -c supervisor/tests/fixtures/issue-664.conf'.split()
+ supervisord = pexpect.spawn(sys.executable, args, encoding='utf-8')
+ self.addCleanup(supervisord.kill, signal.SIGINT)
+ supervisord.expect_exact('test_öäü entered RUNNING state', timeout=10)
+ args = '-m supervisor.supervisorctl -c supervisor/tests/fixtures/issue-664.conf status'.split()
+ supervisorctl = pexpect.spawn(sys.executable, args, encoding='utf-8')
+ self.addCleanup(supervisorctl.kill, signal.SIGINT)
+ try:
+ supervisorctl.expect('test_öäü\s+RUNNING', timeout=5)
+ seen = True
+ except pexpect.ExceptionPexpect:
+ seen = False
+ self.assertTrue(seen)
+
+ @unittest.skipUnless(pexpect, 'This test needs the pexpect library')
+ def test_issue_835(self):
+ args = '-m supervisor.supervisord -c supervisor/tests/fixtures/issue-836.conf'.split()
+ supervisord = pexpect.spawn(sys.executable, args, encoding='utf-8')
+ self.addCleanup(supervisord.kill, signal.SIGINT)
+ supervisord.expect_exact('cat entered RUNNING state', timeout=10)
+ server = ServerProxy('http://127.0.0.1:9001/RPC2')
+ for s in ('The Øresund bridge ends in Malmö', 'hello'):
+ result = server.supervisor.sendProcessStdin('cat', s)
+ self.assertTrue(result)
+ supervisord.expect_exact(s, timeout=5)
+ server('close')()
+
+ @unittest.skipUnless(pexpect, 'This test needs the pexpect library')
+ def test_issue_836(self):
+ args = '-m supervisor.supervisord -c supervisor/tests/fixtures/issue-836.conf'.split()
+ supervisord = pexpect.spawn(sys.executable, args, encoding='utf-8')
+ self.addCleanup(supervisord.kill, signal.SIGINT)
+ supervisord.expect_exact('cat entered RUNNING state', timeout=10)
+ args = '-m supervisor.supervisorctl -c supervisor/tests/fixtures/issue-836.conf fg cat'.split()
+ supervisorctl = pexpect.spawn(sys.executable, args, encoding='utf-8')
+ self.addCleanup(supervisorctl.kill, signal.SIGINT)
+
+ # TODO investigate - failure
+ try:
+ for s in ('Hi', 'Hello', 'The Øresund bridge ends in Malmö'):
+ supervisorctl.sendline(s)
+ supervisord.expect_exact(s, timeout=10)
+ supervisorctl.expect_exact(s) # echoed locally
+ supervisorctl.expect_exact(s) # sent back by supervisord
+ seen = True
+ except pexpect.ExceptionPexpect as e:
+ seen = False
+ self.assertTrue(seen)
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/supervisor/tests/test_events.py b/supervisor/tests/test_events.py
index 8b26cb5..15b00da 100644
--- a/supervisor/tests/test_events.py
+++ b/supervisor/tests/test_events.py
@@ -273,7 +273,7 @@ class TestSerializations(unittest.TestCase):
config = pconfig1
process1.group = DummyGroup
event = ProcessLogStdoutEvent(process1, 1, 'yo')
- headers, payload = self._deserialize(str(event))
+ headers, payload = self._deserialize(event.payload())
self.assertEqual(headers['processname'], 'process1', headers)
self.assertEqual(headers['groupname'], 'process1', headers)
self.assertEqual(headers['pid'], '1', headers)
@@ -288,7 +288,7 @@ class TestSerializations(unittest.TestCase):
config = pconfig1
process1.group = DummyGroup
event = ProcessLogStderrEvent(process1, 1, 'yo')
- headers, payload = self._deserialize(str(event))
+ headers, payload = self._deserialize(event.payload())
self.assertEqual(headers['processname'], 'process1', headers)
self.assertEqual(headers['groupname'], 'process1', headers)
self.assertEqual(headers['pid'], '1', headers)
@@ -303,7 +303,7 @@ class TestSerializations(unittest.TestCase):
config = pconfig1
process1.group = DummyGroup
event = ProcessCommunicationStdoutEvent(process1, 1, 'yo')
- headers, payload = self._deserialize(str(event))
+ headers, payload = self._deserialize(event.payload())
self.assertEqual(headers['processname'], 'process1', headers)
self.assertEqual(headers['groupname'], 'process1', headers)
self.assertEqual(headers['pid'], '1', headers)
@@ -318,7 +318,7 @@ class TestSerializations(unittest.TestCase):
process1.group = DummyGroup
from supervisor.events import ProcessCommunicationStderrEvent
event = ProcessCommunicationStderrEvent(process1, 1, 'yo')
- headers, payload = self._deserialize(str(event))
+ headers, payload = self._deserialize(event.payload())
self.assertEqual(headers['processname'], 'process1', headers)
self.assertEqual(headers['groupname'], 'process1', headers)
self.assertEqual(headers['pid'], '1', headers)
@@ -327,21 +327,21 @@ class TestSerializations(unittest.TestCase):
def test_remote_comm_event(self):
from supervisor.events import RemoteCommunicationEvent
event = RemoteCommunicationEvent('foo', 'bar')
- headers, payload = self._deserialize(str(event))
+ headers, payload = self._deserialize(event.payload())
self.assertEqual(headers['type'], 'foo', headers)
self.assertEqual(payload, 'bar')
def test_process_group_added_event(self):
from supervisor.events import ProcessGroupAddedEvent
event = ProcessGroupAddedEvent('foo')
- headers, payload = self._deserialize(str(event))
+ headers, payload = self._deserialize(event.payload())
self.assertEqual(headers['groupname'], 'foo')
self.assertEqual(payload, '')
def test_process_group_removed_event(self):
from supervisor.events import ProcessGroupRemovedEvent
event = ProcessGroupRemovedEvent('foo')
- headers, payload = self._deserialize(str(event))
+ headers, payload = self._deserialize(event.payload())
self.assertEqual(headers['groupname'], 'foo')
self.assertEqual(payload, '')
@@ -360,7 +360,7 @@ class TestSerializations(unittest.TestCase):
process1 = DummyProcess(pconfig1)
process1.group = DummyGroup
event = klass(process1, ProcessStates.STARTING)
- headers, payload = self._deserialize(str(event))
+ headers, payload = self._deserialize(event.payload())
self.assertEqual(len(headers), 3)
self.assertEqual(headers['processname'], 'process1')
self.assertEqual(headers['groupname'], 'process1')
@@ -384,7 +384,7 @@ class TestSerializations(unittest.TestCase):
process1.group = DummyGroup
process1.pid = 1
event = klass(process1, ProcessStates.STARTING)
- headers, payload = self._deserialize(str(event))
+ headers, payload = self._deserialize(event.payload())
self.assertEqual(len(headers), 4)
self.assertEqual(headers['processname'], 'process1')
self.assertEqual(headers['groupname'], 'process1')
@@ -407,7 +407,7 @@ class TestSerializations(unittest.TestCase):
process1 = DummyProcess(pconfig1)
process1.group = DummyGroup
event = klass(process1, ProcessStates.STARTING)
- headers, payload = self._deserialize(str(event))
+ headers, payload = self._deserialize(event.payload())
self.assertEqual(len(headers), 4)
self.assertEqual(headers['processname'], 'process1')
self.assertEqual(headers['groupname'], 'process1')
@@ -416,11 +416,11 @@ class TestSerializations(unittest.TestCase):
self.assertEqual(payload, '')
process1.backoff = 1
event = klass(process1, ProcessStates.STARTING)
- headers, payload = self._deserialize(str(event))
+ headers, payload = self._deserialize(event.payload())
self.assertEqual(headers['tries'], '1')
process1.backoff = 2
event = klass(process1, ProcessStates.STARTING)
- headers, payload = self._deserialize(str(event))
+ headers, payload = self._deserialize(event.payload())
self.assertEqual(headers['tries'], '2')
def test_process_state_exited_event_expected(self):
@@ -436,7 +436,7 @@ class TestSerializations(unittest.TestCase):
event = events.ProcessStateExitedEvent(process1,
ProcessStates.STARTING,
expected=True)
- headers, payload = self._deserialize(str(event))
+ headers, payload = self._deserialize(event.payload())
self.assertEqual(len(headers), 5)
self.assertEqual(headers['processname'], 'process1')
self.assertEqual(headers['groupname'], 'process1')
@@ -458,7 +458,7 @@ class TestSerializations(unittest.TestCase):
event = events.ProcessStateExitedEvent(process1,
ProcessStates.STARTING,
expected=False)
- headers, payload = self._deserialize(str(event))
+ headers, payload = self._deserialize(event.payload())
self.assertEqual(len(headers), 5)
self.assertEqual(headers['processname'], 'process1')
self.assertEqual(headers['groupname'], 'process1')
@@ -470,7 +470,7 @@ class TestSerializations(unittest.TestCase):
def test_supervisor_sc_event(self):
from supervisor import events
event = events.SupervisorRunningEvent()
- headers, payload = self._deserialize(str(event))
+ headers, payload = self._deserialize(event.payload())
self.assertEqual(headers, {})
self.assertEqual(payload, '')
@@ -482,7 +482,7 @@ class TestSerializations(unittest.TestCase):
events.Tick3600Event,
):
event = klass(1, 2)
- headers, payload = self._deserialize(str(event))
+ headers, payload = self._deserialize(event.payload())
self.assertEqual(headers, {'when':'1'})
self.assertEqual(payload, '')
diff --git a/supervisor/tests/test_http.py b/supervisor/tests/test_http.py
index b36c2b2..aefd2ee 100644
--- a/supervisor/tests/test_http.py
+++ b/supervisor/tests/test_http.py
@@ -122,15 +122,15 @@ class TailFProducerTests(unittest.TestCase):
request = DummyRequest('/logtail/foo', None, None, None)
from supervisor import http
f = tempfile.NamedTemporaryFile()
- f.write(as_bytes('a' * 80))
+ f.write(b'a' * 80)
f.flush()
producer = self._makeOne(request, f.name, 80)
result = producer.more()
- self.assertEqual(result, as_bytes('a' * 80))
- f.write(as_bytes('w' * 100))
+ self.assertEqual(result, b'a' * 80)
+ f.write(as_bytes(b'w' * 100))
f.flush()
result = producer.more()
- self.assertEqual(result, as_bytes('w' * 100))
+ self.assertEqual(result, b'w' * 100)
result = producer.more()
self.assertEqual(result, http.NOT_DONE_YET)
f.truncate(0)
@@ -155,33 +155,33 @@ class TailFProducerTests(unittest.TestCase):
f.flush()
producer = self._makeOne(request, f.name, 80)
result = producer.more()
- self.assertEqual(result, as_bytes('a' * 80))
+ self.assertEqual(result, b'a' * 80)
f.close()
f2 = open(f.name, 'wb')
try:
- f2.write(as_bytes('b' * 80))
+ f2.write(as_bytes(b'b' * 80))
f2.close()
result = producer.more()
finally:
os.unlink(f2.name)
- self.assertEqual(result, as_bytes('b' * 80))
+ self.assertEqual(result, b'b' * 80)
def test_handle_more_follow_file_gone(self):
request = DummyRequest('/logtail/foo', None, None, None)
filename = tempfile.mktemp()
with open(filename, 'wb') as f:
- f.write(as_bytes('a' * 80))
+ f.write(b'a' * 80)
try:
producer = self._makeOne(request, f.name, 80)
finally:
os.unlink(f.name)
result = producer.more()
- self.assertEqual(result, as_bytes('a' * 80))
+ self.assertEqual(result, b'a' * 80)
with open(filename, 'wb') as f:
- f.write(as_bytes('b' * 80))
+ f.write(as_bytes(b'b' * 80))
try:
result = producer.more() # should open in new file
- self.assertEqual(result, as_bytes('b' * 80))
+ self.assertEqual(result, b'b' * 80)
finally:
os.unlink(f.name)
@@ -199,28 +199,28 @@ class DeferringChunkedProducerTests(unittest.TestCase):
self.assertEqual(producer.more(), NOT_DONE_YET)
def test_more_string(self):
- wrapped = DummyProducer('hello')
+ wrapped = DummyProducer(b'hello')
producer = self._makeOne(wrapped)
- self.assertEqual(producer.more(), '5\r\nhello\r\n')
+ self.assertEqual(producer.more(), b'5\r\nhello\r\n')
def test_more_nodata(self):
wrapped = DummyProducer()
- producer = self._makeOne(wrapped, footers=['a', 'b'])
- self.assertEqual(producer.more(), '0\r\na\r\nb\r\n\r\n')
+ producer = self._makeOne(wrapped, footers=[b'a', b'b'])
+ self.assertEqual(producer.more(), b'0\r\na\r\nb\r\n\r\n')
def test_more_nodata_footers(self):
- wrapped = DummyProducer('')
- producer = self._makeOne(wrapped, footers=['a', 'b'])
- self.assertEqual(producer.more(), '0\r\na\r\nb\r\n\r\n')
+ wrapped = DummyProducer(b'')
+ producer = self._makeOne(wrapped, footers=[b'a', b'b'])
+ self.assertEqual(producer.more(), b'0\r\na\r\nb\r\n\r\n')
def test_more_nodata_nofooters(self):
- wrapped = DummyProducer('')
+ wrapped = DummyProducer(b'')
producer = self._makeOne(wrapped)
- self.assertEqual(producer.more(), '0\r\n\r\n')
+ self.assertEqual(producer.more(), b'0\r\n\r\n')
def test_more_noproducer(self):
producer = self._makeOne(None)
- self.assertEqual(producer.more(), '')
+ self.assertEqual(producer.more(), b'')
class DeferringCompositeProducerTests(unittest.TestCase):
def _getTargetClass(self):
@@ -241,12 +241,12 @@ class DeferringCompositeProducerTests(unittest.TestCase):
producer = self._makeOne([wrapped1, wrapped2])
self.assertEqual(producer.more(), 'hello')
self.assertEqual(producer.more(), 'goodbye')
- self.assertEqual(producer.more(), '')
+ self.assertEqual(producer.more(), b'')
def test_more_nodata(self):
wrapped = DummyProducer()
producer = self._makeOne([wrapped])
- self.assertEqual(producer.more(), '')
+ self.assertEqual(producer.more(), b'')
class DeferringGlobbingProducerTests(unittest.TestCase):
def _getTargetClass(self):
@@ -264,16 +264,16 @@ class DeferringGlobbingProducerTests(unittest.TestCase):
def test_more_string(self):
wrapped = DummyProducer('hello', 'there', 'guy')
producer = self._makeOne(wrapped, buffer_size=1)
- self.assertEqual(producer.more(), 'hello')
+ self.assertEqual(producer.more(), b'hello')
wrapped = DummyProducer('hello', 'there', 'guy')
producer = self._makeOne(wrapped, buffer_size=50)
- self.assertEqual(producer.more(), 'hellothereguy')
+ self.assertEqual(producer.more(), b'hellothereguy')
def test_more_nodata(self):
wrapped = DummyProducer()
producer = self._makeOne(wrapped)
- self.assertEqual(producer.more(), '')
+ self.assertEqual(producer.more(), b'')
class DeferringHookedProducerTests(unittest.TestCase):
def _getTargetClass(self):
@@ -305,12 +305,12 @@ class DeferringHookedProducerTests(unittest.TestCase):
def callback(bytes):
L.append(bytes)
producer = self._makeOne(wrapped, callback)
- self.assertEqual(producer.more(), '')
+ self.assertEqual(producer.more(), b'')
self.assertEqual(L, [0])
def test_more_noproducer(self):
producer = self._makeOne(None, None)
- self.assertEqual(producer.more(), '')
+ self.assertEqual(producer.more(), b'')
class DeferringHttpRequestTests(unittest.TestCase):
def _getTargetClass(self):
@@ -676,7 +676,7 @@ class DummyProducer:
if self.data:
return self.data.pop(0)
else:
- return ''
+ return b''
def test_suite():
return unittest.findTestCases(sys.modules[__name__])
diff --git a/supervisor/tests/test_http_client.py b/supervisor/tests/test_http_client.py
index b38723f..ecbdafc 100644
--- a/supervisor/tests/test_http_client.py
+++ b/supervisor/tests/test_http_client.py
@@ -1,6 +1,8 @@
import socket
import sys
import unittest
+
+from supervisor.compat import as_bytes
from supervisor.compat import StringIO
class ListenerTests(unittest.TestCase):
@@ -157,31 +159,31 @@ class HTTPHandlerTests(unittest.TestCase):
def test_handle_connect_no_password(self):
inst = self._makeOne()
pushed = []
- inst.push = lambda val: pushed.append(val)
+ inst.push = lambda val: pushed.append(as_bytes(val))
inst.path = '/'
inst.host = 'localhost'
inst.handle_connect()
self.assertTrue(inst.connected)
self.assertEqual(
pushed,
- ['GET / HTTP/1.1',
- '\r\n',
- 'Host: localhost',
- '\r\n',
- 'Accept-Encoding: chunked',
- '\r\n',
- 'Accept: */*',
- '\r\n',
- 'User-agent: Supervisor HTTP Client',
- '\r\n',
- '\r\n',
- '\r\n']
+ [b'GET / HTTP/1.1',
+ b'\r\n',
+ b'Host: localhost',
+ b'\r\n',
+ b'Accept-Encoding: chunked',
+ b'\r\n',
+ b'Accept: */*',
+ b'\r\n',
+ b'User-agent: Supervisor HTTP Client',
+ b'\r\n',
+ b'\r\n',
+ b'\r\n']
)
def test_handle_connect_with_password(self):
inst = self._makeOne()
pushed = []
- inst.push = lambda val: pushed.append(val)
+ inst.push = lambda val: pushed.append(as_bytes(val))
inst.path = '/'
inst.host = 'localhost'
inst.password = 'password'
@@ -190,20 +192,20 @@ class HTTPHandlerTests(unittest.TestCase):
self.assertTrue(inst.connected)
self.assertEqual(
pushed,
- ['GET / HTTP/1.1',
- '\r\n',
- 'Host: localhost',
- '\r\n',
- 'Accept-Encoding: chunked',
- '\r\n',
- 'Accept: */*',
- '\r\n',
- 'User-agent: Supervisor HTTP Client',
- '\r\n',
- 'Authorization: Basic dXNlcm5hbWU6cGFzc3dvcmQ=',
- '\r\n',
- '\r\n',
- '\r\n'],
+ [b'GET / HTTP/1.1',
+ b'\r\n',
+ b'Host: localhost',
+ b'\r\n',
+ b'Accept-Encoding: chunked',
+ b'\r\n',
+ b'Accept: */*',
+ b'\r\n',
+ b'User-agent: Supervisor HTTP Client',
+ b'\r\n',
+ b'Authorization: Basic dXNlcm5hbWU6cGFzc3dvcmQ=',
+ b'\r\n',
+ b'\r\n',
+ b'\r\n'],
)
def test_feed(self):
@@ -217,7 +219,7 @@ class HTTPHandlerTests(unittest.TestCase):
inst.buffer = 'abc'
inst.collect_incoming_data('foo')
self.assertEqual(inst.listener.fed_data, ['abcfoo'])
- self.assertEqual(inst.buffer, '')
+ self.assertEqual(inst.buffer, b'')
def test_collect_incoming_data_part_is_not_body(self):
inst = self._makeOne()
@@ -234,37 +236,37 @@ class HTTPHandlerTests(unittest.TestCase):
inst.buffer = None
inst.found_terminator()
self.assertEqual(parted, [True])
- self.assertEqual(inst.buffer, '')
+ self.assertEqual(inst.buffer, b'')
def test_ignore(self):
inst = self._makeOne()
inst.buffer = None
inst.ignore()
- self.assertEqual(inst.buffer, '')
+ self.assertEqual(inst.buffer, b'')
def test_status_line_not_startswith_http(self):
inst = self._makeOne()
- inst.buffer = 'NOTHTTP/1.0 200 OK'
+ inst.buffer = b'NOTHTTP/1.0 200 OK'
self.assertRaises(ValueError, inst.status_line)
def test_status_line_200(self):
inst = self._makeOne()
- inst.buffer = 'HTTP/1.0 200 OK'
+ inst.buffer = b'HTTP/1.0 200 OK'
version, status, reason = inst.status_line()
- self.assertEqual(version, 'HTTP/1.0')
+ self.assertEqual(version, b'HTTP/1.0')
self.assertEqual(status, 200)
- self.assertEqual(reason, 'OK')
+ self.assertEqual(reason, b'OK')
self.assertEqual(inst.part, inst.headers)
def test_status_line_not_200(self):
inst = self._makeOne()
- inst.buffer = 'HTTP/1.0 201 OK'
+ inst.buffer = b'HTTP/1.0 201 OK'
closed = []
inst.close = lambda: closed.append(True)
version, status, reason = inst.status_line()
- self.assertEqual(version, 'HTTP/1.0')
+ self.assertEqual(version, b'HTTP/1.0')
self.assertEqual(status, 201)
- self.assertEqual(reason, 'OK')
+ self.assertEqual(reason, b'OK')
self.assertEqual(inst.part, inst.ignore)
self.assertEqual(
inst.listener.error_msg,
@@ -274,8 +276,8 @@ class HTTPHandlerTests(unittest.TestCase):
def test_headers_empty_line_nonchunked(self):
inst = self._makeOne()
- inst.buffer = ''
- inst.encoding = 'not chunked'
+ inst.buffer = b''
+ inst.encoding = b'not chunked'
inst.length = 3
terms = []
inst.set_terminator = lambda L: terms.append(L)
@@ -285,47 +287,47 @@ class HTTPHandlerTests(unittest.TestCase):
def test_headers_empty_line_chunked(self):
inst = self._makeOne()
- inst.buffer = ''
- inst.encoding = 'chunked'
+ inst.buffer = b''
+ inst.encoding = b'chunked'
inst.headers()
self.assertEqual(inst.part, inst.chunked_size)
def test_headers_nonempty_line_no_name_no_value(self):
inst = self._makeOne()
- inst.buffer = ':'
+ inst.buffer = b':'
self.assertEqual(inst.headers(), None)
def test_headers_nonempty_line_transfer_encoding(self):
inst = self._makeOne()
- inst.buffer = 'Transfer-Encoding: chunked'
+ inst.buffer = b'Transfer-Encoding: chunked'
responses = []
inst.response_header = lambda n, v: responses.append((n, v))
inst.headers()
- self.assertEqual(inst.encoding, 'chunked')
- self.assertEqual(responses, [('transfer-encoding', 'chunked')])
+ self.assertEqual(inst.encoding, b'chunked')
+ self.assertEqual(responses, [(b'transfer-encoding', b'chunked')])
def test_headers_nonempty_line_content_length(self):
inst = self._makeOne()
- inst.buffer = 'Content-Length: 3'
+ inst.buffer = b'Content-Length: 3'
responses = []
inst.response_header = lambda n, v: responses.append((n, v))
inst.headers()
self.assertEqual(inst.length, 3)
- self.assertEqual(responses, [('content-length', '3')])
+ self.assertEqual(responses, [(b'content-length', b'3')])
def test_headers_nonempty_line_arbitrary(self):
inst = self._makeOne()
- inst.buffer = 'X-Test: abc'
+ inst.buffer = b'X-Test: abc'
responses = []
inst.response_header = lambda n, v: responses.append((n, v))
inst.headers()
- self.assertEqual(responses, [('x-test', 'abc')])
+ self.assertEqual(responses, [(b'x-test', b'abc')])
def test_response_header(self):
inst = self._makeOne()
- inst.response_header('a', 'b')
- self.assertEqual(inst.listener.response_header_name, 'a')
- self.assertEqual(inst.listener.response_header_value, 'b')
+ inst.response_header(b'a', b'b')
+ self.assertEqual(inst.listener.response_header_name, b'a')
+ self.assertEqual(inst.listener.response_header_value, b'b')
def test_body(self):
inst = self._makeOne()
@@ -342,14 +344,14 @@ class HTTPHandlerTests(unittest.TestCase):
def test_chunked_size_empty_line(self):
inst = self._makeOne()
- inst.buffer = ''
+ inst.buffer = b''
inst.length = 1
self.assertEqual(inst.chunked_size(), None)
self.assertEqual(inst.length, 1)
def test_chunked_size_zero_size(self):
inst = self._makeOne()
- inst.buffer = '0'
+ inst.buffer = b'0'
inst.length = 1
self.assertEqual(inst.chunked_size(), None)
self.assertEqual(inst.length, 1)
@@ -357,7 +359,7 @@ class HTTPHandlerTests(unittest.TestCase):
def test_chunked_size_nonzero_size(self):
inst = self._makeOne()
- inst.buffer = '10'
+ inst.buffer = b'10'
inst.length = 1
terms = []
inst.set_terminator = lambda sz: terms.append(sz)
@@ -369,19 +371,19 @@ class HTTPHandlerTests(unittest.TestCase):
def test_chunked_body(self):
from supervisor.http_client import CRLF
inst = self._makeOne()
- inst.buffer = 'buffer'
+ inst.buffer = b'buffer'
terms = []
lines = []
inst.set_terminator = lambda v: terms.append(v)
inst.feed = lambda v: lines.append(v)
inst.chunked_body()
self.assertEqual(terms, [CRLF])
- self.assertEqual(lines, ['buffer'])
+ self.assertEqual(lines, [b'buffer'])
self.assertEqual(inst.part, inst.chunked_size)
def test_trailer_line_not_crlf(self):
inst = self._makeOne()
- inst.buffer = ''
+ inst.buffer = b''
self.assertEqual(inst.trailer(), None)
def test_trailer_line_crlf(self):
diff --git a/supervisor/tests/test_loggers.py b/supervisor/tests/test_loggers.py
index 12e4e03..47c0abb 100644
--- a/supervisor/tests/test_loggers.py
+++ b/supervisor/tests/test_loggers.py
@@ -99,28 +99,28 @@ class BareHandlerTests(HandlerTests, unittest.TestCase):
def test_emit_gardenpath(self):
stream = DummyStream()
inst = self._makeOne(stream=stream)
- record = self._makeLogRecord('foo')
+ record = self._makeLogRecord(b'foo')
inst.emit(record)
self.assertEqual(stream.flushed, True)
- self.assertEqual(stream.written, 'foo')
+ self.assertEqual(stream.written, b'foo')
def test_emit_unicode_error(self):
stream = DummyStream(error=UnicodeError)
inst = self._makeOne(stream=stream)
- record = self._makeLogRecord('foo')
+ record = self._makeLogRecord(b'foo')
inst.emit(record)
self.assertEqual(stream.flushed, True)
- self.assertEqual(stream.written, 'foo')
+ self.assertEqual(stream.written, b'foo')
def test_emit_other_error(self):
- stream = DummyStream(error=TypeError)
+ stream = DummyStream(error=ValueError)
inst = self._makeOne(stream=stream)
handled = []
inst.handleError = lambda: handled.append(True)
- record = self._makeLogRecord('foo')
+ record = self._makeLogRecord(b'foo')
inst.emit(record)
self.assertEqual(stream.flushed, False)
- self.assertEqual(stream.written, '')
+ self.assertEqual(stream.written, b'')
class FileHandlerTests(HandlerTests, unittest.TestCase):
def _getTargetClass(self):
@@ -130,7 +130,7 @@ class FileHandlerTests(HandlerTests, unittest.TestCase):
def test_ctor(self):
handler = self._makeOne(self.filename)
self.assertTrue(os.path.exists(self.filename), self.filename)
- self.assertEqual(handler.mode, 'a')
+ self.assertEqual(handler.mode, 'ab')
self.assertEqual(handler.baseFilename, self.filename)
self.assertEqual(handler.stream.name, self.filename)
handler.close()
@@ -190,15 +190,15 @@ class FileHandlerTests(HandlerTests, unittest.TestCase):
def test_emit_ascii_noerror(self):
handler = self._makeOne(self.filename)
- record = self._makeLogRecord('hello!')
+ record = self._makeLogRecord(b'hello!')
handler.emit(record)
handler.close()
- with open(self.filename, 'r') as f:
- self.assertEqual(f.read(), 'hello!')
+ with open(self.filename, 'rb') as f:
+ self.assertEqual(f.read(), b'hello!')
def test_emit_unicode_noerror(self):
handler = self._makeOne(self.filename)
- record = self._makeLogRecord(as_string(b'fi\xc3\xad'))
+ record = self._makeLogRecord(b'fi\xc3\xad')
handler.emit(record)
handler.close()
with open(self.filename, 'rb') as f:
@@ -208,7 +208,7 @@ class FileHandlerTests(HandlerTests, unittest.TestCase):
handler = self._makeOne(self.filename)
handler.stream.close()
handler.stream = DummyStream(error=OSError)
- record = self._makeLogRecord('hello!')
+ record = self._makeLogRecord(b'hello!')
try:
old_stderr = sys.stderr
dummy_stderr = DummyStream()
@@ -217,7 +217,7 @@ class FileHandlerTests(HandlerTests, unittest.TestCase):
finally:
sys.stderr = old_stderr
- self.assertTrue(dummy_stderr.written.endswith('OSError\n'),
+ self.assertTrue(dummy_stderr.written.endswith(b'OSError\n'),
dummy_stderr.written)
if os.path.exists('/dev/stdout'):
@@ -231,7 +231,7 @@ class StdoutTests(StdoutTestsBase):
handler = self._makeOne('/dev/stdout')
# Modes 'w' and 'a' have the same semantics when applied to
# character device files and fifos.
- self.assertTrue(handler.mode in ['w', 'a'], handler.mode)
+ self.assertTrue(handler.mode in ['wb', 'ab'], handler.mode)
self.assertEqual(handler.baseFilename, '/dev/stdout')
self.assertEqual(handler.stream.name, '/dev/stdout')
handler.close()
@@ -244,14 +244,14 @@ class RotatingFileHandlerTests(FileHandlerTests):
def test_ctor(self):
handler = self._makeOne(self.filename)
- self.assertEqual(handler.mode, 'a')
+ self.assertEqual(handler.mode, 'ab')
self.assertEqual(handler.maxBytes, 512*1024*1024)
self.assertEqual(handler.backupCount, 10)
handler.close()
def test_emit_does_rollover(self):
handler = self._makeOne(self.filename, maxBytes=10, backupCount=2)
- record = self._makeLogRecord('a' * 4)
+ record = self._makeLogRecord(b'a' * 4)
handler.emit(record) # 4 bytes
self.assertFalse(os.path.exists(self.filename + '.1'))
@@ -282,18 +282,18 @@ class RotatingFileHandlerTests(FileHandlerTests):
self.assertTrue(os.path.exists(self.filename + '.1'))
self.assertTrue(os.path.exists(self.filename + '.2'))
- with open(self.filename, 'r') as f:
- self.assertEqual(f.read(), 'a' * 4)
+ with open(self.filename, 'rb') as f:
+ self.assertEqual(f.read(), b'a' * 4)
- with open(self.filename+'.1', 'r') as f:
- self.assertEqual(f.read(), 'a' * 12)
+ with open(self.filename+'.1', 'rb') as f:
+ self.assertEqual(f.read(), b'a' * 12)
- with open(self.filename+'.2', 'r') as f:
- self.assertEqual(f.read(), 'a' * 12)
+ with open(self.filename+'.2', 'rb') as f:
+ self.assertEqual(f.read(), b'a' * 12)
def test_current_logfile_removed(self):
handler = self._makeOne(self.filename, maxBytes=6, backupCount=1)
- record = self._makeLogRecord('a' * 4)
+ record = self._makeLogRecord(b'a' * 4)
handler.emit(record) # 4 bytes
self.assertTrue(os.path.exists(self.filename))
@@ -388,23 +388,23 @@ class BoundIOTests(unittest.TestCase):
return klass(maxbytes, buf)
def test_write_overflow(self):
- io = self._makeOne(1, 'a')
- io.write('b')
- self.assertEqual(io.buf, 'b')
+ io = self._makeOne(1, b'a')
+ io.write(b'b')
+ self.assertEqual(io.buf, b'b')
def test_getvalue(self):
- io = self._makeOne(1, 'a')
- self.assertEqual(io.getvalue(), 'a')
+ io = self._makeOne(1, b'a')
+ self.assertEqual(io.getvalue(), b'a')
def test_clear(self):
- io = self._makeOne(1, 'a')
+ io = self._makeOne(1, b'a')
io.clear()
- self.assertEqual(io.buf, '')
+ self.assertEqual(io.buf, b'')
def test_close(self):
- io = self._makeOne(1, 'a')
+ io = self._makeOne(1, b'a')
io.close()
- self.assertEqual(io.buf, '')
+ self.assertEqual(io.buf, b'')
class LoggerTests(unittest.TestCase):
def _getTargetClass(self):
diff --git a/supervisor/tests/test_xmlrpc.py b/supervisor/tests/test_xmlrpc.py
index e30b4d7..2b4da32 100644
--- a/supervisor/tests/test_xmlrpc.py
+++ b/supervisor/tests/test_xmlrpc.py
@@ -413,7 +413,7 @@ class SupervisorTransportTests(unittest.TestCase):
self.assertEqual(dummy_conn.closed, True)
self.assertEqual(dummy_conn.requestargs[0], 'POST')
self.assertEqual(dummy_conn.requestargs[1], '/')
- self.assertEqual(dummy_conn.requestargs[2], '')
+ self.assertEqual(dummy_conn.requestargs[2], b'')
self.assertEqual(dummy_conn.requestargs[3]['Content-Length'], '0')
self.assertEqual(dummy_conn.requestargs[3]['Content-Type'], 'text/xml')
self.assertEqual(dummy_conn.requestargs[3]['Authorization'],
@@ -439,7 +439,7 @@ class SupervisorTransportTests(unittest.TestCase):
self.assertEqual(dummy_conn.closed, False)
self.assertEqual(dummy_conn.requestargs[0], 'POST')
self.assertEqual(dummy_conn.requestargs[1], '/')
- self.assertEqual(dummy_conn.requestargs[2], '')
+ self.assertEqual(dummy_conn.requestargs[2], b'')
self.assertEqual(dummy_conn.requestargs[3]['Content-Length'], '0')
self.assertEqual(dummy_conn.requestargs[3]['Content-Type'], 'text/xml')
self.assertEqual(dummy_conn.requestargs[3]['Authorization'],
diff --git a/supervisor/xmlrpc.py b/supervisor/xmlrpc.py
index 1c9f43f..a26c52d 100644
--- a/supervisor/xmlrpc.py
+++ b/supervisor/xmlrpc.py
@@ -1,10 +1,10 @@
-import types
+import datetime
import re
-import traceback
import socket
import sys
-import datetime
import time
+import traceback
+import types
from xml.etree.ElementTree import iterparse
from supervisor.compat import xmlrpclib
@@ -16,6 +16,7 @@ from supervisor.compat import as_string
from supervisor.compat import encodestring
from supervisor.compat import decodestring
from supervisor.compat import httplib
+from supervisor.compat import PY3
from supervisor.medusa.http_server import get_header
from supervisor.medusa.xmlrpc_handler import xmlrpc_handler
@@ -382,6 +383,12 @@ class supervisor_xmlrpc_handler(xmlrpc_handler):
try:
try:
+ # on 2.x, the Expat parser doesn't like Unicode which actually
+ # contains non-ASCII characters. It's a bit of a kludge to
+ # do it conditionally here, but it's down to how underlying
+ # libs behave
+ if not PY3:
+ data = data.encode('ascii', 'xmlcharrefreplace')
params, method = self.loads(data)
except:
logger.error(
@@ -427,7 +434,7 @@ class supervisor_xmlrpc_handler(xmlrpc_handler):
# if we get anything but a function, it implies that this
# response doesn't need to be deferred, we can service it
# right away.
- body = xmlrpc_marshal(value)
+ body = as_bytes(xmlrpc_marshal(value))
request['Content-Type'] = 'text/xml'
request['Content-Length'] = len(body)
request.push(body)
@@ -499,6 +506,7 @@ class SupervisorTransport(xmlrpclib.Transport):
raise ValueError('Unknown protocol for serverurl %s' % serverurl)
def request(self, host, handler, request_body, verbose=0):
+ request_body = as_bytes(request_body)
if not self.connection:
self.connection = self._get_connection()
self.headers = {
@@ -529,6 +537,10 @@ class SupervisorTransport(xmlrpclib.Transport):
r.reason,
'' )
data = r.read()
+ data = as_string(data)
+ # on 2.x, the Expat parser doesn't like Unicode which actually
+ # contains non-ASCII characters
+ data = data.encode('ascii', 'xmlcharrefreplace')
p, u = self.getparser()
p.feed(data)
p.close()
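
For reference, a minimal sketch (illustrative only, not part of the patch) of what the xmlcharrefreplace step in the supervisor/xmlrpc.py hunks above does: non-ASCII text in an XML-RPC payload is reduced to pure-ASCII character references, which the 2.x Expat parser accepts and which still round-trip to the original text when parsed. The payload string below is made up for the example.

# -*- coding: utf-8 -*-
# Illustrative sketch; not part of the patch above.
from __future__ import unicode_literals

from xml.etree.ElementTree import fromstring

payload = '<value><string>The Øresund bridge ends in Malmö</string></value>'

# The step the patch applies before feeding data to the XML parser:
ascii_payload = payload.encode('ascii', 'xmlcharrefreplace')
print(ascii_payload)
# On Python 3 this prints:
# b'<value><string>The &#216;resund bridge ends in Malm&#246;</string></value>'

# The character references resolve back to the original text when parsed,
# so the conversion is lossless for the XML-RPC machinery.
element = fromstring(ascii_payload)
assert element.find('string').text == 'The Øresund bridge ends in Malmö'
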
Results of manual testing for the issues tagged with "python 3":
Issue 836
---------
Server:
$ supervisord -c ~/Documents/issue-836.conf
2017-01-18 08:32:58,611 INFO RPC interface 'supervisor' initialized
2017-01-18 08:32:58,611 CRIT Server 'inet_http_server' running without any HTTP authentication checking
2017-01-18 08:32:58,611 INFO supervisord started with pid 3765
2017-01-18 08:32:59,617 INFO spawned: 'cat' with pid 3768
2017-01-18 08:33:00,622 INFO success: cat entered RUNNING state, process has stayed up for > than 1 seconds (startsecs)
2017-01-18 08:33:46,972 DEBG 'cat' stdout output:
Hi
2017-01-18 08:33:55,160 DEBG 'cat' stdout output:
Hello
2017-01-18 08:34:44,448 DEBG 'cat' stdout output:
The Øresund bridge ends in Malmö
Client:
$ supervisorctl -c ~/Documents/issue-836.conf fg cat
==> Press Ctrl-C to exit <==
Hi
Hi
Hello
Hello
The Øresund bridge ends in Malmö
The Øresund bridge ends in Malmö
Issue 835
---------
Server:
$ supervisord -c ~/Documents/issue-836.conf
2017-01-18 08:37:02,400 INFO RPC interface 'supervisor' initialized
2017-01-18 08:37:02,400 CRIT Server 'inet_http_server' running without any HTTP authentication checking
2017-01-18 08:37:02,400 INFO supervisord started with pid 3881
2017-01-18 08:37:03,407 INFO spawned: 'cat' with pid 3884
2017-01-18 08:37:04,412 INFO success: cat entered RUNNING state, process has stayed up for > than 1 seconds (startsecs)
2017-01-18 08:38:02,152 DEBG 'cat' stdout output:
hello
Client:
$ python3
Python 3.5.2 (default, Nov 17 2016, 17:05:23)
[GCC 5.4.0 20160609] on linux
Type "help", "copyright", "credits" or "license" for more information.
>>> from xmlrpc.client import ServerProxy
>>> server = ServerProxy('http://127.0.0.1:9001/RPC2', verbose=True)
>>> server.supervisor.sendProcessStdin('cat', 'hello')
send: b'POST /RPC2 HTTP/1.1\r\nHost: 127.0.0.1:9001\r\nAccept-Encoding: gzip\r\nContent-Type: text/xml\r\nUser-Agent: Python-xmlrpc/3.5\r\nContent-Length: 229\r\n\r\n'
send: b"<?xml version='1.0'?>\n<methodCall>\n<methodName>supervisor.sendProcessStdin</methodName>\n<params>\n<param>\n<value><string>cat</string></value>\n</param>\n<param>\n<value><string>hello</string></value>\n</param>\n</params>\n</methodCall>\n"
reply: 'HTTP/1.1 200 OK\r\n'
header: Content-Type header: Server header: Content-Length header: Date body: b"<?xml version='1.0'?>\n<methodResponse>\n<params>\n<param>\n<value><boolean>1</boolean></value>\n</param>\n</params>\n</methodResponse>\n"
True
>>>
Issue 664
---------
Server:
$ supervisord -c ~/Documents/issue-664.conf
2017-01-18 08:40:24,241 INFO RPC interface 'supervisor' initialized
2017-01-18 08:40:24,241 CRIT Server 'inet_http_server' running without any HTTP authentication checking
2017-01-18 08:40:24,241 INFO supervisord started with pid 3931
2017-01-18 08:40:25,247 INFO spawned: 'test_öäü' with pid 3934
2017-01-18 08:40:26,252 INFO success: test_öäü entered RUNNING state, process has stayed up for > than 1 seconds (startsecs)
Client:
$ supervisorctl -c ~/Documents/issue-664.conf status
test_öäü RUNNING pid 3934, uptime 0:01:00
Issue 663
---------
Server:
$ supervisord -c ~/Documents/issue-663.conf
2017-01-18 08:44:46,801 INFO RPC interface 'supervisor' initialized
2017-01-18 08:44:46,801 CRIT Server 'inet_http_server' running without any HTTP authentication checking
2017-01-18 08:44:46,801 INFO supervisord started with pid 4002
2017-01-18 08:44:47,807 INFO spawned: 'listener' with pid 4005
2017-01-18 08:44:47,856 DEBG 'listener' stdout output:
b'READY\n'
2017-01-18 08:44:47,857 DEBG listener: ACKNOWLEDGED -> READY
2017-01-18 08:44:48,859 INFO success: listener entered RUNNING state, process has stayed up for > than 1 seconds (startsecs)
2017-01-18 08:44:51,867 DEBG event 0 sent to listener listener
2017-01-18 08:44:51,869 DEBG 'listener' stderr output:
ver:3.0 server:supervisor serial:0 pool:listener poolserial:0 eventname:TICK_5 len:15
when:1484729090
2017-01-18 08:44:51,869 DEBG 'listener' stdout output:
b'RESULT 2\nOKREADY\n'
2017-01-18 08:44:51,869 DEBG listener: event was processed
2017-01-18 08:44:51,869 DEBG listener: BUSY -> ACKNOWLEDGED
2017-01-18 08:44:51,870 DEBG listener: ACKNOWLEDGED -> READY
2017-01-18 08:44:56,880 DEBG event 1 sent to listener listener
2017-01-18 08:44:56,881 DEBG 'listener' stderr output:
ver:3.0 server:supervisor serial:1 pool:listener poolserial:1 eventname:TICK_5 len:15
when:1484729095
2017-01-18 08:44:56,881 DEBG 'listener' stdout output:
b'RESULT 2\nOKREADY\n'
2017-01-18 08:44:56,882 DEBG listener: event was processed
2017-01-18 08:44:56,882 DEBG listener: BUSY -> ACKNOWLEDGED
2017-01-18 08:44:56,882 DEBG listener: ACKNOWLEDGED -> READY
2017-01-18 08:45:01,893 DEBG event 2 sent to listener listener
2017-01-18 08:45:01,914 DEBG 'listener' stderr output:
ver:3.0 server:supervisor serial:2 pool:listener poolserial:2 eventname:TICK_5 len:15
when:1484729100
2017-01-18 08:45:01,915 DEBG 'listener' stdout output:
b'RESULT 2\nOKREADY\n'
2017-01-18 08:45:01,915 DEBG listener: event was processed
2017-01-18 08:45:01,915 DEBG listener: BUSY -> ACKNOWLEDGED
2017-01-18 08:45:01,915 DEBG listener: ACKNOWLEDGED -> READY
2017-01-18 08:45:06,924 DEBG event 3 sent to listener listener
2017-01-18 08:45:06,926 DEBG 'listener' stderr output:
ver:3.0 server:supervisor serial:3 pool:listener poolserial:3 eventname:TICK_5 len:15
when:1484729105
2017-01-18 08:45:06,927 DEBG 'listener' stdout output:
b'RESULT 2\nOKREADY\n'
2017-01-18 08:45:06,927 DEBG listener: event was processed
2017-01-18 08:45:06,927 DEBG listener: BUSY -> ACKNOWLEDGED
2017-01-18 08:45:06,927 DEBG listener: ACKNOWLEDGED -> READY
Client:
None
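
The listener script used by the issue-663 fixture is not reproduced here; as a rough sketch (assumed, not the actual fixture), an event listener built on supervisor.childutils would produce the b'READY\n' and b'RESULT 2\nOK' lines seen in the log above along these lines:

# Sketch of an event listener (assumed; not the actual issue-663 fixture).
import sys

from supervisor import childutils

def main():
    while True:
        # wait() writes the READY token to stdout and blocks until
        # supervisord delivers an event, returning its headers and payload.
        headers, payload = childutils.listener.wait(sys.stdin, sys.stdout)
        # Anything written to stderr shows up as 'listener' stderr output
        # in the supervisord log; the real fixture appears to echo the raw
        # event there, here we just print the parsed headers.
        sys.stderr.write('%s len:%s\n' % (headers, len(payload)))
        sys.stderr.flush()
        # ok() writes "RESULT 2\nOK" to stdout, acknowledging the event.
        childutils.listener.ok(sys.stdout)

if __name__ == '__main__':
    main()
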
Issue 638
---------
Server:
$ supervisord -c ~/Documents/issue-638.conf
2017-01-18 08:47:13,745 INFO RPC interface 'supervisor' initialized
2017-01-18 08:47:13,746 CRIT Server 'unix_http_server' running without any HTTP authentication checking
2017-01-18 08:47:13,746 INFO supervisord started with pid 4050
2017-01-18 08:47:14,753 INFO spawned: 'produce-unicode-error' with pid 4053
2017-01-18 08:47:14,816 DEBG 'produce-unicode-error' stdout output:
Undecodable: b'\x88\n'
2017-01-18 08:47:14,817 DEBG fd 9 closed, stopped monitoring <POutputDispatcher at 140685302247832 for <Subprocess at 140685302557552 with name produce-unicode-error in state STARTING> (stderr)>
2017-01-18 08:47:14,817 DEBG fd 7 closed, stopped monitoring <POutputDispatcher at 140685302230320 for <Subprocess at 140685302557552 with name produce-unicode-error in state STARTING> (stdout)>
2017-01-18 08:47:14,818 INFO exited: produce-unicode-error (exit status 0; not expected)
2017-01-18 08:47:14,818 DEBG received SIGCHLD indicating a child quit
2017-01-18 08:47:15,825 INFO spawned: 'produce-unicode-error' with pid 4054
2017-01-18 08:47:15,836 DEBG 'produce-unicode-error' stdout output:
Undecodable: b'\x88\n'
2017-01-18 08:47:15,844 DEBG fd 7 closed, stopped monitoring <POutputDispatcher at 140685302247832 for <Subprocess at 140685302557552 with name produce-unicode-error in state STARTING> (stdout)>
2017-01-18 08:47:15,845 DEBG fd 9 closed, stopped monitoring <POutputDispatcher at 140685302230376 for <Subprocess at 140685302557552 with name produce-unicode-error in state STARTING> (stderr)>
2017-01-18 08:47:15,845 INFO exited: produce-unicode-error (exit status 0; not expected)
2017-01-18 08:47:15,846 DEBG received SIGCHLD indicating a child quit
2017-01-18 08:47:17,855 INFO spawned: 'produce-unicode-error' with pid 4055
2017-01-18 08:47:17,865 DEBG 'produce-unicode-error' stdout output:
Undecodable: b'\x88\n'
2017-01-18 08:47:17,870 DEBG fd 9 closed, stopped monitoring <POutputDispatcher at 140685302247832 for <Subprocess at 140685302557552 with name produce-unicode-error in state STARTING> (stderr)>
2017-01-18 08:47:17,871 DEBG fd 7 closed, stopped monitoring <POutputDispatcher at 140685302230096 for <Subprocess at 140685302557552 with name produce-unicode-error in state STARTING> (stdout)>
2017-01-18 08:47:17,872 INFO exited: produce-unicode-error (exit status 0; not expected)
2017-01-18 08:47:17,872 DEBG received SIGCHLD indicating a child quit
2017-01-18 08:47:20,882 INFO spawned: 'produce-unicode-error' with pid 4057
2017-01-18 08:47:20,894 DEBG 'produce-unicode-error' stdout output:
Undecodable: b'\x88\n'
2017-01-18 08:47:20,897 DEBG fd 9 closed, stopped monitoring <POutputDispatcher at 140685302230320 for <Subprocess at 140685302557552 with name produce-unicode-error in state STARTING> (stderr)>
2017-01-18 08:47:20,898 DEBG fd 7 closed, stopped monitoring <POutputDispatcher at 140685302247944 for <Subprocess at 140685302557552 with name produce-unicode-error in state STARTING> (stdout)>
2017-01-18 08:47:20,899 INFO exited: produce-unicode-error (exit status 0; not expected)
2017-01-18 08:47:20,900 DEBG received SIGCHLD indicating a child quit
2017-01-18 08:47:21,902 INFO gave up: produce-unicode-error entered FATAL state, too many start retries too quickly
Client:
None
Issue 565
---------
Server:
$ supervisord -c ~/Documents/issue-565.conf
2017-01-18 08:48:24,536 INFO RPC interface 'supervisor' initialized
2017-01-18 08:48:24,536 CRIT Server 'unix_http_server' running without any HTTP authentication checking
2017-01-18 08:48:24,536 INFO supervisord started with pid 4086
2017-01-18 08:48:25,542 INFO spawned: 'listener' with pid 4089
2017-01-18 08:48:25,550 INFO spawned: 'hello' with pid 4090
2017-01-18 08:48:26,589 INFO success: listener entered RUNNING state, process has stayed up for > than 1 seconds (startsecs)
2017-01-18 08:48:26,590 INFO success: hello entered RUNNING state, process has stayed up for > than 1 seconds (startsecs)
Client:
$ supervisorctl -c ~/Documents/issue-565.conf tail -f hello
==> Press Ctrl-C to exit <==
dge ends in Malmö - 44
The Øresund bridge ends in Malmö - 45
The Øresund bridge ends in Malmö - 46
The Øresund bridge ends in Malmö - 47
The Øresund bridge ends in Malmö - 48
The Øresund bridge ends in Malmö - 49
The Øresund bridge ends in Malmö - 50
The Øresund bridge ends in Malmö - 51
The Øresund bridge ends in Malmö - 52
The Øresund bridge ends in Malmö - 53
The Øresund bridge ends in Malmö - 54
The Øresund bridge ends in Malmö - 55
The Øresund bridge ends in Malmö - 56
The Øresund bridge ends in Malmö - 57
The Øresund bridge ends in Malmö - 58
The Øresund bridge ends in Malmö - 59
The Øresund bridge ends in Malmö - 60
The Øresund bridge ends in Malmö - 61
The Øresund bridge ends in Malmö - 62
The Øresund bridge ends in Malmö - 63
The Øresund bridge ends in Malmö - 64
The Øresund bridge ends in Malmö - 65
The Øresund bridge ends in Malmö - 66
The Øresund bridge ends in Malmö - 67
The Øresund bridge ends in Malmö - 68
The Øresund bridge ends in Malmö - 69
The Øresund bridge ends in Malmö - 70
The Øresund bridge ends in Malmö - 71
The Øresund bridge ends in Malmö - 72
The Øresund bridge ends in Malmö - 73
^C