from subprocess import Popen, PIPE
from time import sleep

# run the shell as a subprocess:
p = Popen(['python', 'shell.py'],
          stdin = PIPE, stdout = PIPE, stderr = PIPE, shell = False)
# issue command:
p.stdin.write('command\n')
# let the shell output the result:
sleep(0.1)
# get the output
while True:
    output = p.stdout.read()  # <-- Hangs here!
    if not output:
        print '[No more data]'
        break
    print output
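All three snippets here spawn a child script called shell.py, which is not included in this excerpt. A minimal stand-in, assumed purely for illustration, reads commands from stdin and echoes them back, flushing stdout so the parent can see the output:

# shell.py -- a hypothetical stand-in for the child process used above
# (the real shell.py is not shown in this excerpt).
import sys

while True:
    try:
        s = raw_input("Enter command: ")
    except EOFError:
        break  # the parent closed our stdin
    print "You entered:", s
    sys.stdout.flush()  # flush, or the parent may never see the output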
from subprocess import Popen, PIPE
from time import sleep
from fcntl import fcntl, F_GETFL, F_SETFL
from os import O_NONBLOCK, read

# run the shell as a subprocess:
p = Popen(['python', 'shell.py'],
          stdin = PIPE, stdout = PIPE, stderr = PIPE, shell = False)
# set the O_NONBLOCK flag of p.stdout file descriptor:
flags = fcntl(p.stdout, F_GETFL)  # get current p.stdout flags
fcntl(p.stdout, F_SETFL, flags | O_NONBLOCK)
# issue command:
p.stdin.write('command\n')
# let the shell output the result:
sleep(0.1)
# get the output
while True:
    try:
        print read(p.stdout.fileno(), 1024),
    except OSError:
        # the OS raises an exception if there is no data
        print '[No more data]'
        break
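Catching every OSError treats any read failure as the end of the data. A slightly stricter variant, sketched here as a continuation of the example above rather than part of the original, inspects errno so that only the "no data available yet" case ends the loop:

# Sketch: distinguish "no data yet" from a real read error when the
# descriptor is in O_NONBLOCK mode. EAGAIN/EWOULDBLOCK mean the pipe is
# simply empty right now; anything else is re-raised.
import errno
from os import read

while True:
    try:
        print read(p.stdout.fileno(), 1024),
    except OSError as e:
        if e.errno in (errno.EAGAIN, errno.EWOULDBLOCK):
            print '[No more data]'
            break
        raise  # a genuine error, not just an empty pipe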
from subprocess import Popen, PIPE
from time import sleep
from nbstreamreader import NonBlockingStreamReader as NBSR

# run the shell as a subprocess:
p = Popen(['python', 'shell.py'],
          stdin = PIPE, stdout = PIPE, stderr = PIPE, shell = False)
# wrap p.stdout with a NonBlockingStreamReader object:
nbsr = NBSR(p.stdout)
# issue command:
p.stdin.write('command\n')
# get the output
while True:
    output = nbsr.readline(0.1)  # 0.1 secs to let the shell output the result
    if not output:
        print '[No more data]'
        break
    print output
from threading import Thread
from Queue import Queue, Empty

class NonBlockingStreamReader:

    def __init__(self, stream):
        '''
        stream: the stream to read from.
                Usually a process' stdout or stderr.
        '''
        self._s = stream
        self._q = Queue()

        def _populateQueue(stream, queue):
            '''
            Collect lines from 'stream' and put them in 'queue'.
            '''
            while True:
                line = stream.readline()
                if line:
                    queue.put(line)
                else:
                    raise UnexpectedEndOfStream

        self._t = Thread(target = _populateQueue,
                         args = (self._s, self._q))
        self._t.daemon = True
        self._t.start()  # start collecting lines from the stream

    def readline(self, timeout = None):
        try:
            # block only if a timeout was given; with timeout=None this is
            # a non-blocking poll of the queue
            return self._q.get(block = timeout is not None,
                               timeout = timeout)
        except Empty:
            return None

class UnexpectedEndOfStream(Exception): pass
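Because each reader thread owns exactly one stream, capturing stderr as well just means creating a second instance. A short usage sketch, assumed here rather than taken from the original files:

# Sketch: one NonBlockingStreamReader per pipe. Each instance starts its
# own daemon thread, so stdout and stderr can be polled independently.
from subprocess import Popen, PIPE
from nbstreamreader import NonBlockingStreamReader as NBSR

p = Popen(['python', 'shell.py'], stdin = PIPE, stdout = PIPE, stderr = PIPE)
out = NBSR(p.stdout)
err = NBSR(p.stderr)

p.stdin.write('command\n')

line = out.readline(0.1)   # wait up to 0.1 s for a line of output
error = err.readline(0.1)  # likewise for stderr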
Nice! Thank you.
This is great ... what's the license? Under python3 I had to change the import statement in nbstreamreader.py to |
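Presumably the change referred to is the standard-library rename of the Queue module; a hedged guess at the adjusted import in nbstreamreader.py under Python 3:

# Python 3: the Queue module was renamed to queue.
from queue import Queue, Empty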
Thank you! You've saved my day!
Thank you so much
Stating the obvious: This will not work on Microsoft Windows ("No module named 'fcntl'").
Hi, so I was trying your code, and it ended without even waiting to get any input from the user. Something seems to be wrong with your code. Output I received:

Enter command:
You entered: command
[No more data]

Process finished with exit code 0
Same problem here, how can we use this code?
Hi @EyalAr, I used your code with reference in GPL2 code here:
Thanks for the code. It seems there is a small File Descriptor leak that is revealed when using multiprocessing. See below for example:
Thank you! You've saved me quite a bit of time😁