@joedougherty
Last active September 9, 2019 15:07
shell out...in parallel
from multiprocessing import Pool
import subprocess
import sys


def run(args):
    cmd, echo = args
    process = subprocess.Popen(
        cmd,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        shell=True,
    )
    stdout, stderr = process.communicate()

    if echo:
        # Since the stdout/stderr streams for each process are being captured,
        # we'll need to explicitly write them out if we want console output.
        sys.stdout.write(stdout.decode())
        sys.stdout.flush()
        sys.stderr.write(stderr.decode())
        sys.stderr.flush()

    return {
        'cmd': cmd,
        'returncode': process.returncode,
        'stdout': stdout.decode(),
        'stderr': stderr.decode(),
    }
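

# For comparison (an added sketch, not part of the original gist):
# on Python 3.5+ the same worker can use the higher-level
# subprocess.run(), which wraps the Popen()/communicate() pair above.
def run_alt(args):
    cmd, echo = args
    completed = subprocess.run(
        cmd,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        shell=True,
    )
    if echo:
        sys.stdout.write(completed.stdout.decode())
        sys.stderr.write(completed.stderr.decode())
    return {
        'cmd': cmd,
        'returncode': completed.returncode,
        'stdout': completed.stdout.decode(),
        'stderr': completed.stderr.decode(),
    }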


def autopool(commands, echo=False):
    # Initialize the Pool with the number of commands you intend to run.
    pool = Pool(len(commands))

    # Run the tasks!
    #
    # map() is a cool method.
    #
    # * The first arg is the function you want to run in parallel.
    # * The second arg is a list containing the argument(s)
    #   that get passed to run().
    #
    # SOURCES:
    # Pool() docs: https://docs.python.org/3.6/library/multiprocessing.html?highlight=process#multiprocessing.pool.Pool
    results = pool.map(run, [(c, echo) for c in commands])

    pool.close()
    pool.join()
    return results
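

# A variant (an added sketch, not part of the original gist): Pool also
# works as a context manager, which saves the explicit close()/join().
# Note that __exit__ calls terminate(), which is safe here only because
# map() blocks until every task has finished.
def autopool_ctx(commands, echo=False):
    with Pool(len(commands)) as pool:
        return pool.map(run, [(c, echo) for c in commands])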


# Now you can invoke multiple commands simultaneously.
#
# The __main__ guard keeps the module importable and is required on
# platforms where multiprocessing spawns a fresh interpreter for each
# worker (e.g. Windows).
if __name__ == '__main__':
    results = autopool(
        ['ls -al | grep py', 'echo hello', 'python test.py'],
        echo=False,
    )

    # "results" is a list -- one item per process run.
    # Each item is the dict returned by run(), so you can still inspect
    # the return code and stdout/stderr of each command.

    # Uncomment this to inspect results in IPython:
    # from IPython import embed
    # embed()
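
    # One way to act on the results (a minimal sketch added here, not
    # part of the original gist): report any command that exited nonzero.
    for r in results:
        if r['returncode'] != 0:
            sys.stderr.write(
                '{cmd!r} exited with code {returncode}:\n{stderr}'.format(**r)
            )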