@daniellawrence
Created October 17, 2013 10:54
Queuing Fabric tasks and streaming their output as it happens, using Celery and Redis. A little bit of evil with tempfiles and stdout redirection.
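Two files make up the gist: tasks.py defines the Celery worker task that runs the Fabric commands with its stdout redirected into a tempfile, and test.py queues the task and tails that tempfile until the task finishes. The bare 'redis://' broker/backend URLs mean Celery expects a Redis server on the default localhost:6379.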
# tasks.py
# RUN ME, I am the worker!
# $ pip install fabric celery-with-redis
# $ celery -A tasks worker -E --loglevel=debug
import sys
from time import sleep

from celery import Celery
from fabric.api import env, run

celery = Celery('tasks', broker='redis://', backend='redis://')


@celery.task(track_started=True)
def run_commands(output_filename):
    # Open the tempfile unbuffered so the client sees each line as
    # soon as Fabric writes it, not whenever the buffer flushes.
    output = open(output_filename, 'w', 0)
    env.host_string = 'localhost'
    # The evil part: point the whole worker's stdout at the tempfile
    # so everything Fabric prints lands where the client can read it.
    sys.stdout = output
    try:
        sleep(1)
        run("uname;uname;uname;uname;uname")
        sleep(2)
        run("uname -a")
        run("sleep 2")
        run("tty")
        sleep(2)
        run("echo DONE!")
    finally:
        # Always restore the real stdout, even if a command fails.
        sys.stdout = sys.__stdout__
        output.close()
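Swapping sys.stdout is process-wide, so any other output in the worker lands in the tempfile too. A less invasive sketch, assuming a Fabric 1.x version (1.5 or later) whose run() accepts stdout=/stderr= keyword arguments; the task name run_commands_scoped is made up for illustration:

# A less evil variant (sketch): assumes Fabric 1.5+, where run()
# takes a stdout= keyword, so only Fabric's own output is redirected
# rather than the whole worker process's stdout.
@celery.task(track_started=True)
def run_commands_scoped(output_filename):
    output = open(output_filename, 'w', 0)  # unbuffered, as above
    env.host_string = 'localhost'
    try:
        run("uname -a", stdout=output)
        run("echo DONE!", stdout=output)
    finally:
        output.close()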
#!/usr/bin/env python
# test.py
# RUN ME, I queue the task then read the output
# $ python ./test.py
from time import sleep
from tempfile import NamedTemporaryFile

from tasks import run_commands

output = NamedTemporaryFile()
print "created output file: %s" % output.name

# Queue up a new task.
task = run_commands.delay(output_filename=output.name)

print "printing fabfile output as it happens"
# Keep looping until the task has finished.
while True:
    new_printable_output = output.read()
    if new_printable_output:
        print new_printable_output
    if task.state not in ['PENDING', 'STARTED']:
        break
    sleep(.5)

print "*** Fabric task has finished ***"
print "-" * 40
print "- printing all of the output from the fabric task -"
print "-" * 40
output.seek(0)
print output.read()
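The loop above compares task.state against Celery's state strings by hand; AsyncResult.ready(), which is part of Celery's public API, expresses the same condition more directly. A sketch of the same loop, with explicit tempfile cleanup:

# Same polling loop using task.ready() instead of comparing state
# strings; ready() is True once the task has succeeded or failed.
while not task.ready():
    chunk = output.read()
    if chunk:
        print chunk
    sleep(.5)
print output.read()  # drain anything written after the last poll
output.close()       # NamedTemporaryFile deletes the file on close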