Skip to content

Instantly share code, notes, and snippets.

@clusterfudge
Created November 20, 2016 16:01
Show Gist options
  • Save clusterfudge/97e7fb4972bd7b68c4b00ed943d908da to your computer and use it in GitHub Desktop.
Save clusterfudge/97e7fb4972bd7b68c4b00ed943d908da to your computer and use it in GitHub Desktop.
import argparse
import sys
import redis
import json
from multiprocessing.pool import Pool
import datetime
import time
"""
Narcissist code
"""
def compute_narc(digits, cache=()):
    """Sum cache[d] for each digit d.

    With ``cache = [j ** L for j in range(11)]`` this computes the
    narcissistic sum of an L-digit row: sum(d**L for d in digits).

    The original used a mutable default (``cache=[]``); an empty tuple is
    equivalent observable behavior (IndexError for any non-empty digits)
    without the shared-mutable-default pitfall.
    """
    return sum(cache[digit] for digit in digits)
def int_to_sorted_digits(i):
    """Return the decimal digits of non-negative int i, sorted descending."""
    return [int(ch) for ch in sorted(str(i), reverse=True)]
def generate_digit_sorted_numbers(length, init=9):
    """Yield digit rows of the given length whose digits are non-increasing,
    counting down from [init]*length; the all-zero row is not yielded.

    NOTE(review): the SAME list object is yielded every iteration and
    mutated in place between yields — callers must consume (or copy) each
    row before advancing the generator.
    """
    # Start from the largest candidate: every digit equal to `init`.
    numbers = [init] * length
    while numbers[0] > 0:
        idx = -1  # always decrement the last digit first
        if numbers[idx] >= 0:
            # Current row is a valid digit sequence: emit it, then
            # decrement the last digit (possibly to -1, which triggers the
            # rollover branch on the next pass).
            yield numbers
            numbers[idx] -= 1
        else:
            # Last digit underflowed to -1: borrow from the digit just left
            # of the first -1, then copy that new value rightwards so the
            # non-increasing invariant holds.  Repeated underflows cascade
            # the borrow further left on subsequent iterations.
            rollover = numbers.index(-1)
            numbers[rollover - 1] -= 1
            cur = rollover
            while cur < length:
                numbers[cur] = numbers[cur - 1]
                cur += 1
def compare_lists(list1, list2):
    """Return True if the two lists have identical length and elements.

    The original hand-rolled an index loop using Python-2-only ``xrange``
    (a NameError on Python 3); list equality is exactly ``==``.
    """
    return list1 == list2
"""
Queueing code
"""
class RedisQueue(object):
    """Simple FIFO queue with a Redis list backend; items are JSON-encoded.

    Absconded from http://peter-hoffmann.com/2012/python-simple-queue-redis-queue.html
    """

    def __init__(self, name, namespace='queue', **redis_kwargs):
        """The default connection parameters are: host='localhost', port=6379, db=0"""
        self.__db = redis.Redis(**redis_kwargs)
        self.key = '%s:%s' % (namespace, name)

    def qsize(self):
        """Return the approximate size of the queue."""
        return self.__db.llen(self.key)

    def empty(self):
        """Return True if the queue is empty, False otherwise."""
        return self.qsize() == 0

    def put(self, item):
        """Put item into the queue (JSON-serialized)."""
        self.__db.rpush(self.key, json.dumps(item))

    def get(self, block=True, timeout=None):
        """Remove and return an item from the queue.

        If optional arg block is true and timeout is None (the default),
        block if necessary until an item is available.  Returns None when
        nothing could be fetched (blocking pop timed out, or non-blocking
        pop on an empty queue); the original fed None straight into
        json.loads and raised TypeError in those cases.
        """
        if block:
            item = self.__db.blpop(self.key, timeout=timeout)
            if item:
                # blpop returns a (key, value) pair; keep the value.
                item = item[1]
        else:
            item = self.__db.lpop(self.key)
        if item is None:
            return None
        return json.loads(item)

    def get_nowait(self):
        """Equivalent to get(False)."""
        return self.get(False)
def get_queues(args):
    """Build the (task, result) RedisQueue pair from parsed CLI args."""
    conn = dict(host=args.redis_host, port=args.redis_port)
    tasks = RedisQueue(name=args.task_queue_name, **conn)
    results = RedisQueue(name=args.result_queue_name, **conn)
    return tasks, results
"""
Producer code
"""
class Producer(object):
    """Generates narcissistic-number search tasks onto the task queue and
    drains/prints results from the result queue until all tasks are done."""

    def __init__(self, args):
        self.args = args                 # parsed CLI namespace
        self.start_ts = None             # wall-clock time run() started
        self.last_report = None          # timestamp of last progress report
        self.ops_since_last_report = 0   # results drained since last report
        self.tasks = 0                   # tasks queued but not yet resulted

    def run(self):
        """Enqueue one task per (length, prefix) partition, then wait for
        all results.

        Lengths up to partition_size become a single task; longer lengths
        are split by every non-increasing digit prefix of the excess
        length.  ``range`` replaces the original Python-2-only ``xrange``.
        """
        self.start_ts = time.time()
        self.last_report = self.start_ts
        task_queue, result_queue = get_queues(self.args)
        start_size = self.args.start_size
        end_size = self.args.end_size
        partition_size = self.args.partition_size
        for i in range(start_size, end_size):
            self.check_queues(task_queue, result_queue)
            if i <= partition_size:
                # Small enough to search in one task.
                task_queue.put({
                    'prefix': [],
                    'length': i
                })
                self.tasks += 1
            else:
                # Partition the search space by digit prefix.
                prefix_size = i - partition_size
                for row in generate_digit_sorted_numbers(prefix_size):
                    task_queue.put({
                        'prefix': row,
                        'length': i
                    })
                    self.tasks += 1
        self.check_queues(task_queue, result_queue)
        print("[{:8.3f}] {}".format(time.time() - self.start_ts, "Completed generating tasks."))
        while self.tasks > 0:
            self.check_queues(task_queue, result_queue)
            time.sleep(10)

    def check_queues(self, task_queue, result_queue):
        """Drain available results, print any narcissistic numbers found,
        and emit a progress report to stderr at most once a minute."""
        queue_size = task_queue.qsize()
        now = time.time()
        if self.last_report < now - 60:
            # Original divided by a hard-coded 60.0; the actual interval
            # can be much longer (e.g. while generating tasks), which
            # over-reported the rate.
            elapsed = now - self.last_report
            sys.stderr.write(str(datetime.datetime.now()) + "\n")
            sys.stderr.write("Queued Tasks: " + str(queue_size) + "\n")
            sys.stderr.write("Remaining Tasks: " + str(self.tasks) + "\n")
            sys.stderr.write("Ops: " + str(self.ops_since_last_report / elapsed) + "/s\n\n")
            self.ops_since_last_report = 0
            self.last_report = time.time()
        available_results = result_queue.qsize()
        for i in range(available_results):
            result_list = result_queue.get()
            self.tasks -= 1
            self.ops_since_last_report += 1
            for narc in result_list:
                print("[{:8.3f}] {}".format(time.time() - self.start_ts, narc))
"""
Consumer code
"""
def run_task(task):
    """Search one partition for narcissistic numbers.

    ``task`` is a dict with 'prefix' (fixed leading digits, non-increasing)
    and 'length' (total digit count).  Returns the sorted list of
    narcissistic numbers found in that partition.
    """
    prefix = task.get('prefix')
    length = task.get('length')
    # Suffix digits must not exceed the last prefix digit, or start at 9
    # when there is no prefix.
    start = prefix[-1] if prefix else 9
    powers = [d ** length for d in range(11)]
    found = set()
    for tail in generate_digit_sorted_numbers(length - len(prefix), start):
        candidate = prefix + tail
        total = compute_narc(candidate, powers)
        if compare_lists(candidate, int_to_sorted_digits(total)):
            found.add(total)
    return sorted(found)
def run_consumer(args):
    """Worker loop: pull tasks, run them, push results, until interrupted.

    On KeyboardInterrupt mid-task the task is returned to the queue so
    another worker can pick it up.
    """
    task = None
    task_queue = None
    try:
        task_queue, result_queue = get_queues(args)
        task = task_queue.get()
        while task:
            result = run_task(task)
            result_queue.put(result)
            task = task_queue.get()
    except KeyboardInterrupt:
        # Guard against the interrupt landing before task/task_queue were
        # ever bound — the original raised NameError/UnboundLocalError
        # inside this handler in that case.
        if task_queue is not None and task is not None:
            task_queue.put(task)
class Consumer(object):
    """Runs ``run_consumer`` workers in a multiprocessing pool."""

    def __init__(self, args):
        self.args = args
        self.num_processes = args.processes
        self.pool = None

    def run(self):
        """Start num_processes workers and block on the first one.

        Workers loop forever, so this effectively blocks until interrupt.
        """
        self.pool = Pool(self.num_processes)
        worker_callbacks = []
        for _ in range(self.num_processes):
            worker_callbacks.append(self.pool.apply_async(run_consumer, [self.args]))
        worker_callbacks[0].get()

    def shutdown(self):
        """Stop the worker pool.

        ``multiprocessing.pool.Pool`` has no ``shutdown()`` method — the
        original call raised AttributeError.  Use terminate()/join().
        """
        if self.pool:
            self.pool.terminate()
            self.pool.join()
def main(argv):
    """Parse CLI args and run as producer or consumer until interrupted.

    argv is the full sys.argv-style vector; argv[0] is skipped.
    """
    parser = argparse.ArgumentParser('Narcissistic Numbers Producer/Consumer tool')
    parser.add_argument('--redis_host', type=str, default='localhost')
    parser.add_argument('--redis_port', type=int, default=6379)
    parser.add_argument('--role', type=str, default='worker')
    parser.add_argument('--processes', type=int, default=1)
    parser.add_argument('--task_queue_name', type=str, default='narc-tasks')
    parser.add_argument('--result_queue_name', type=str, default='narc-results')
    parser.add_argument('--partition_size', type=int, default=15)
    parser.add_argument('--start_size', type=int, default=3)
    parser.add_argument('--end_size', type=int, default=40)
    args = parser.parse_args(argv[1:])
    if args.role == 'producer':
        role = Producer(args)
    else:
        role = Consumer(args)
    try:
        role.run()
    except KeyboardInterrupt:
        try:
            role.shutdown()
        except Exception:
            # Producer defines no shutdown(); exit quietly either way.
            # Narrowed from the original bare ``except:`` which also
            # swallowed SystemExit/KeyboardInterrupt.
            sys.exit(0)
if __name__ == "__main__":
    # Entry point: pass the full argv; main() strips argv[0] itself.
    main(sys.argv)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment