python multiprocessing
import multiprocessing
import queue  # only needed here for the queue.Empty exception
import threading
import time

# from threads import client
# from config import config
# from util import sqs

max_thread_count = 3
max_process_count = 2

# A plain queue.Queue is only shared between threads of a single process; to
# hand jobs out across worker processes the queue must be a
# multiprocessing.Queue. (With the 'spawn' start method this module-level
# setup is re-run in every child, so in that case it belongs under the
# __main__ guard instead.)
job_q = multiprocessing.Queue()

# test jobs
for i in range(5):
    job_q.put({'channel_id': 'UC-TEST-' + str(i)})


class Fetcher(threading.Thread):
    """Worker thread that simulates fetching one channel."""

    def __init__(self, name, d_data, l_fetchers):
        threading.Thread.__init__(self, name=name)
        self.fetchers = l_fetchers
        self.data = d_data  # keep the job payload on the thread

    def run(self):
        print(self.name, 'Starting thread')
        time.sleep(10)  # simulate the actual fetch work
        # remove ourselves from the active-fetcher list when done
        self.fetchers.pop()
        print(self.name, 'Ending thread')


def run_in_process(shared_job_q):
    """Drain the shared job queue, keeping at most max_thread_count threads busy."""
    name = multiprocessing.current_process().name
    l_active_fetchers = []
    i_active_fetchers = 0
    print(name, '- Spawned process')
    fetchers = []
    while True:
        try:
            # room for another worker thread?
            if i_active_fetchers < max_thread_count:
                # get the next job from the shared queue
                d_channel = shared_job_q.get_nowait()
                # for i in range(max_thread_count - i_active_fetchers):
                l_active_fetchers.append(d_channel['channel_id'])
                fetch = Fetcher(' ##' + name + 'thread-'
                                + d_channel['channel_id'],
                                d_channel, l_active_fetchers)
                fetchers.append(fetch)
                fetch.start()
                # for fetch in fetchers:
                #     fetch.join()
            else:
                time.sleep(0.1)  # at capacity; avoid a busy-wait
        except queue.Empty:
            print('done')
            return
        i_active_fetchers = len(l_active_fetchers)
        # print('active fetchers for process ' + name, i_active_fetchers)


if __name__ == '__main__':
    jobs = []
    for i in range(max_process_count):
        worker = multiprocessing.Process(name='my_process-' + str(i),
                                         target=run_in_process,
                                         args=(job_q,))
        jobs.append(worker)
        worker.start()
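If the parent process should also block until both workers have drained the queue, the Process handles collected in jobs can be joined at the end of the __main__ block; a minimal sketch, not part of the original gist, reusing the jobs list above:

    # optional: wait for every worker process to finish before exiting
    for worker in jobs:
        worker.join()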