Skip to content

Instantly share code, notes, and snippets.

@KuangHao95
Last active December 18, 2019 01:32
Show Gist options
  • Save KuangHao95/7623e0c76574ec86c9344f81c37fff35 to your computer and use it in GitHub Desktop.
Sample Code for using "concurrent.futures"
# 2. Multi-core acceleration
# Adapted from: https://stackoverflow.com/questions/42941584/
import concurrent.futures
from collections import deque
# Thread pool for I/O-bound tasks: the GIL is released during blocking I/O,
# so threads overlap their waits.
TPExecutor = concurrent.futures.ThreadPoolExecutor
# Process pool for CPU-bound tasks: separate processes give true parallelism
# across cores (each has its own interpreter and GIL).
PPExecutor = concurrent.futures.ProcessPoolExecutor
def get_file(path):
    """Read and return the entire text contents of *path*.

    This is the I/O-bound task handed to the thread pool.
    """
    with open(path) as handle:
        return handle.read()
def process_large_file(s):
    """Sum the Unicode code points of every character in *s*.

    Stands in for an expensive CPU-bound computation handed to the
    process pool.
    """
    total = 0
    for ch in s:
        total += ord(ch)
    return total
# Prepare the list of items to process -- in this sample: multiple files.
# Replace these placeholder paths with real files before running.
files = ["file0.txt", "file1.txt", "file2.txt", "file3.txt", "file4.txt",
         "file5.txt", "file6.txt", "file7.txt", "file8.txt", "file9.txt"]

results = []
# BUG FIX: the original called collections.deque(), but only
# "from collections import deque" is imported, so the bare name
# "collections" is unbound -- that line raised NameError.
completed_futures = deque()

def callback(future, completed=completed_futures):
    # Done-callback: stash each finished CPU-task future so the main
    # thread can harvest results later.  The default-argument binding is
    # deliberate: it captures the deque at definition time.
    completed.append(future)

# NOTE: on Windows (spawn start method) everything below must be guarded
# by "if __name__ == '__main__':" so child processes do not re-execute it
# when they import this module.
with TPExecutor(max_workers=4) as thread_pool_executor:
    # Fan the I/O-bound reads out to worker threads.
    data_futures = [thread_pool_executor.submit(get_file, path)
                    for path in files]
    with PPExecutor() as process_pool_executor:
        # As each read completes, hand its data to a worker process for
        # the CPU-bound stage.
        for data_future in concurrent.futures.as_completed(data_futures):
            future = process_pool_executor.submit(process_large_file,
                                                  data_future.result())
            future.add_done_callback(callback)
    # Exiting the PPExecutor context waits for every submitted job, so
    # all done-callbacks have fired by the time we collect results here.
    while completed_futures:
        results.append(completed_futures.pop().result())
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment