Skip to content

Instantly share code, notes, and snippets.

@viroos
Created Mar 27, 2021
Embed
What would you like to do?
import multiprocessing
from multiprocessing import Value, Pool
from concurrent.futures import ThreadPoolExecutor, ProcessPoolExecutor
import pandas as pd
from pathlib import Path
import sys
# Path of the aggregate CSV file that every worker process appends to.
output_csv = 'output.csv'
# Root directory searched recursively for input *.csv files.
base_dir = 'base_dir'
def read_file(file):
    """Append the full contents of *file* to the shared output CSV and
    increment the cross-process completion counter.

    Relies on two module-level globals that must exist in the worker
    process: ``output_csv`` (destination path) and ``counter``
    (a ``multiprocessing.Value('i')`` shared across pool workers).

    :param file: path (str or Path) of the CSV file to append.
    """
    with open(file, 'r') as f:
        file_content = f.read()
    # Hold the counter's lock for the whole append so that writes from
    # concurrent pool workers cannot interleave inside the output file,
    # and write the content in one call instead of per-character.
    with counter.get_lock():
        with open(output_csv, "a") as out:
            out.write(file_content)
        counter.value += 1
def _init_worker(shared_counter):
    """Pool initializer: expose the shared counter as a module global in
    each worker process. Required under the 'spawn' start method
    (macOS/Windows default), where parent globals are not inherited."""
    global counter
    counter = shared_counter


if __name__ == "__main__":
    # Every *.csv under base_dir, searched recursively (lazy generator).
    files = Path(base_dir).rglob('*.csv')
    # 'i' -> shared C int starting at 0; incremented once per file done.
    counter = Value('i', 0)
    # Size the pool to the machine instead of a hard-coded 100 processes,
    # and hand the counter to each worker through the initializer so the
    # script also works when processes are spawned rather than forked.
    with Pool(processes=multiprocessing.cpu_count(),
              initializer=_init_worker,
              initargs=(counter,)) as pool:
        pool.map(read_file, files)
    print(counter.value)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment