# Gist by @nuno-faria, created January 14, 2022 11:43.
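# Insert-throughput microbenchmark for MonetDB over ODBC, using pyodbc.
# Assumptions taken from CONN_STR below (adjust as needed): a MonetDB server
# listening on localhost:50000 with a database named "testdb", the default
# monetdb/monetdb credentials, and the MonetDB ODBC driver installed at
# /usr/lib/x86_64-linux-gnu/libMonetODBC.so.
# For each concurrency level in THREADS, the script recreates the test tables,
# starts that many worker processes, lets each insert rows for TIME seconds,
# and reports the aggregate insert throughput.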
import pyodbc
import random
from string import ascii_lowercase
from multiprocessing import Pool
import time

TIME = 30  # duration of each run, in seconds
THREADS = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]  # concurrency levels to test
AUTOCOMMIT = True  # when True, every insert is committed individually
BATCH_SIZE = 1  # inserts per explicit transaction; applicable only if AUTOCOMMIT = False
TABLES = 1  # number of test tables; workers are assigned to tables round-robin
CONN_STR = 'DRIVER={/usr/lib/x86_64-linux-gnu/libMonetODBC.so};HOST=localhost;PORT=50000;DATABASE=testdb;UID=monetdb;PWD=monetdb'

def recreate_db():
    """Drops and recreates the test tables."""
    conn = pyodbc.connect(CONN_STR)
    conn.autocommit = True
    cursor = conn.cursor()
    for table in range(TABLES):
        try:
            cursor.execute(f'drop table test_{table}')
        except pyodbc.Error:
            pass  # table did not exist yet
        try:
            cursor.execute(f'create table test_{table} (k varchar(255), v int)')
        except pyodbc.Error:
            pass  # table already exists
    conn.close()

def run_thread(args):
    """Worker: inserts rows into its assigned table for `duration` seconds and
    returns how many inserts it performed."""
    duration, table_idx = args
    conn = pyodbc.connect(CONN_STR)
    conn.autocommit = AUTOCOMMIT
    cursor = conn.cursor()
    # random per-worker key prefix so keys from different workers do not collide
    random_str = ''.join([ascii_lowercase[random.randint(0, len(ascii_lowercase) - 1)] for _ in range(10)])
    begin = time.time()
    txs = 0
    while time.time() - begin <= duration:
        if AUTOCOMMIT:
            # one insert per (auto-committed) transaction
            cursor.execute(f'insert into test_{table_idx} values(?, ?)', random_str + str(txs), txs)
            txs += 1
        else:
            # group BATCH_SIZE inserts into a single explicit transaction
            for _ in range(BATCH_SIZE):
                cursor.execute(f'insert into test_{table_idx} values(?, ?)', random_str + str(txs), txs)
                txs += 1
            cursor.execute('commit')
    return txs

if __name__ == '__main__':  # guard so Pool workers do not re-run the benchmark under the spawn start method
    print(f'Running benchmark (duration: {TIME}s; threads: {THREADS}; autocommit: {AUTOCOMMIT}; batch size: {BATCH_SIZE}; tables: {TABLES})')
    for threads in THREADS:
        recreate_db()
        begin = time.time()
        pool = Pool(threads)
        # assign a table to each worker, round-robin
        tables = list(range(TABLES))
        results = pool.map(run_thread, [(TIME, tables[i % len(tables)]) for i in range(threads)])
        pool.close()
        total_txs = sum(results)
        end = time.time() - begin
        print(f'{threads} threads: {total_txs / end} inserts/s')
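
# Optional sanity check, not part of the original gist: after the benchmark
# finishes, count the rows that actually landed in each test table so the
# reported throughput of the last concurrency level can be cross-checked.
def count_rows():
    conn = pyodbc.connect(CONN_STR)
    cursor = conn.cursor()
    for table in range(TABLES):
        cursor.execute(f'select count(*) from test_{table}')
        print(f'test_{table}: {cursor.fetchone()[0]} rows')
    conn.close()

if __name__ == '__main__':
    count_rows()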