# Massive insertion of (integer member, float score) pairs into a Redis sorted set using redis-py (500,000 pairs in this run)
# (C) By Athanassios I. Hatzis
# 30 Sept 2018
# Written to explain issue: https://github.com/andymccurdy/redis-py/issues/850
#
# OS Environment: Linux Ubuntu x64
# Processor: Intel(R) Core(TM) i3 CPU 540 @ 3.07GHz
# Memory: 16 GB
# Redis: 64-bit, v4.0.8, running in standalone mode
#
import time
from redis import StrictRedis
from numpy import random as rnd

def floats_sample(size, value_range):
    # Uniform random floats drawn from [value_range[0], value_range[1])
    return (value_range[1] - value_range[0]) * rnd.random_sample(size) + value_range[0]

def redis_dict(arr):
    # Map each array index (as a string member) to its value (the score)
    d = dict(enumerate(arr))
    return {str(k): v for k, v in d.items()}
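# Example (illustrative): redis_dict([2500.0, 8700.0]) returns
# {'0': 2500.0, '1': 8700.0}: stringified indices become the sorted-set
# members and the floats their scores.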
sample_size = 500000
sample_interval = (2000, 9000)
flt_arr = floats_sample(sample_size, sample_interval).astype('float32')
flt_scores = redis_dict(flt_arr)
rdb = StrictRedis(host='localhost', port=6379, db=9)
rdb.flushdb()                                   # start from an empty database
print("dbsize after flush:", rdb.dbsize())
# %%%%%%%%%%%%%%%%%%%%%%%%%%% Start Timing Benchmark %%%%%%%%%%%%%%%%%%%%%%%%%%
t1_start = time.perf_counter()
t2_start = time.process_time()
# redis-py < 3.0 API: members and scores passed as keyword arguments (member=score)
rdb.zadd('FloatNDX', **flt_scores)
t1_stop = time.perf_counter()
t2_stop = time.process_time()
print(f"elapsed time for {sample_size} floats : {int(round((t1_stop-t1_start)))} [sec]")
print(f"cpu process time for {sample_size} floats: {int(round((t2_stop-t2_start)))} [sec]")
# For 500k floats
# elapsed time for 500000 floats : 6 [sec]
# cpu process time for 500000 floats: 5 [sec]
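
# --- Sketch: the same bulk insert with the redis-py >= 3.0 API --------------
# redis-py 3.0 changed zadd() to take a single {member: score} mapping instead
# of keyword arguments (the API change discussed in issue #850 above), so the
# benchmark call above would need to be rewritten for a 3.x client.  The
# version-guarded sketch below is an illustration only, not part of the
# benchmark; it also splits the mapping into chunks sent through a pipeline so
# that no single command carries all 500,000 pairs.
import redis
if int(redis.__version__.split('.')[0]) >= 3:
    pipe = rdb.pipeline(transaction=False)
    items = list(flt_scores.items())
    chunk_size = 50000  # arbitrary chunk size, tune for your setup
    for i in range(0, len(items), chunk_size):
        # redis-py >= 3.0: pass the member -> score mapping as one argument
        pipe.zadd('FloatNDX', dict(items[i:i + chunk_size]))
    pipe.execute()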