Skip to content

Instantly share code, notes, and snippets.

Created Jul 19, 2018
What would you like to do?
# coding:utf8
import redis
import logging
import click
from itertools import izip_longest
# hosts = ('', '', '', '')
# Setup: pip install click redis
# Usage: python migrate.py --host=your_host_here --pattern=your_key_pattern
# Destination redis connection settings. The host is fixed here, unlike
# the source host, which is supplied at runtime via --host (see migrate()).
dest = {
    'host': 'your host here',
    'port': 6379,  # change it
    'db': 1,
    'password': 'your password here',
}
# Source redis connection settings. Deliberately has no 'host' key:
# migrate() passes host=<--host option> alongside **src to redis.Redis().
src = {
    'port': 6380,  # change it
    'db': 1,
    'password': 'your password here',
}
def dump_and_restore(dest_conn, package):
    """Replay DUMPed payloads onto the destination server via RESTORE.

    :type dest_conn: redis.Redis
    :param package: iterable of (key, value, ttl) triples, where *value*
        is a DUMP serialization and *ttl* is in milliseconds (0 = no
        expiry, per the redis RESTORE command).
    :return: list of per-command results from the pipeline execution.
    """
    pipe = dest_conn.pipeline()
    for key, value, ttl in package:
        # replace=True so re-running the migration overwrites existing keys
        # instead of raising BUSYKEY.
        pipe.restore(key, ttl=ttl, value=value, replace=True)
        logging.info("key: %s, value: %s, ttl: %d", key, value, ttl)
    result = pipe.execute()
    return result
def grouper(iterable, n, fillvalue=None):
    """Collect data into fixed-length chunks or blocks.

    grouper('ABCDEFG', 3, 'x') --> ABC DEF Gxx
    The last chunk is padded with *fillvalue* up to length *n*.
    """
    # izip_longest only exists on Python 2; fall back to the Python 3 name
    # so the script keeps working on both interpreters.
    try:
        from itertools import izip_longest as zip_longest  # Python 2
    except ImportError:
        from itertools import zip_longest  # Python 3
    args = [iter(iterable)] * n
    return zip_longest(fillvalue=fillvalue, *args)
@click.command()
@click.option('--host', help="redis host")
@click.option('--pattern', help="key pattern")
def migrate(host, pattern):
    """DUMP keys matching *pattern* from the source redis at *host* and
    RESTORE them into the `dest` server, preserving TTLs.

    Keys are scanned incrementally (SCAN) and processed in chunks of 100
    through non-transactional pipelines to limit round trips.
    """
    logging.basicConfig(
        level=logging.INFO,
        format='%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s',
        datefmt='%a, %d %b %Y %H:%M:%S',
        filename='/tmp/migrate-%s.log' % host,
    )
    src_conn = redis.Redis(host=host, **src)
    _iter = src_conn.scan_iter(match=pattern, count=100)
    dest_conn = redis.Redis(**dest)
    # Data size is unknown; assume up to 10M keys for the progress bar.
    with click.progressbar(_iter, label="Restore", length=10000000,
                           show_percent=True, show_pos=True) as keys:
        chunks = grouper(keys, 100)
        for chunk in chunks:
            # First pass: DUMP every key in the chunk.
            pipe = src_conn.pipeline(transaction=False)
            for key in chunk:
                if key is None:
                    # fillvalue padding from grouper() -> end of data
                    break
                pipe.dump(key)
            result = pipe.execute()
            # Second pass: fetch TTLs in milliseconds. Falsy results
            # (no TTL) are mapped to 0, which RESTORE treats as "no expiry".
            pipe = src_conn.pipeline(transaction=False)
            for key in chunk:
                if key is None:
                    break
                pipe.pttl(key)
            ttl_result = map(lambda v: v or 0, pipe.execute())
            # zip truncates at the shortest input, so the None padding in
            # chunk is dropped automatically.
            dump_and_restore(dest_conn, zip(chunk, result, ttl_result))
if __name__ == '__main__':
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment