Skip to content

Instantly share code, notes, and snippets.

@choadrocker
Created December 17, 2013 19:34
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save choadrocker/8011209 to your computer and use it in GitHub Desktop.
Save choadrocker/8011209 to your computer and use it in GitHub Desktop.
Dump all tables of a MySQL database from one host into another in parallel — useful for backfilling or similar bulk copies.
#!/bin/bash
# Copy every table of one MySQL database into another database/host,
# running up to $max_threads mysqldump|mysql pipelines in parallel.
# Intended for backfills or one-off bulk copies.
set -u  # abort on unset variables; -e is deliberately omitted so one
        # failed table copy does not kill the remaining background jobs

readonly max_threads=16    # max concurrent dump/load pipelines
readonly source=localhost  # host to dump FROM
readonly target=target     # host to load INTO
readonly source_db=source_db
readonly target_db=target_db
readonly user=user
readonly pass=pass         # NOTE(review): plaintext password on the command
                           # line is visible in `ps`; prefer ~/.my.cnf
# Save the current global timeout settings on both hosts into shell vars
# named <host>_<setting> ('-' in the hostname mapped to '_'), then raise
# them so long-running dumps are not cut off mid-transfer.
tmp_vars=( wait_timeout net_read_timeout net_write_timeout )
for h in "${source}" "${target}"; do
  for var in "${tmp_vars[@]}"; do
    # remember the original value so it can be restored at the end
    export "${h//-/_}_${var}=$(mysql -h "${h}" -u"${user}" -p"${pass}" -BN \
      -e "select @@global.${var};")"
    mysql -h "${h}" -u"${user}" -p"${pass}" -BN \
      -e "set @@global.${var}=60000;"
  done
done
# Get the list of tables in the database we're syncing. The result is a
# whitespace-separated string; the copy loop below word-splits it, so
# table names containing spaces are not supported.
tables=$( mysql -h "${source}" -u"${user}" -p"${pass}" "${source_db}" -BN -e "show tables;" )
# Copy each table in its own background pipeline, throttled to
# max_threads concurrent jobs.
for t in ${tables}; do  # intentional word-split of the table list
  # Block while the job count is at the cap. `jobs -r` counts only
  # *running* jobs; the original `-gt` test allowed max_threads+1
  # concurrent pipelines, fixed here with `-ge`.
  while [ "$( jobs -r | wc -l )" -ge "${max_threads}" ]; do
    sleep 1
  done
  echo "Dumping ${t}"
  mysqldump -h "${source}" -u"${user}" -p"${pass}" --max_allowed_packet=1G \
    --single-transaction -centRKq "${source_db}" "${t}" \
    | mysql -h "${target}" -u"${user}" -p"${pass}" --max_allowed_packet=1G -C "${target_db}" &
done
# Wait for the background pipelines, printing a blinking status line.
# Count this shell's own running jobs rather than grepping `ps`, which
# would also match mysqldump processes started by anyone else on the box.
j=$( jobs -r | wc -l )
while [ "${j}" -gt 0 ]; do
  # variable passed as printf argument, never as the format string
  printf '\e[5m%s jobs running...\e[m' "${j}"
  sleep 10
  j=$( jobs -r | wc -l )
  printf '\r'
done
wait  # reap finished jobs so their exit statuses are collected
## Restore the original global timeout values saved at the top.
for h in "${source}" "${target}"; do
  for var in "${tmp_vars[@]}"; do
    ref="${h//-/_}_${var}"  # name of the variable holding the saved value
    # ${!ref} is bash indirect expansion — safer than the original eval
    mysql -h "${h}" -u"${user}" -p"${pass}" -BN \
      -e "set @@global.${var}=${!ref}"
  done
done
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment