A quick-and-dirty script to move a MySQL database into Amazon RDS (or any other MySQL host). Insert-only tables can be transferred ahead of time, so only a small diff has to be copied while the site is down.
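The migration runs in two phases. A minimal usage sketch, assuming the script is saved as move_to_rds.rb, the SOURCE_DB/TARGET_DB settings below are filled in, and the machine running it can reach both databases:

# Phase 1 (site still live): copy the schema and pre-load the insert-only tables
#   ruby move_to_rds.rb --before
#
# Phase 2 (site offline, during the maintenance window): copy the remaining tables
# and the diff of the insert-only ones
#   ruby move_to_rds.rb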
require 'fileutils'
start_time = Time.now
SOURCE_DB = {
  :name     => 'db_name',
  :user     => 'db_user',
  :password => 'db_pass',
  :host     => 'localhost'
}

TARGET_DB = {
  :name     => 'db_name',
  :user     => 'db_user',
  :password => 'db_pass',
  :host     => 'foo.abcdef.us-east-1.rds.amazonaws.com'
}
DUMP_DIR = '/mnt/db_dump'
# These tables only ever get new rows, so they can be moved beforehand and only the diff copied later
INSERT_ONLY_TABLES = %w[click_tracking logs]
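
# Runs the given block and prints the elapsed wall-clock time, with an optional label.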
def benchmark(label = nil)
  puts label unless label.nil?
  before = Time.now
  yield
  after = Time.now
  puts "Took %.3fs" % (after - before)
end
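
# Builds the shared --host/--user/--password arguments for the mysql command-line tools.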
def host_user_password(db)
  "--host='#{db[:host]}' --user='#{db[:user]}' --password='#{db[:password]}'"
end
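
# Returns the names of all tables in the given database.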
def show_tables(db)
  `mysql --execute='SHOW TABLES' --silent #{host_user_password(db)} #{db[:name]}`.split("\n")
end
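
# Dumps the given tables as tab-separated files into dir (mysqldump --tab);
# an optional WHERE clause filters rows, and LOCK_ARG controls table locking.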
def dump_db_tables(db, tables, dir, where = nil)
  FileUtils.mkdir_p(dir, :mode => 0777)
  where_arg = where.nil? ? '' : %(--where="#{where}")
  benchmark("Dumping tables [#{tables.join(', ')}] to #{dir} #{where_arg}") do
    system "mysqldump --verbose #{host_user_password(db)} --tab=#{dir} #{where_arg} #{LOCK_ARG} #{db[:name]} #{tables.join(' ')}"
  end
end
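
# Phase 1: dump the insert-only tables while the site is still up.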
def dump_db_before(db, migrate_before = [])
  dir = "#{DUMP_DIR}/#{db[:name]}/before"
  dump_db_tables(db, migrate_before, dir)
end
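
# Phase 2: dump every table that was not moved beforehand.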
def dump_db_hot(db, migrate_before = [])
  dir = "#{DUMP_DIR}/#{db[:name]}/hot"
  tables = show_tables(db) - migrate_before
  dump_db_tables(db, tables, dir)
end
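
# Phase 2: for each insert-only table, dump only the rows with an id above the
# highest id already imported into the target.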
def dump_db_diff(source, target, migrate_before = [])
  dir = "#{DUMP_DIR}/#{source[:name]}/diff"
  migrate_before.each do |table|
    last_max_id = `mysql --execute='SELECT MAX(id) FROM #{table}' --silent #{host_user_password(target)} #{target[:name]}`.strip
    dump_db_tables(source, [table], dir, "id > #{last_max_id}")
  end
end
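
# Dumps the schema only (no data) to a single .sql file.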
def dump_db_structure(db)
  dump_file = "#{DUMP_DIR}/#{db[:name]}_structure.sql"
  FileUtils.mkdir_p(DUMP_DIR, :mode => 0777)
  benchmark("Dumping structure of #{db[:name]} to #{dump_file}") do
    system "mysqldump --verbose #{host_user_password(db)} --no-data #{db[:name]} > #{dump_file}"
  end
end
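
# Loads the dumped schema into the target database.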
def import_db_structure(source, target)
  dump_file = "#{DUMP_DIR}/#{source[:name]}_structure.sql"
  benchmark("Importing structure of #{source[:name]} from #{dump_file}") do
    system "mysql #{host_user_password(target)} #{target[:name]} < #{dump_file}"
  end
end
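
# Loads the tab-separated .txt dumps for the given tables into the target via mysqlimport.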
def import_db_tables(target, tables, dir)
  benchmark("Importing tables from #{dir}") do
    dump_files = tables.map { |table| File.join(dir, "#{table}.txt") }
    system "mysqlimport --local --compress --verbose #{host_user_password(target)} #{target[:name]} #{dump_files.join(' ')}"
  end
end
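
# Phase 1: import the insert-only tables into the target.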
def import_db_before(source, target, migrate_before = [])
  dir = "#{DUMP_DIR}/#{source[:name]}/before"
  import_db_tables(target, migrate_before, dir)
end
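
# Phase 2: import every table that was not moved beforehand.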
def import_db_hot(source, target, migrate_before = [])
  dir = "#{DUMP_DIR}/#{source[:name]}/hot"
  tables = show_tables(source) - migrate_before
  import_db_tables(target, tables, dir)
end
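
# Phase 2: import the diff of the insert-only tables.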
def import_db_diff(source, target, migrate_before = [])
  dir = "#{DUMP_DIR}/#{source[:name]}/diff"
  import_db_tables(target, migrate_before, dir)
end

if ARGV.include?('--before')
  # Phase 1: the site is still up, so don't lock tables while dumping
  LOCK_ARG = "--skip-lock-tables"
  dump_db_structure(SOURCE_DB)
  dump_db_before(SOURCE_DB, INSERT_ONLY_TABLES)
  import_db_structure(SOURCE_DB, TARGET_DB)
  import_db_before(SOURCE_DB, TARGET_DB, INSERT_ONLY_TABLES)
else
  # Phase 2: the site is offline, so mysqldump's default table locking is fine
  LOCK_ARG = ""
  dump_db_hot(SOURCE_DB, INSERT_ONLY_TABLES)
  dump_db_diff(SOURCE_DB, TARGET_DB, INSERT_ONLY_TABLES)
  import_db_hot(SOURCE_DB, TARGET_DB, INSERT_ONLY_TABLES)
  import_db_diff(SOURCE_DB, TARGET_DB, INSERT_ONLY_TABLES)
end
puts "Script ran for: %.3fs" % (Time.now - start_time)