Last active
April 8, 2022 04:00
-
-
Save tegansnyder/41f6e982cb901e29c645 to your computer and use it in GitHub Desktop.
For those times when you don't have n98-magerun handy and you need a stripped-down database dump using pure bash and mysqldump. This creates a database dump without any production data or customers — essentially the same thing as the @development stripped option in n98-magerun db:dump.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/bin/bash
#title          :magedev_db.sh
#description    :For those times when n98-magerun db:dump is unavailable
#author         :Tegan Snyder
#notes          :dev db backups in Magento via bash (no production data)
#==============================================================================

# Timestamp used to give the final dump file a unique name.
current_time=$(date "+%Y.%m.%d-%H.%M.%S")

# Clean up any stale table-skip list from a previous run.
# -f (not -rf): the target is a plain file, never a directory.
rm -f /tmp/mage_skip_tables.tmp

# Gather connection details interactively.
# -r: treat backslashes in input literally; -s: do not echo the password.
read -rp "Enter your MySQL server: " server
read -rp "Enter your MySQL database: " db
read -rp "Enter your MySQL username: " u
read -rsp "Enter your MySQL password: " p
echo  # read -s suppresses the trailing newline after the password prompt
# Wildcard table-name patterns that are not needed in a dev db:
# customer data, order/sales data, reports and search indexes.
# Add a new pattern by appending one line to this array.
skip_patterns=(
  customer_address
  customer_entity
  sales_order_aggregated
  sales_order_tax
  sales_flat
  sales_recurring_
  sales_refunded_
  sales_payment_
  enterprise_sales_
  enterprise_customer_sales_
  sales_bestsellers_
  report_viewed_
  catalogsearch_
)

# Assemble: SHOW TABLES FROM $db WHERE Tables_in_$db LIKE '%p1%' OR ...
# NOTE(review): $db is interpolated directly into SQL. It comes from an
# operator prompt, not untrusted input, but database names containing
# quotes or spaces will break the query.
QUERY="SHOW TABLES FROM $db WHERE"
sep=""
for pat in "${skip_patterns[@]}"; do
  QUERY="$QUERY$sep Tables_in_$db LIKE '%$pat%'"
  sep=" OR"
done
QUERY="$QUERY;"
# Ask MySQL which tables match the skip patterns; one table name per line.
# -N: no column header; -B: batch mode (plain tab-separated output).
# NOTE(review): passing the password via -p on the command line leaks it to
# `ps`; prefer ~/.my.cnf or MYSQL_PWD in hardened environments.
mysql -h "$server" -u "$u" -p"$p" "$db" -N -B -e "$QUERY" > /tmp/mage_skip_tables.tmp \
  || echo "warning: could not query skip-table list from MySQL" >&2

# Fixed-name tables to skip as well (visitor logs, reports, dataflow scratch).
# One heredoc instead of a dozen echo >> appends.
cat >> /tmp/mage_skip_tables.tmp <<'EOF'
log_url
log_url_info
log_visitor
log_visitor_info
log_visitor_online
report_event
report_compared_product_index
dataflow_batch
dataflow_batch_export
dataflow_batch_import
dataflow_import_data
dataflow_session
dump_command.sql
EOF
# NOTE(review): "dump_command.sql" looks like a filename, not a table name.
# Harmless as an --ignore-table entry for a nonexistent table, but it is
# probably a leftover — confirm and remove.
# Read through the tables we need to ignore and concatenate them one by one
# into a single string of mysqldump --ignore-table options.
#
# build_ignore_args DATABASE LIST_FILE
#   Arguments: DATABASE  - database name used to qualify each table
#              LIST_FILE - file with one table name per line (blanks skipped)
#   Outputs:   prints " --ignore-table=DATABASE.tbl ..." on stdout
build_ignore_args() {
  local database=$1 list_file=$2
  local opts="" tbl
  # IFS= and -r: preserve whitespace and backslashes in table names
  # (the original bare `read line` would mangle both).
  while IFS= read -r tbl; do
    if [ -n "$tbl" ]; then
      opts="$opts --ignore-table=$database.$tbl"
    fi
  done < "$list_file"
  printf '%s' "$opts"
}

IGNORETABLES=$(build_ignore_args "$db" /tmp/mage_skip_tables.tmp)
# Dump the table definitions for ALL tables (schema only, no rows), so the
# dev database still has every table even when its data is skipped.
mysqldump --no-data -h "$server" -u "$u" -p"$p" "$db" > /tmp/mage_table_definitions.sql

# Dump the data, excluding the ignored tables.
# $IGNORETABLES is intentionally left unquoted: it holds multiple
# space-separated --ignore-table options that must word-split into
# separate arguments.
# shellcheck disable=SC2086
mysqldump --no-create-info --skip-triggers --single-transaction \
  -h "$server" -u "$u" -p"$p" "$db" $IGNORETABLES > /tmp/mage_database.sql

# Combine the table definitions and the data dump into one file.
outfile="/tmp/backup-$current_time-$db.sql"
cat /tmp/mage_table_definitions.sql /tmp/mage_database.sql > "$outfile"
echo "dump saved: $outfile"

# Clean up intermediates (-f, not -rf: these are plain files).
rm -f /tmp/mage_skip_tables.tmp
rm -f /tmp/mage_table_definitions.sql
rm -f /tmp/mage_database.sql
Note: if you get a "definers" error when importing, it is probably because you are importing into a database server that doesn't have a MySQL user matching a trigger's definer. I thought adding --no-create-info would prevent this, but it doesn't.
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment
# then rsync the file to the server you want: rsync -aOvzu -e ssh /tmp/backup-2014.08.21-08.41.04-db.sql someother-server.com:/tmp
then import it:
mysql -h yourdb.name.com -u myusername -p uk < backup-2014.08.21-08.41.04-db.sql