@MagerValp
Created September 15, 2012 08:51
rsync wrapper script
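# rsync filter rules (merge file) for the DAFGU /Users/ backup; the wrapper
# script below appears to load this list as dafgu_filter.txt via --filter=merge.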
- .DS_Store
- /Shared
- /Guest
- /*/Library/Application Support/SyncServices/data.version
- /*/Library/Application Support/Firefox/Crash Reports
- /*/Library/Caches
- /*/Library/Logs
- /*/Library/Mail/Envelope Index
- /*/Library/Mail/Envelope Index-journal
- /*/Library/Mail/AvailableFeeds
- /*/Library/Mail/Metadata/BackingStoreUpdateJournal
- /*/Library/Mail/V2/MailData/Envelope Index
- /*/Library/Mail/V2/MailData/Envelope Index-journal
- /*/Library/Mail/V2/MailData/AvailableFeeds
- /*/Library/Mail/V2/MailData/BackingStoreUpdateJournal
- /*/Library/Mail/V2/MailData/Envelope Index-shm
- /*/Library/Mail/V2/MailData/Envelope Index-wal
- /*/Library/Mirrors
- /*/Library/PubSub/Database
- /*/Library/PubSub/Downloads
- /*/Library/PubSub/Feeds
- /*/Library/Safari/Icons.db
- /*/Library/Safari/WebpageIcons.db
- /*/Library/Safari/HistoryIndex.sk
- /*/.FileSync
- /*/.Trash
- /*/Downloads
- /*/.dropbox
- /*/Dropbox
- /*/Movies
- /*/Music
- /*/Pictures
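
To preview what these rules exclude, rsync can be run in dry-run mode against a scratch destination. A minimal sketch, assuming the list above is saved as dafgu_filter.txt in the current directory (the name the wrapper expects) and using a throwaway destination path:

rsync --dry-run --verbose --recursive --links --times \
    --filter="merge dafgu_filter.txt" \
    --delete-after --delete-excluded \
    /Users/ /tmp/dafgu-dry-run/
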
#!/bin/bash
#
# Create configuration for the DAFGU backup agent.
# Intended to run as an installer postinstall script, so $3 is the target volume.

# The launchd job plist is installed here.
launchdplist="$3/Library/LaunchDaemons/se.gu.gu.DAFGUBackup.plist"
# We store configuration files here.
dafgudir="$3/Users/.dafgu"
# Create configuration directory.
echo "Creating $dafgudir"
# Re-use old backup UUID if it exists, otherwise generate a new.
if [ -d "$dafgudir" -a -n "$(find "$dafgudir" -maxdepth 1 -name 'backupuuid-*' -print -quit)" ]; then
    backupuuid=`cat "$dafgudir/backupuuid-"*`
    echo "Reusing backupuuid: $backupuuid"
else
    backupuuid=`uuidgen`
    echo "Generated new backupuuid: $backupuuid"
fi
# Get the current hostname.
hostname=`scutil --get LocalHostName`
echo "Using hostname: $hostname"
# Get the primary MAC address.
macaddr=`ifconfig en0 | awk '/ether/ {print $2}'`
echo "Using MAC address: $macaddr"
# Create a fresh dafgudir.
rm -rf "$dafgudir"
mkdir -p "$dafgudir"
echo "$backupuuid" > "$dafgudir/backupuuid-$backupuuid"
echo "$hostname" > "$dafgudir/hostname-$hostname"
echo "$macaddr" > "$dafgudir/macaddr-$macaddr"
# Disable sleep when not on battery power.
pmset -c sleep 0
# Install launchd job.
echo "Installing $launchdplist"
# Unload old launchd job if needed.
if [ -f "$launchdplist" ]; then
    echo "Unloading old launchd job"
    /bin/launchctl unload "$launchdplist"
    echo "Removing old launchd plist"
    rm -f "$launchdplist"
fi
# Create a new one.
echo "Creating $launchdplist"
cat > "$launchdplist" <<EOF
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
    <key>Label</key>
    <string>se.gu.gu.DAFGUBackup</string>
    <key>ProgramArguments</key>
    <array>
        <string>/usr/local/munki/run_backup.py</string>
        <string>--verbose</string>
        <string>--randomdelay</string>
        <string>3600</string>
        <string>/Users/</string>
        <string>dafgubackup@backupserver.example.com:$backupuuid</string>
    </array>
    <key>StandardOutPath</key>
    <string>/var/log/dafgubackup.log</string>
    <key>StandardErrorPath</key>
    <string>/var/log/dafgubackup.log</string>
    <key>StartInterval</key>
    <integer>60</integer>
</dict>
</plist>
EOF
# Load the launchd job.
echo "Loading launchd job"
/bin/launchctl load "$launchdplist"
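
A quick sanity check after the script has run (as root on the target machine; the label, log path, and config directory all come from the script above):

sudo launchctl list | grep se.gu.gu.DAFGUBackup   # job should be listed
tail -f /var/log/dafgubackup.log                  # follow wrapper/rsync output
ls /Users/.dafgu/                                 # backupuuid-, hostname- and macaddr- files
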
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Wrapper script for rsync.
#
# This script checks network connectivity and, if it matches certain criteria
# (currently that TEST_HOST is reachable over Ethernet), it performs a backup
# over rsync. Output from rsync is saved to BACKUP_STATUS_PLIST, and progress
# messages are written to STATUS_MENU_PLIST.
import sys
import optparse
import subprocess
import time
import random
import os.path
import logging
import re
import datetime
BACKUP_STATUS_PLIST = "/Library/Preferences/DAFGUBackupStatus.plist"
FILTER_FILE = os.path.join(os.path.dirname(__file__), "dafgu_filter.txt")
# This server is used to determine network connectivity
TEST_HOST = "macmig.gu.gu.se"
STATUS_MENU_PLIST = "/tmp/DAFGUMigrationStatus.plist"
STATUS_UNKNOWN = 0
STATUS_OK = 1
STATUS_ERROR = 2
STATUS_ACTIVE = 3
from Foundation import NSData, \
    NSPropertyListSerialization, \
    NSPropertyListMutableContainers, \
    NSPropertyListXMLFormat_v1_0


class FoundationPlistException(Exception):
    pass


class NSPropertyListSerializationException(FoundationPlistException):
    pass


class NSPropertyListWriteException(FoundationPlistException):
    pass

def writePlist(data, path):
    plistData, error = NSPropertyListSerialization.dataFromPropertyList_format_errorDescription_(
        data, NSPropertyListXMLFormat_v1_0, None)
    if error:
        raise NSPropertyListSerializationException(error)
    else:
        if plistData.writeToFile_atomically_(path, True):
            return
        else:
            raise NSPropertyListWriteException("Failed to write plist data to %s" % path)

def readPlist(path):
    plistData = NSData.dataWithContentsOfFile_(path)
    dataObject, plistFormat, error = \
        NSPropertyListSerialization.propertyListFromData_mutabilityOption_format_errorDescription_(
            plistData, NSPropertyListMutableContainers, None, None)
    if error:
        errmsg = "%s in file %s" % (error, path)
        raise NSPropertyListSerializationException(errmsg)
    else:
        return dataObject

def set_status_menu(status, message):
    try:
        time.sleep(1)  # Ensure a unique timestamp for each update.
        writePlist({
            "DAFGUMigrationStatus": status,
            "DAFGUMigrationMessage": message,
        }, STATUS_MENU_PLIST)
    except:
        pass


def get_status_menu():
    try:
        status = readPlist(STATUS_MENU_PLIST)
        return (status["DAFGUMigrationStatus"], status["DAFGUMigrationMessage"])
    except:
        return (None, None)

def route(cmd, *opts):
    """Python wrapper for /sbin/route command."""
    p = subprocess.Popen(["/sbin/route",
                          cmd] + list(opts),
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE)
    out, err = p.communicate()
    return out


re_interface = re.compile(r'^\s*interface: (?P<dev>.+)$')

def get_route_dev(host):
    """Find which network interface access to 'host' is routed through."""
    for line in route("get", host).splitlines():
        m = re_interface.search(line)
        if m:
            logging.debug(u"Found route to %s through %s" % (host, m.group("dev")))
            return m.group("dev")
    else:
        return None

def networksetup(cmd, *opts):
    """Python wrapper for /usr/sbin/networksetup command."""
    p = subprocess.Popen(["/usr/sbin/networksetup",
                          "-" + cmd] + list(opts),
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE)
    out, err = p.communicate()
    return out

re_srv = re.compile(r'^\([0-9*]+\) (?P<srv>.+)$')
re_port_dev = re.compile(r'^\(Hardware Port: (?P<port>.+), Device: (?P<dev>.*)\)$')
re_info = re.compile(r'^(?P<key>[^:]+): (?P<value>.*)$')
re_cur_net = re.compile(r'^Current \S+ Network: (?P<name>.*)$')

def get_devices():
    """Generate a list of network device dictionaries."""
    devices = list()
    for line in networksetup("listnetworkserviceorder").splitlines():
        # The service name and its hardware port appear on separate lines, so
        # remember the most recent service name and pair it with the port line.
        m = re_srv.search(line)
        if m:
            srv = m.group("srv")
        m = re_port_dev.search(line)
        if m:
            logging.debug(u"Found network service %s" % srv)
            devices.append({
                "srv": srv,
                "port": m.group("port") if m.group("port") else None,
                "dev": m.group("dev") if m.group("dev") else None,
            })
    for dev in devices:
        if dev["srv"]:
            for line in networksetup("getinfo", dev["srv"]).splitlines():
                m = re_info.search(line)
                if m:
                    dev[m.group("key")] = m.group("value") if m.group("value") != "none" else None
    return dict([(dev["dev"], dev) for dev in devices])

def check_device_class(host):
    """Check what class of network interface is used to access 'host'."""
    dev = get_route_dev(host)
    if not dev:
        logging.warn(u"No route to %s" % host)
        return "unknown"
    devices = get_devices()
    if dev not in devices:
        logging.warn(u"Failed to get information for %s" % dev)
        return "unknown"
    for name, matchre in (
        ("ethernet", re.compile(r'ethernet', re.I)),
        ("wifi", re.compile(r'(wi-fi|airport)', re.I)),
    ):
        if matchre.search(devices[dev]["port"]):
            return name
    else:
        return "unknown"

def parse_session_statistics(text):
    """Parse the 'key: value' lines from rsync --stats output into a dict."""
    stats = dict()
    for line in text.split("\n"):
        try:
            key, value = line.split(": ", 1)
            stats[key] = value
        except ValueError:
            pass
    return stats


def wash_returncode(returncode):
    """Ignore certain error codes."""
    if returncode == 24:
        # Partial transfer due to vanished source files.
        return 0
    else:
        return returncode

def run_backup(source_dir, dest_dir):
    # Note: /usr/local/bin/rsync3 is presumably a separately installed rsync 3.x
    # binary; --iconv requires rsync 3 (OS X shipped rsync 2.6.9 at the time).
    backup_cmd = (u"/usr/local/bin/rsync3",
                  u"--iconv=UTF-8-MAC,UTF-8",
                  u"--recursive",
                  u"--links",
                  u"--times",
                  u"--executability",
                  u"--compress",
                  u"--stats",
                  u"--partial-dir=.rsync-partial",
                  u"--filter=merge %s" % FILTER_FILE,
                  u"--delete-after",
                  u"--delete-excluded",
                  u"--ignore-errors",
                  u"--timeout=300",
                  u"--rsh=ssh -c blowfish-cbc -p 2202",
                  source_dir.encode("utf-8"),
                  dest_dir.encode("utf-8"))
    logging.debug(u" ".join(backup_cmd))
    p = subprocess.Popen(backup_cmd,
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE)
    (stdout, stderr) = p.communicate()
    now = time.time()
    result_dict = {
        "last_run": now,
        "returncode": wash_returncode(p.returncode),
        "stdout": stdout.decode("utf-8", "ignore"),
        "stderr": stderr.decode("utf-8", "ignore"),
    }
    result_dict.update(parse_session_statistics(result_dict["stdout"]))
    return result_dict

def main(argv):
    p = optparse.OptionParser()
    p.set_usage("""Usage: %prog [options] source_dir dest_dir""")
    p.add_option("-v", "--verbose", action="store_true")
    p.add_option("-d", "--randomdelay", type="int", dest="randomdelay")
    options, argv = p.parse_args(argv)
    if len(argv) != 3:
        print >>sys.stderr, p.get_usage()
        return 1
    source_dir = argv[1].decode("utf-8")
    dest_dir = argv[2].decode("utf-8")

    if options.verbose:
        log_level = logging.DEBUG
    else:
        log_level = logging.INFO
    logging.basicConfig(
        level=log_level,
        format="%(asctime)s %(filename)s[%(process)d]: %(message)s"
    )

    (status, message) = get_status_menu()
    if status == STATUS_ACTIVE:
        set_status_menu(STATUS_ERROR, u"Last backup interrupted")
    elif status in (STATUS_OK, STATUS_ERROR):
        set_status_menu(status, message)
    else:
        set_status_menu(STATUS_UNKNOWN, u"Waiting for network…")

    if not os.path.exists(BACKUP_STATUS_PLIST):
        logging.warn(u"No previous backup run found, initializing first backup")
        options.verbose = True
        logging.info(u"Sleeping for 30 seconds")
        time.sleep(30)
    elif options.randomdelay:
        sleep_time = random.randrange(0, options.randomdelay)
        logging.info(u"Sleeping for %d seconds" % sleep_time)
        time.sleep(sleep_time)

    set_status_menu(STATUS_ACTIVE, u"Checking network…")
    logging.debug(u"Checking network class...")
    net_class = check_device_class(TEST_HOST)
    logging.info(u"Network is '%s'" % net_class)
    if net_class != "ethernet":
        logging.warn(u"Aborting migration backup as network isn't ethernet")
        (status, message) = get_status_menu()
        set_status_menu(status, message)
        return 0

    set_status_menu(STATUS_ACTIVE, u"Running backup…")
    logging.info(u"Backing up %s to %s".encode("utf-8") % (source_dir, dest_dir))
    status_dict = run_backup(source_dir, dest_dir)
    logging.info(u"Backup finished with return code %d" % status_dict["returncode"])

    set_status_menu(STATUS_ACTIVE, u"Finishing…")
    logging.debug(u"Saving status to %s".encode("utf-8") % BACKUP_STATUS_PLIST)
    writePlist(status_dict, BACKUP_STATUS_PLIST)
    logging.info(u"Done.")

    dt = datetime.datetime.fromtimestamp(status_dict["last_run"])
    if status_dict["returncode"] == 0:
        set_status_menu(STATUS_OK, dt.strftime("%Y-%m-%d %H:%M"))
    else:
        set_status_menu(STATUS_ERROR, u"Backup failed")
    return 0


if __name__ == '__main__':
    sys.exit(main(sys.argv))
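
For a one-off manual run outside launchd, the wrapper takes the same arguments the plist passes it. A sketch, with the install path and destination taken from the launchd plist above and <backup-uuid> standing in for the machine's UUID from /Users/.dafgu/:

sudo /usr/local/munki/run_backup.py --verbose \
    /Users/ dafgubackup@backupserver.example.com:<backup-uuid>

Omitting --randomdelay skips the random sleep; the run still aborts unless TEST_HOST is reachable over Ethernet.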