Skip to content

Instantly share code, notes, and snippets.

@bdclark
Last active August 29, 2015 14:04
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save bdclark/8c2b48b7760d373b5cfe to your computer and use it in GitHub Desktop.
Simple LDIF export
#!/usr/bin/env bash
# Simple LDIF export: dumps an LDAP subtree to a gzipped LDIF file and
# uploads it to an hourly/daily/weekly/monthly folder in an S3 bucket.
# -e: abort on any unhandled command failure.
set -e
# Print usage/help text to stdout.
usage() {
cat <<EOF
Usage: $0 -H|-D|-W|-M
Export ldif in gzip format, send to S3 bucket
Assumes bucket policy will handle file lifecycles
-H write to hourly folder in bucket
-D write to daily folder in S3 bucket
-W write to weekly folder in S3 bucket
-M write to monthly folder in S3 bucket
Additional settings within script
EOF
}
# --- Settings: edit these for your environment ---
host='myhost'
# LDAP server port (10389 is a common ApacheDS default)
port='10389'
# Base DN of the directory tree being exported
base_dn='o=exampleorg'
# Timestamp embedded in the export filename, e.g. 2015-07-01_04h00m
datetime=$(date +"%Y-%m-%d_%Hh%Mm")
ldif_file="ldap_export_${datetime}.ldif"
# Local working directory where exports (and old .gz files) live
export_dir='/my/backup/directory'
# Destination bucket/prefix; the -H/-D/-W/-M folder is appended later
s3_bucket='s3://mybackupbucket/ldap'
while getopts ":hHDWM" opt; do
case $opt in
h) usage >&2
exit 1
;;
H) folder='hourly'
;;
D) folder='daily'
;;
W) folder='weekly'
;;
M) folder='monthly'
;;
\?) echo "Invalid option: -$OPTARG" >&2
exit 1
;;
esac
done
if [ -z ${folder+x} ]; then
echo "Must supply a valid option"
exit 1
fi
cd "$export_dir"
# Build top-level OU structure. LDIF records must be separated by blank
# lines (RFC 2849), so each entry — and the version line — gets one.
cat << EOF > "$ldif_file"
version: 1

dn: ou=users,${base_dn}
objectClass: organizationalUnit
objectClass: top
ou: users

dn: ou=groups,${base_dn}
objectClass: organizationalUnit
objectClass: top
ou: groups

EOF
# Append contents of groups OU to file; the filter excludes the OU entry
# itself, which was already written above. (ldapsearch -LLL is assumed to
# blank-line-terminate each entry — verify against your server's output.)
ldapsearch -LLL -h "$host" -p "$port" -x -b "ou=groups,${base_dn}" '(!(objectClass=organizationalUnit))' >> "$ldif_file"
# Append contents of users OU to file
ldapsearch -LLL -h "$host" -p "$port" -x -b "ou=users,${base_dn}" '(!(objectClass=organizationalUnit))' >> "$ldif_file"
# --force: overwrite a .gz left by an earlier run in the same minute
gzip --force "$ldif_file"
zip_file="${ldif_file}.gz"
# Upload to the folder selected by the command-line flag
s3_path="${s3_bucket}/${folder}/${zip_file}"
aws s3 cp "$zip_file" "$s3_path" > /dev/null 2>&1 || { echo "Error copying $zip_file to $s3_path" >&2; exit 1; }
echo "Successfully exported $ldif_file and pushed to $s3_bucket/$folder"
# Delete all export gzips except current (-F --: match the filename as a
# literal string, not a regex/option)
find . -maxdepth 1 -type f -name "ldap_export*.gz" | grep -vF -- "$zip_file" | xargs rm -f
#!/usr/bin/env bash
# This is an example wrapper in cron.hourly (use same command from cron.daily with -D, etc.)
# AWS_CONFIG_FILE points the aws CLI at root's credentials (cron runs with
# a minimal environment); all script output is tagged into syslog via logger.
AWS_CONFIG_FILE=/root/.aws/config /my/backup/directory/ldap_export.sh -H 2>&1 | /usr/bin/logger -t ldap_export-hourly
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment