Skip to content

Instantly share code, notes, and snippets.

@madslundt
Created August 24, 2017 20:45
Show Gist options
  • Save madslundt/9da5386ab037e91483928a69b802a83b to your computer and use it in GitHub Desktop.
#!/bin/bash
# Upload every file under ${local_dir} to an rclone remote, enforcing a
# daily transfer cap of ${limit_gb} GB. Temp/hidden files are skipped,
# files still open in another process are waited on, and when the cap is
# reached the script sleeps 24 hours before continuing.
#
# Edit me
rclone_bin="/rclone/rclone"
# Options as an array so each flag stays a separate word without relying
# on unquoted word-splitting.
rclone_options=(--config=/rclone/rclone.conf --buffer-size 500M --checkers 16)
rclone_cloud_endpoint="gd:"
local_dir="/documents"
limit_gb=750
################################################################################

# Abort if another instance of this script is already running.
if pidof -o %PPID -x "$(basename "$0")" >/dev/null; then
  echo "[ $(date +%F@%T) ] Upload already in progress. Aborting."
  exit 3
fi

# Remaining upload budget (GB) for the current 24-hour window.
today_gb=$limit_gb

# Iterate over the file list NUL-delimited so paths containing spaces,
# leading whitespace, or newlines survive intact. Process substitution
# (instead of a pipe) keeps the loop in the current shell, so today_gb
# is not lost in a subshell.
while IFS= read -r -d '' n; do
  # Destination path relative to the root of the remote.
  destpath="$(dirname "$n" | sed -e s@"${local_dir}"@@)"

  # Skip hidden or partial files. $n is a full path, so patterns must
  # match anywhere in it (the old bare ".DS_STORE" pattern never matched).
  case "$n" in
    (*.partial~) continue ;;
    (*_HIDDEN~) continue ;;
    (*.QTFS) continue ;;
    (*.fuse*) continue ;;
    (*/.DS_Store | *.DS_STORE) continue ;;
  esac

  # File size in whole GB, rounded UP so sub-GB files still count
  # against the daily limit instead of truncating to 0.
  fileSize=$(du -sb "$n" | awk '{print $1}')
  fileSizeGb=$(( (fileSize + 999999999) / 1000000000 ))

  today_gb=$(( today_gb - fileSizeGb ))
  if [ "$today_gb" -le 0 ]; then
    # Amount actually transferred so far excludes the file still pending.
    echo "Transferred $(( limit_gb - today_gb - fileSizeGb )) GB today. Waiting 24 hours"
    sleep 24h
    # New day: reset the budget, charging the pending file against it.
    today_gb=$(( limit_gb - fileSizeGb ))
  fi

  # If the file is opened by another process, wait until it isn't.
  # lsof exits 0 while the file is in use; test its status directly
  # (the old $(lsof … >/dev/null) substitution was always empty, so the
  # wait loop never ran).
  while lsof -- "$n" >/dev/null 2>&1; do
    echo "[ $(date +%F@%T) ] File -> ${n} in use. Retrying in 10 seconds."
    sleep 10
  done

  # Copy file to remote destination, retaining its relative path.
  echo "[ $(date +%F@%T) ] Transferring file -> ${n} to ${rclone_cloud_endpoint} in ${destpath}."
  "${rclone_bin}" copy "${rclone_options[@]}" "$n" "${rclone_cloud_endpoint}${destpath}" >/dev/null 2>&1
done < <(find "${local_dir}" -type f -print0)

# Success (the old 'exit 02' returned status 2 despite the comment).
exit 0
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment