Last active
August 29, 2015 14:06
-
-
Save twidi/1182a2055956474c9ee9 to your computer and use it in GitHub Desktop.
manage_dropbox_images.sh: The goal of this script is to reduce the size of the Dropbox folder where images taken with a smartphone are automatically uploaded, by replacing them with links to the same images found in other folders (for example, if all images from your smartphone are regularly sorted into a specific folder).
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/bin/bash
# The goal of this script is to reduce the size of the Dropbox folder
# where images taken with a smartphone are automatically uploaded, by
# replacing each image with a symlink to the same image found in another
# folder (for example if all images from your smartphone are regularly
# sorted into a specific folder).
#
# Matching strategy, per uploaded image:
#   1. filename match (the digits of the name, e.g. IMG_20140920_121314
#      also matches 2014-09-20-12-13-14), confirmed by identical exif dates;
#   2. otherwise search by exif date, widening from the same ten-minute
#      span, to the same hour, to the same day.

# Folder with the auto-uploaded camera images (will receive the symlinks).
DIR_CHARG=~/Dropbox/Chargements\ appareil\ photo/
# Folder holding the sorted/original copies of the images.
DIR_IMG=~/Dropbox/Photos/Images/

# Export so child processes (sort, grep, exiv2) see it too: the fixed
# locale keeps sort order and the cut -c19- offset on exiv2 output stable.
export LANG=C

# set -x  # uncomment to trace execution

# Abort if the upload folder is missing rather than working in the wrong dir.
cd "$DIR_CHARG" || { echo "Cannot cd to '$DIR_CHARG'" >&2; exit 1; }

# List images to process; -type f skips images that are already links.
IMAGES=$(find ./ -name "*.jpg" -type f | sort)

# Split the found images on newlines only, so that names containing
# spaces survive the unquoted $IMAGES expansion in the loop below.
IFS="
"

# Loop on all found images.
for IMG_CHARG in $IMAGES
do
    # Keep only the filename.
    IMG_NAME=$(basename "$IMG_CHARG")

    # Get the exif date of the uploaded image (empty if none).
    EXIV_CHARG=$(exiv2 pr "$IMG_CHARG" | grep 'Image timestamp' | cut -c19-)

    # Turn every non-digit run into a glob "*" so that, for example,
    # IMG_20140920_121314 is a match for 2014-09-20-12-13-14.
    IMG_RE=$(echo "$IMG_NAME" | sed -e 's/[^0-9]\+/\*/g')

    # Get the first image with a matching name.
    IMG_ORIG=$(find "$DIR_IMG" -name "*$IMG_RE.jpg" | head -1)

    # If an image was found by name, confirm it is really the same shot
    # by checking the equality of the exif dates.
    if [ "$IMG_ORIG" ]
    then
        # Get the exif date of the found image.
        EXIV_ORIG=$(exiv2 pr "$IMG_ORIG" | grep 'Image timestamp' | cut -c19-)
        if [ "$EXIV_CHARG" != "$EXIV_ORIG" ]
        then
            # Dates differ: behave as if no image had been found.
            IMG_ORIG=""
        fi
    fi

    # No matching image found by name: fall back to searching by exif date.
    if [ ! "$IMG_ORIG" ]
    then
        # Impossible if the uploaded image carries no exif date.
        if [ "$EXIV_CHARG" ]
        then
            # Check for images within the same ten-minute span as the exif date.
            DATE_RE=$(echo "$EXIV_CHARG" | cut -c1-15 | sed -e 's/[^0-9]\+/\*/g')
            MATCH_IMGS=$(find "$DIR_IMG" -name "*$DATE_RE[0-9]*[0-9][0-9]*.jpg")

            # If not found, widen to the same hour.
            if [ ! "$MATCH_IMGS" ]
            then
                DATE_RE=$(echo "$EXIV_CHARG" | cut -c1-13 | sed -e 's/[^0-9]\+/\*/g')
                MATCH_IMGS=$(find "$DIR_IMG" -name "*$DATE_RE*[0-9][0-9]*[0-9][0-9]*.jpg")
            fi

            # If still not found, widen to the same day.
            if [ ! "$MATCH_IMGS" ]
            then
                DATE_RE=$(echo "$EXIV_CHARG" | cut -c1-10 | sed -e 's/[^0-9]\+/\*/g')
                MATCH_IMGS=$(find "$DIR_IMG" -name "*$DATE_RE*[0-9][0-9]*[0-9][0-9]*[0-9][0-9]*.jpg")
            fi

            # Keep the first candidate whose exif date matches exactly.
            for MATCH_IMG in $MATCH_IMGS
            do
                # Get the exif date of the candidate image.
                EXIV_MATCH=$(exiv2 pr "$MATCH_IMG" | grep 'Image timestamp' | cut -c19-)
                if [ "$EXIV_CHARG" == "$EXIV_MATCH" ]
                then
                    # We have a match, keep this image.
                    IMG_ORIG=$MATCH_IMG
                    break
                fi
            done

            # No image found with the correct exif date: report and stop here.
            if [ ! "$IMG_ORIG" ]
            then
                echo "$IMG_CHARG => [FAILURE] NO IMG FOUND for exif date '$EXIV_CHARG'"
            fi
        else
            # No exif date in the uploaded file: report and stop here.
            echo "$IMG_CHARG => [FAILURE] NO EXIF DATE FOUND"
        fi
    fi

    # An image was found: replace the upload with a relative symlink to it.
    if [ "$IMG_ORIG" ]
    then
        # Compute the relative path for the link target.  The path is
        # passed as an argument (sys.argv) instead of being interpolated
        # into the -c code string, so quotes or spaces in filenames cannot
        # break or inject into the Python snippet; python3 replaces the
        # original Python 2 "print" statement, which no longer runs on
        # current systems.
        IMG_ORIG=$(python3 -c 'import os.path, sys; print(os.path.relpath(sys.argv[1]))' "$IMG_ORIG")
        echo "$IMG_NAME => $IMG_ORIG"
        # And make the final link (force-overwrite any previous one).
        ln -f -s "$IMG_ORIG" "$IMG_NAME"
    fi
done
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment