@cab
Created November 21, 2016 22:31
## SAVING
# (This block uses `local`, so it is excerpted from a function.)
if [[ $BUILDKITE_BRANCH == "master" ]]; then
  echo "+++ :docker: Saving image $COMPOSE_SERVICE_DOCKER_IMAGE_NAME"
  local name="${BUILDKITE_PIPELINE_SLUG}_${BUILDKITE_BRANCH}_${COMPOSE_SERVICE_NAME}"
  local slug="/tmp/docker-cache/$name.tar.gz"
  local BUILDKITE_IMAGE_CACHE_BUCKET="xxx-docker-cache"
  local images_file="s3://$BUILDKITE_IMAGE_CACHE_BUCKET/$name.images"
  # Collect every layer ID behind the images built for this job (they are tagged
  # with the job ID, dashes stripped); the outer echo flattens the list onto one
  # space-separated line so it can be stored and compared as a single string.
  local images=$(echo $(docker images -a | grep "$(echo "$BUILDKITE_JOB_ID" | sed 's/-//g')" | awk '{print $1}' | xargs -n 1 docker history -q | grep -v '<missing>'))
  # Upload only when we actually built something and the image list differs from
  # the one already cached in S3.
  if [[ -n $images ]] && ( ! aws s3 ls "$images_file" || [[ "$images" != $(aws s3 cp "$images_file" -) ]] ); then
    rm -rf /tmp/docker-cache
    mkdir -p /tmp/docker-cache
    docker save $images | gzip -c > "$slug"   # $images intentionally unquoted: one argument per ID
    aws s3 cp "$slug" "s3://$BUILDKITE_IMAGE_CACHE_BUCKET/$name.tar.gz"
    # Store the ID list next to the tarball so the loader can detect a cache hit
    # without downloading the (much larger) tarball.
    echo "$images" | aws s3 cp - "s3://$BUILDKITE_IMAGE_CACHE_BUCKET/$name.images"
  fi
fi
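# For illustration (hypothetical values): with BUILDKITE_PIPELINE_SLUG=myapp,
# BUILDKITE_BRANCH=master and COMPOSE_SERVICE_NAME=web, the block above produces
# two S3 objects:
#   s3://xxx-docker-cache/myapp_master_web.tar.gz   (gzipped `docker save` of all layers)
#   s3://xxx-docker-cache/myapp_master_web.images   (one line of space-separated image IDs)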
# ...
## LOADING
echo "+++ :docker: Fetching cached docker images"
# See whether any of the cached images are missing locally, and load them if so.
(
  BUILDKITE_IMAGE_CACHE_BUCKET="xxx-docker-cache"
  name="${BUILDKITE_PIPELINE_SLUG}_${BUILDKITE_BRANCH}_${COMPOSE_SERVICE_NAME}"
  backup_name="${BUILDKITE_PIPELINE_SLUG}_master_${COMPOSE_SERVICE_NAME}"
  images_file="s3://$BUILDKITE_IMAGE_CACHE_BUCKET/$name.images"
  backup_images_file="s3://$BUILDKITE_IMAGE_CACHE_BUCKET/${backup_name}.images"
  # `docker inspect` exits 0 only when every listed image already exists locally,
  # so a status of 1 means at least one image is missing (or the list is absent).
  has_images=$(docker inspect $(aws s3 cp "$images_file" -) > /dev/null; echo $?)
  has_backup_images=$(docker inspect $(aws s3 cp "$backup_images_file" -) > /dev/null; echo $?)
  if aws s3 ls "$images_file" && [[ $has_images -eq 1 ]]; then
    echo "Downloading cache"
    aws s3 cp "s3://$BUILDKITE_IMAGE_CACHE_BUCKET/$name.tar.gz" - | gunzip -c | docker load
  elif aws s3 ls "$backup_images_file" && [[ $has_backup_images -eq 1 ]]; then
    # Fall back to the cache written by master builds.
    echo "Downloading backup cache (master)"
    aws s3 cp "s3://$BUILDKITE_IMAGE_CACHE_BUCKET/${backup_name}.tar.gz" - | gunzip -c | docker load
  else
    echo "No cache found"
  fi
)
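## USAGE (sketch)
# An assumption, not part of the original gist: one plausible wiring is to run
# the two blocks above from Buildkite agent hooks, loading before the build and
# saving after it. The hook locations are the agent's standard ones; the script
# paths are hypothetical.
#
#   hooks/pre-command:
#     #!/bin/bash
#     set -euo pipefail
#     source /etc/buildkite-agent/scripts/docker-cache-load.sh   # the LOADING block
#
#   hooks/post-command:
#     #!/bin/bash
#     set -euo pipefail
#     source /etc/buildkite-agent/scripts/docker-cache-save.sh   # the SAVING block
#
# The agent also needs AWS credentials with read/write access to the
# xxx-docker-cache bucket (e.g. via an instance profile).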