@mornir
Created December 25, 2019 20:13
Netlify Function that backs up the content of a Sanity CMS dataset to Google Drive and posts a notification to Slack
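The function reads its configuration from six environment variables, all referenced in the code below: PROJECT_ID, DATASET and SANITY_TOKEN for Sanity, FOLDER_ID for the target Google Drive folder, CREDENTIALS for the Google API credentials, and SLACK_WEBHOOK_URL for the Slack notification. Because the code calls JSON.parse(process.env.CREDENTIALS), the credentials must be stored as a single stringified JSON value; the short sketch below (not part of the gist) shows one way to produce that value from a downloaded Google service-account key, assuming the key file is saved as service-account.json. Since the function only requests the drive.file scope, the target folder presumably also has to be shared with that service account.

// One-off helper (hypothetical): collapse a downloaded Google service-account
// key file into a single line so it can be pasted into the CREDENTIALS
// environment variable in the Netlify UI.
const fs = require('fs')

const key = fs.readFileSync('./service-account.json', 'utf8') // assumed filename
console.log(JSON.stringify(JSON.parse(key)))

The function itself follows.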
const differenceInDays = require('date-fns/differenceInDays')
const parseISO = require('date-fns/parseISO')
const sanity = require('@sanity/client')
const exportDataset = require('@sanity/export')
const { google } = require('googleapis')
const fetch = require('node-fetch')
const path = require('path')
const fs = require('fs')
// ID of the Google Drive folder that receives the backups
const FOLDER_ID = process.env.FOLDER_ID
// Name of the Sanity dataset to back up
const DATASET = process.env.DATASET
const sanityClient = sanity({
  projectId: process.env.PROJECT_ID,
  dataset: DATASET,
  token: process.env.SANITY_TOKEN,
  useCdn: false,
})
async function backup() {
  await exportDataset({
    // Instance of @sanity/client configured with the correct project ID and dataset
    client: sanityClient,
    // Name of dataset to export
    dataset: DATASET,
    // Path to write the .tar.gz archive to
    outputPath: path.join('/tmp', `${DATASET}.tar.gz`),
    // Whether or not to export assets. Note that this operation is currently slightly lossy;
    // metadata stored on the asset document itself (original filename, for instance) might be lost
    // Default: `true`
    assets: false,
    // Exports documents only, without downloading or rewriting asset references
    // Default: `false`
    raw: true,
    // Whether or not to export drafts
    // Default: `true`
    drafts: false,
  })
  // Authenticate against the Google Drive API with credentials stored as
  // stringified JSON in the CREDENTIALS environment variable
  const client = await google.auth.getClient({
    credentials: JSON.parse(process.env.CREDENTIALS),
    scopes: 'https://www.googleapis.com/auth/drive.file',
  })

  const drive = google.drive({
    version: 'v3',
    auth: client,
  })

  // Upload the archive into the backup folder on Google Drive
  await drive.files.create({
    requestBody: {
      name: `${DATASET}.tar.gz`,
      mimeType: 'application/gzip',
      parents: [FOLDER_ID],
    },
    media: {
      mimeType: 'application/gzip',
      body: fs.createReadStream(path.join('/tmp', `${DATASET}.tar.gz`)),
    },
  })
  // Prune old backups: keep the five most recent files and delete any older
  // ones that were created more than 30 days ago
  // 1. Get the list of backup files inside the folder, newest first
  const res = await drive.files.list({
    fields: 'files(id, parents, createdTime)',
    q: `'${FOLDER_ID}' in parents`,
    orderBy: 'createdTime desc',
  })

  // 2. Skip the five newest files, keep only those older than 30 days,
  //    and delete them in parallel
  const promisesArray = res.data.files
    .slice(5)
    .filter(file => {
      const days = differenceInDays(new Date(), parseISO(file.createdTime))
      return days > 30
    })
    .map(file => drive.files.delete({ fileId: file.id }))

  return Promise.all(promisesArray)
}
exports.handler = function(event, context, callback) {
  backup()
    .then(() => {
      // Notify Slack through an incoming webhook once the backup has been uploaded
      return fetch(process.env.SLACK_WEBHOOK_URL, {
        headers: {
          'content-type': 'application/json',
        },
        method: 'POST',
        body: JSON.stringify({ text: `Backup completed successfully` }),
      })
    })
    .then(() => {
      callback(null, {
        statusCode: 200,
        body: 'Everything went well!',
      })
    })
    .catch(error => {
      callback(null, {
        statusCode: 422,
        body: `Oops! Something went wrong. ${error}`,
      })
    })
}
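Once deployed, the function can be triggered over HTTP like any other Netlify Function. A minimal trigger sketch, assuming the function file is named backup.js and the placeholder site URL is replaced with the real one:

// Hypothetical trigger script; the site URL and function name are placeholders.
const fetch = require('node-fetch')

fetch('https://your-site.netlify.com/.netlify/functions/backup', { method: 'POST' })
  .then(res => res.text())
  .then(body => console.log(body)) // "Everything went well!" on success
  .catch(err => console.error(err))

The handler never inspects the incoming request, so the HTTP method and body do not matter; any call to the URL simply starts a backup.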