Prerequisites:
- Have a free Heroku account
- Have a free MongoDB cluster
- Have a free Amazon S3 bucket
This Node.js code works locally. To make it work on Heroku, you need to add the https://github.com/uhray/heroku-buildpack-mongo.git buildpack, so that the mongodump and mongoexport binaries are available on the dyno.
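You can add the buildpack from the Heroku CLI, for example (replace <your-app> with your app's name):

heroku buildpacks:add https://github.com/uhray/heroku-buildpack-mongo.git --app <your-app>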
const cron = require('node-cron');
const fs = require('fs');
const aws = require('aws-sdk');
const shell = require('./shellHelper');

const mongodumpBackup = () => {
  // Run the backup every day at 1 AM
  cron.schedule('0 1 * * *', () => {
    // Write a mongodump + a CSV export to the dyno's filesystem
    // Note: mongoexport requires --fields when exporting to CSV
    shell.series([
      'mongodump --uri <COMPLETE URI MONGODB STRING> --collection <COLLECTIONNAME>',
      'mongoexport --uri <COMPLETE URI MONGODB STRING> --collection <COLLECTIONNAME> --type csv --fields <FIELD1,FIELD2> --out <PATH/TO/CSV>.csv',
    ], (err) => {
      if (err) {
        console.error('Mongo backup failed', err);
        return;
      }
      console.log('Mongo Backup + Exports created');
      // S3 upload
      // Config -> use the same region as the Heroku app, for low latency
      aws.config.update({ region: 'us-east-1' });
      // Secrets -> store these in your env file
      const s3 = new aws.S3({
        accessKeyId: process.env.AWS_ACCESS_KEY_ID,
        secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
      });
      const uploadFile = (filePath, fileName) => {
        console.log('uploading to s3...');
        // File
        const fileStream = fs.createReadStream(`${filePath}${fileName}`);
        fileStream.on('error', (fsErr) => {
          console.error('File Error', fsErr);
        });
        // Payload
        const uploadParams = {
          Bucket: process.env.S3_BUCKET_NAME,
          Key: fileName,
          Body: fileStream,
        };
        // Upload (AWS SDK v2 callback style)
        s3.upload(uploadParams, (s3Err, data) => {
          if (s3Err) {
            console.error('Error', s3Err);
          } else if (data) {
            console.log(`Uploaded ${fileName} with success`, data.Location);
          }
        });
      };
      // Trigger the uploads to S3
      uploadFile('./dump/<databaseName>/', '<collectionName>.bson');
      uploadFile('./exports/csv/', 'export.csv');
    });
  });
};

module.exports = mongodumpBackup;
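The secrets referenced above are read from the env file that the server loads below (./config/config.env). A minimal sample, with placeholder values:

NODE_ENV=production
PORT=5000
AWS_ACCESS_KEY_ID=<YOUR_AWS_ACCESS_KEY_ID>
AWS_SECRET_ACCESS_KEY=<YOUR_AWS_SECRET_ACCESS_KEY>
S3_BUCKET_NAME=<YOUR_BUCKET_NAME>

On Heroku itself, set these as config vars (heroku config:set KEY=value) instead of committing the file.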
const express = require('express');
const path = require('path');
const dotenv = require('dotenv');
const cors = require('cors');
require('colors'); // enables string color helpers like .green
const mongodumpBackup = require('./utils/mongodumpBackup');

// Env
dotenv.config({ path: './config/config.env' });

// Express
const app = express();

// Body parser
app.use(express.json());

// Cors
app.use(cors());

// Serve the backup folder statically so the dump can be downloaded over HTTP
app.use(express.static(path.join(__dirname, './dump/<databasename>')));

// Start server
const port = process.env.PORT || 5000;
const mode = process.env.NODE_ENV;
app.listen(port, () => {
  console.log(`Server Started in ${mode} mode on port ${port}`.green);
  if (process.env.NODE_ENV === 'production') {
    // Start the backup + upload cron task
    mongodumpBackup();
  }
});
const childProcess = require('child_process');

// Execute a single shell command where "cmd" is a string
exports.exec = function (cmd, cb) {
  // Naive whitespace split; fine here because none of our commands use quoted arguments
  const parts = cmd.split(/\s+/g);
  const p = childProcess.spawn(parts[0], parts.slice(1), { stdio: 'inherit' });
  p.on('exit', (code) => {
    let err = null;
    if (code) {
      err = new Error(`command "${cmd}" exited with wrong status code "${code}"`);
      err.code = code;
      err.cmd = cmd;
    }
    if (cb) cb(err);
  });
};

// Execute multiple commands in series
exports.series = function (cmds, cb) {
  const execNext = function () {
    exports.exec(cmds.shift(), (err) => {
      if (err) {
        cb(err);
      } else if (cmds.length) execNext();
      else cb(null);
    });
  };
  execNext();
};
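shell.series runs each command only after the previous one exits cleanly, which is why the S3 upload earlier only starts once both the dump and the export have finished. A quick standalone usage sketch:

const shell = require('./shellHelper');

shell.series([
  'echo first',
  'echo second',
], (err) => {
  if (err) console.error('a command failed', err);
  else console.log('all commands finished');
});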