require('dotenv').config()
const path = require('path')
const fs = require('fs')
const yaml = require('js-yaml')
const { exec } = require('child_process')
const { appBuilderPath } = require('app-builder-bin')
const AWS = require('aws-sdk')
const axios = require('axios')
const packageData = JSON.parse(fs.readFileSync('package.json'))
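
// Placeholders below: point distFolder at your electron-builder output directory and set appName;
// the zip/dmg filenames are derived from the version in package.json.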
const distFolder = './YOUR DIST FOLDER FOR ELECTRON HERE/'
const appName = 'YOUR APP NAME HERE'
const originalApp = distFolder + 'mac/' + appName + '.app'
const finalZip = `${distFolder}${appName}-${packageData.version}-mac.zip`
const finalDmg = `${distFolder}${appName}-${packageData.version}.dmg`
const latestMacFile = './dist-electron/latest-mac.yml'
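
// S3 client for uploading the release artifacts; credentials and bucket settings
// come from the .env file loaded by dotenv above.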
const s3Client = new AWS.S3({
  accessKeyId: process.env.AWS_ACCESS_KEY_ID,
  secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
  region: process.env.AWS_BUCKET_REGION,
  bucket: process.env.AWS_BUCKET_NAME,
  signatureVersion: 'v4',
  s3ForcePathStyle: 'true'
})
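
// Streams a local file to the S3 bucket under its basename and resolves when the upload completes.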
function uploadToS3 (filePath) {
  // ex: /path/to/my-picture.png becomes my-picture.png
  const fileName = path.basename(filePath)
  const fileStream = fs.createReadStream(filePath)
  console.log(`Uploading ${fileName} to S3 with (${process.env.AWS_ACCESS_KEY_ID.substring(0, 6)}...)`)
  // We wrap this in a promise so that we can handle a fileStream error,
  // since it can happen *before* S3 actually reads the first 'data' event.
  return new Promise(function (resolve, reject) {
    fileStream.once('error', reject)
    s3Client.upload({
      Bucket: process.env.AWS_BUCKET_NAME, Key: fileName, Body: fileStream
    }).promise().then(resolve, reject)
  })
}
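
// Main flow: create a PlugIns folder inside the built .app, re-zip it, recompute the
// zip's size/sha512/blockMapSize, patch latest-mac.yml accordingly, then upload everything to S3.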
console.log('Adding PlugIns folder into app...')
exec(`mkdir -p ${originalApp}/Contents/PlugIns`, (error, stdout, stderr) => {
  if (error) {
    console.log(`error: ${error}`)
    return
  }
  if (stderr) {
    console.log(`stderr: ${stderr}`)
    return
  }
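
  // Re-zip the modified .app with ditto (-c -k: create a PKZip archive,
  // --sequesterRsrc: preserve resource forks, --keepParent: include the .app folder itself).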
  console.log('Zipping .app...')
  exec(`ditto -c -k --sequesterRsrc --keepParent ${originalApp} ${finalZip}`, (error, stdout, stderr) => {
    if (error) {
      console.log(`error: ${error}`)
      return
    }
    if (stderr) {
      console.log(`stderr: ${stderr}`)
      return
    }
    console.log('Extracting new zip values...')
    exec(`${appBuilderPath} blockmap -i ${finalZip}`, (error, stdout, stderr) => {
      if (error) {
        console.log(`error: ${error}`)
        return
      }
      if (stderr) {
        console.log(`stderr: ${stderr}`)
        return
      }
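
      // app-builder's `blockmap` command prints the new zip's size, sha512 and
      // blockMapSize as JSON on stdout; those values must replace the stale ones in latest-mac.yml.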
      const { size, sha512, blockMapSize } = JSON.parse(stdout)

      let latestMacYaml
      try {
        latestMacYaml = yaml.safeLoadAll(fs.readFileSync(latestMacFile, 'utf8'))
      } catch (e) {
        console.log(e)
        return
      }

      const { size: oldSize, sha512: oldSha512, blockMapSize: oldBlockMapSize } = latestMacYaml[0]['files'][0]
      console.log(`size ${oldSize} -> ${size}`)
      console.log(`sha512 ${oldSha512} -> ${sha512}`)
      console.log(`blockMapSize ${oldBlockMapSize} -> ${blockMapSize}`)

      latestMacYaml[0]['files'][0].size = size
      latestMacYaml[0]['files'][0].sha512 = sha512
      latestMacYaml[0]['files'][0].blockMapSize = blockMapSize
      latestMacYaml[0].sha512 = sha512

      fs.writeFileSync(latestMacFile, yaml.safeDump(latestMacYaml[0]), 'utf8')
      console.log('YAML updated.')
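
      // Upload the patched latest-mac.yml together with the re-zipped app and the dmg,
      // so the published metadata matches the artifacts the updater will download.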
      const files = [latestMacFile, finalZip, finalDmg]
      files.forEach(f => {
        uploadToS3(f)
          .then(function (result) {
            console.log('Uploaded to s3:', result)
          })
          .catch(function (err) {
            console.error('something bad happened:', err.toString())
          })
      })
    })
  })
})