Publishing Script for IPFS and AWS
import { exec } from "child_process";
import AWS from "aws-sdk";
import { promisify } from "util";
import { promises as fs, createReadStream } from "fs";
import * as path from "path";
import * as mime from "mime-types";
/*
 * Node.js script to publish a static website simultaneously to IPFS and AWS.
 * Dependencies (see the install sketch below):
 *   - Node.js 18+
 *   - "aws-sdk": "^2.1275.0"
 *   - "mime-types": "^2.1.35"
 */
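// Minimal install sketch for the npm dependencies listed above (versions are
// the ones the script was written against; newer 2.x releases should also work):
//   npm install aws-sdk@^2.1275.0 mime-types@^2.1.35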
// Directory containing the static website you wish to serve
const DIST_DIR = "dist";
// The AWS credentials profile to use. The AWS SDK will read the
// $HOME/.aws/credentials file and pull credentials from the matching
// section header (an example entry is sketched below).
// See: https://docs.aws.amazon.com/sdk-for-javascript/v2/developer-guide/loading-node-credentials-shared.html
const PROFILE = "";
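// Example ~/.aws/credentials entry (a sketch; "my-publish-profile" is a
// hypothetical name, set PROFILE above to whatever your profile is called):
//   [my-publish-profile]
//   aws_access_key_id = <your access key ID>
//   aws_secret_access_key = <your secret access key>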
// The domain name you are serving from.
const DOMAIN_NAME = "example.com";
// The S3 bucket you are uploading to. Note that because of how
// S3 static website hosting works, the bucket name should be the
// same as your domain name.
const S3_BUCKET = DOMAIN_NAME;
// The AWS CloudFront distribution ID you are serving from
// in order to provide HTTPS access.
const CLOUDFRONT_DISTRIBUTION_ID = "";
// The ID of the AWS Route 53 hosted zone hosting your domain name.
// This is used to update the DNSLink record (see the example record
// shape below).
const HOSTED_ZONE_ID = "";
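// For reference, the UPSERT issued by updateDnsRecord() below produces a TXT
// record of roughly this shape (the hash shown is purely illustrative):
//   _dnslink.example.com.  300  IN  TXT  "dnslink=/ipfs/Qm..."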
const credentials = new AWS.SharedIniFileCredentials({ profile: PROFILE });
AWS.config.credentials = credentials;

/**
 * Publish the static website to IPFS, returning the hash of the
 * top-level directory entry.
 */
async function publishToIpfs() {
    // -r adds the directory recursively; -Q prints only the final root hash
    const ipfsResult = await promisify(exec)("ipfs add -rQ " + DIST_DIR);
    const folderHash = ipfsResult.stdout.trim();
    console.log(`Uploaded directory to IPFS with hash ${folderHash}`);
    return folderHash;
}

/**
 * Update the DNSLink record for the domain name.
 */
async function updateDnsRecord(ipfsFolderHash) {
    const route53 = new AWS.Route53();
    const changeOrder = {
        "HostedZoneId": HOSTED_ZONE_ID,
        "ChangeBatch": {
            "Comment": `Update website (${new Date().toISOString()}, ${ipfsFolderHash})`,
            "Changes": [
                {
                    "Action": "UPSERT",
                    "ResourceRecordSet": {
                        "Name": `_dnslink.${DOMAIN_NAME}.`,
                        "Type": "TXT",
                        "TTL": 300,
                        "ResourceRecords": [
                            {
                                // Route 53 requires TXT record values to be wrapped in double quotes
                                "Value": `"dnslink=/ipfs/${ipfsFolderHash}"`
                            }
                        ]
                    }
                }
            ]
        }
    };
    await route53.changeResourceRecordSets(changeOrder).promise();
    console.log(`Hosted zone ${HOSTED_ZONE_ID} successfully updated`);
}

/**
 * Publish the static website to S3.
 */
async function publishToS3() {
    // Adapted from https://stackoverflow.com/a/65862128/2033557
    const s3 = new AWS.S3();
    // Recursive getFiles from
    // https://stackoverflow.com/a/45130990/831465
    async function getFiles(dir) {
        const dirents = await fs.readdir(dir, { withFileTypes: true });
        const files = await Promise.all(
            dirents.map((dirent) => {
                const res = path.resolve(dir, dirent.name);
                return dirent.isDirectory() ? getFiles(res) : res;
            })
        );
        return Array.prototype.concat(...files);
    }
    const files = await getFiles(DIST_DIR);
    const uploads = files.map((filePath) =>
        s3
            .putObject({
                Key: path.relative(DIST_DIR, filePath),
                Bucket: S3_BUCKET,
                Body: createReadStream(filePath),
                // Fall back to a generic binary type when the extension is unrecognized
                ContentType: mime.lookup(filePath) || "application/octet-stream",
            })
            .promise()
    );
    await Promise.all(uploads);
    console.log(`${uploads.length} files successfully uploaded to S3`);
}

/**
 * Invalidate the cache for the CloudFront distribution.
 * This is necessary because CloudFront caches content
 * for 24 hours by default.
 */
async function invalidateCloudfrontCache(ipfsFolderHash) {
    const cloudFront = new AWS.CloudFront();
    const invalidationResult = await cloudFront.createInvalidation({
        DistributionId: CLOUDFRONT_DISTRIBUTION_ID,
        // The IPFS hash doubles as the unique caller reference for this change
        InvalidationBatch: { CallerReference: ipfsFolderHash, Paths: { Quantity: 1, Items: [ "/*" ] } }
    }).promise();
    console.log(invalidationResult);
    await cloudFront.waitFor("invalidationCompleted", {
        DistributionId: CLOUDFRONT_DISTRIBUTION_ID,
        Id: invalidationResult.Invalidation.Id
    }).promise();
    console.log("CloudFront invalidation completed");
}

// Main flow: publish to IPFS first, then update the DNSLink record and
// upload to S3 in parallel, and finally invalidate the CloudFront cache.
const ipfsFolderHash = await publishToIpfs();
await Promise.all([updateDnsRecord(ipfsFolderHash), publishToS3()]);
await invalidateCloudfrontCache(ipfsFolderHash);
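One possible way to run the script (a sketch, assuming it is saved as publish.mjs next to the dist directory, the constants at the top are filled in, and the ipfs CLI is installed with its repository initialized):

node publish.mjs

The .mjs extension (or "type": "module" in package.json) is what makes the top-level await calls at the end of the script valid.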