Feel free to contact me at robert.balicki@gmail.com or tweet at me @statisticsftw
This is a rough outline of how we use Next.js with S3/CloudFront. Hope it helps!
It assumes some familiarity with AWS.
```js
var aws = require('aws-sdk')
var ddb = new aws.DynamoDB()

exports.handler = async (event, context) => {
  let date = new Date()
  if (event.request.userAttributes.sub) {
    let params = {
      // TableName and the createdAt attribute are assumptions; adjust to your own schema
      TableName: process.env.USER_TABLE,
      Item: {
        'id': {S: event.request.userAttributes.sub},
        '__typename': {S: 'User'},
        'createdAt': {S: date.toISOString()},
      },
    }
    await ddb.putItem(params).promise()
  }
  return event
}
```
```python
# based on job classification on jobstreet.com
job_classification = [
    "accounting/finance",
    "admin/human resources",
    "sales/marketing",
    "arts/media/communications",
    "services",
    "hotel/restaurant",
    "education/training",
    "computer/IT",
    # ...
]
```
The dokku-push action requires an SSH key with push access to the Dokku instance. Here's how to set that up.
Replace APPNAME with the name of the app (it's a good idea to use the same name the app has on Dokku).
We want each repo to have its own SSH key, so it's easier to rotate or invalidate a key if required without affecting the other repos. Let's generate a new key on your computer (see the GitHub help on SSH keys); a sketch of the command is below.
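A minimal sketch of that key-generation step, assuming ed25519 keys; the file name, comment, and APPNAME are placeholders:

```sh
# Generate a dedicated key pair for this repo (no passphrase, so CI can use it)
ssh-keygen -t ed25519 -C "APPNAME deploy key" -f ~/.ssh/dokku-APPNAME -N ""
```

The public half (~/.ssh/dokku-APPNAME.pub) is added to the Dokku server (for example with dokku ssh-keys:add), and the private half is stored as a secret in the repo's GitHub Actions settings under whatever name the dokku-push action expects.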
```js
const stripe = require('stripe')(process.env.STRIPE_SECRET_KEY);
const { updateUserSubscription, deleteUserSubscription } = require('./database');

exports.handler = async function (event, context, callback) {
  const webhookSecret = process.env.STRIPE_WEBHOOK_SECRET;
  try {
    const requestId = event?.requestContext?.requestId;
    // Assumed completion from here on: verify the signature, then route subscription events
    const sig = event.headers['stripe-signature'] || event.headers['Stripe-Signature'];
    const stripeEvent = stripe.webhooks.constructEvent(event.body, sig, webhookSecret);
    const sub = stripeEvent.data.object;
    if (stripeEvent.type === 'customer.subscription.deleted') await deleteUserSubscription(sub);
    else await updateUserSubscription(sub);
    return { statusCode: 200, body: JSON.stringify({ received: true, requestId }) };
  } catch (err) {
    return { statusCode: 400, body: `Webhook Error: ${err.message}` };
  }
};
```
```js
import AWS from 'aws-sdk';
import stream from 'stream';
import axios from 'axios';

export default async (url, filename, callback) => {
  const s3 = new AWS.S3({ params: { Bucket: process.env.STATIC_MAPS_BUCKET } });
  let contentType = 'application/octet-stream';
  let promise = null;
  const uploadStream = () => {
    // Assumed completion: anything written to this PassThrough stream is uploaded to S3
    const pass = new stream.PassThrough();
    promise = s3.upload({ Key: filename, Body: pass, ContentType: contentType }).promise();
    return pass;
  };
  const response = await axios({ method: 'get', url, responseType: 'stream' });
  response.data.pipe(uploadStream());
  return promise;
};
```
Granted, this is little more than an obfuscated way of having a publicly writable S3 bucket, but if you don’t have a server which can pre-sign URLs for you, this might be an acceptable solution.
For this to work, you take the following steps:
```js
var AWS = require('aws-sdk'),
    fs = require('fs');

// For dev purposes only
AWS.config.update({ accessKeyId: '...', secretAccessKey: '...' });

// Read in the file, convert it to base64, store to S3
fs.readFile('del.txt', function (err, data) {
  if (err) { throw err; }
  // Assumed completion: the bucket name and key are placeholders
  var base64data = Buffer.from(data).toString('base64');
  var s3 = new AWS.S3();
  s3.putObject({ Bucket: 'my-upload-bucket', Key: 'del.txt', Body: base64data }, function (err) {
    if (err) { throw err; }
    console.log('Successfully uploaded del.txt');
  });
});
```
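For comparison, if you do have a server (or a Lambda) that can pre-sign URLs, a minimal sketch of that approach with the v2 SDK looks like the following; the bucket name, key, and content type are placeholders:

```js
const AWS = require('aws-sdk');
const s3 = new AWS.S3();

// Returns a time-limited URL the client can PUT the file to directly;
// 'my-upload-bucket' and the key are placeholders for illustration.
const url = s3.getSignedUrl('putObject', {
  Bucket: 'my-upload-bucket',
  Key: 'uploads/del.txt',
  ContentType: 'text/plain',
  Expires: 60, // seconds the URL stays valid
});
console.log(url);
```

The client then uploads with a plain HTTP PUT to that URL, and the bucket itself never needs to be publicly writable.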
```js
const items = [{
  "Username": "First",
  "Timestamp": "2017-08-07T19:51:00.794Z",
  "Message": "first item in batch"
},
{
  "Username": "Second",
  "Timestamp": "2017-08-07T19:51:00.794Z",
  "Message": "second item in batch"
}];
```
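These items can be written to DynamoDB in a single call with BatchWriteItem. The sketch below uses the v2 DocumentClient and assumes a table named Messages; the table name and region are placeholders:

```js
const AWS = require('aws-sdk');
const docClient = new AWS.DynamoDB.DocumentClient({ region: 'us-east-1' });

// BatchWriteItem accepts up to 25 put/delete requests per call.
const params = {
  RequestItems: {
    // 'Messages' is a placeholder table name
    Messages: items.map((item) => ({ PutRequest: { Item: item } })),
  },
};

docClient.batchWrite(params, (err, data) => {
  if (err) console.error('Batch write failed:', err);
  else console.log('Unprocessed items:', data.UnprocessedItems);
});
```

In production you would retry anything returned in data.UnprocessedItems with backoff, since DynamoDB may apply only part of a batch.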