A proof of concept (POC) for uploading files to a Cloudflare R2 public bucket using presigned URLs in Next.js.
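The flow: the Upload page asks the /api/upload route for a presigned URL, then PUTs the file directly to R2 with it. The snippets assume the following environment variables are set (names taken from the code, values are your own): R2_ACCOUNT_ID, R2_BUCKET_DEV, R2_BUCKET_PROD, S3_COMPATIBLE_ACCESS_ID and S3_COMPATIBLE_SECRET_KEY, plus a "~/*" import alias configured in tsconfig.json.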
// utils/files.ts
type Obj = {
  name: string;
  size: string;
  type: string;
};

type PresignedPost = {
  url: string;
  expiresIn: number;
};

// Ask the API route for a presigned upload URL for the given file metadata.
export const getPresignedUrl = async ({ name, size, type }: Obj): Promise<PresignedPost> => {
  try {
    const res = await fetch(`/api/upload?name=${name}&size=${size}&type=${type}`, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
      },
    });
    return (await res.json()) as PresignedPost;
  } catch (error) {
    console.log("getPresignedUrl", { error });
    throw error;
  }
};
// utils/s3.ts
import { S3Client } from "@aws-sdk/client-s3";
import memoizeOne from "memoize-one";

const region = "auto";

export const s3Buckets = {
  dev: process.env.R2_BUCKET_DEV,
  prod: process.env.R2_BUCKET_PROD,
};

// Pick the bucket for the current environment (assumption: dev vs. prod is decided by NODE_ENV).
export const s3Bucket = process.env.NODE_ENV === "production" ? s3Buckets.prod : s3Buckets.dev;

// Build the R2 client once and reuse it (memoized).
// Note: R2 access keys are static credentials, so no session token is needed.
const getClient = memoizeOne(() => {
  const endpoint = `https://${process.env.R2_ACCOUNT_ID}.r2.cloudflarestorage.com`;
  return new S3Client({
    region,
    credentials: {
      accessKeyId: process.env.S3_COMPATIBLE_ACCESS_ID as string,
      secretAccessKey: process.env.S3_COMPATIBLE_SECRET_KEY as string,
    },
    endpoint,
    forcePathStyle: true,
  });
});

export const s3Client = getClient();
// pages/api/upload.ts
/**
 * The content of this file was inspired by the example made by @leeerob for @vercel.
 * @see {@link https://github.com/vercel/examples/blob/main/solutions/aws-s3-image-upload/pages/api/upload-url.ts}
 */
import { PutObjectCommand } from "@aws-sdk/client-s3";
import { getSignedUrl } from "@aws-sdk/s3-request-presigner";
import * as crypto from "crypto";
import { NextApiRequest, NextApiResponse } from "next";
import { s3Bucket as Bucket, s3Client } from "~/utils/s3";

type Query = {
  name: string;
  size: string;
  type: string;
};

async function handler(req: NextApiRequest, res: NextApiResponse) {
  const query = req.query as unknown as Query;

  res.setHeader("Access-Control-Allow-Origin", "*");
  res.setHeader("Access-Control-Allow-Methods", "POST");

  // Use a random key so uploads never collide, then presign a PUT for that key.
  const key = crypto.randomUUID();
  const expiresIn = 30 * 60; // 30 minutes

  const command = new PutObjectCommand({
    Bucket,
    Key: key,
    ContentType: query.type,
    ContentLength: parseInt(query.size, 10),
  });
  const url = await getSignedUrl(s3Client, command, { expiresIn });

  return res.status(200).json({ url, expiresIn });
}

export default handler;
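Note: as written, the handler signs whatever name, size and type the client sends. A minimal guard (an assumption, not part of the original gist) could enforce the 1 MB image limit mentioned by the upload page before presigning:

// Optional validation helper for pages/api/upload.ts (sketch, not in the original gist).
const MAX_SIZE = 1024 * 1024; // matches the "max 1MB" hint in the UI
function isAllowedUpload(query: { size: string; type: string }): boolean {
  const size = parseInt(query.size, 10);
  return query.type.startsWith("image/") && Number.isFinite(size) && size <= MAX_SIZE;
}

Call it at the top of the handler and return res.status(400) when it fails, so only reasonable requests get a signed URL.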
// pages/Upload.tsx
import { PutBucketCorsCommand } from "@aws-sdk/client-s3";
import { getPresignedUrl } from "~/utils/files";
import { s3Bucket, s3Client } from "~/utils/s3";

export default function Upload() {
  const uploadPhoto = async (e: React.ChangeEvent<HTMLInputElement>) => {
    const file = e.target.files?.[0];
    if (!file) return;

    const name = encodeURIComponent(file.name);
    const type = encodeURIComponent(file.type);

    // Ask the API route for a presigned URL, then PUT the file straight to R2.
    const { url } = await getPresignedUrl({ name, type, size: String(file.size) });
    const upload = await fetch(url, {
      method: "PUT",
      body: file,
      headers: {
        "Content-Type": file.type,
      },
    });

    if (upload.ok) {
      console.log("Uploaded successfully!");
    } else {
      console.error("Upload failed.");
    }
  };

  return (
    <>
      <p>Upload a .png or .jpg image (max 1MB).</p>
      <input onChange={uploadPhoto} type="file" accept="image/png, image/jpeg" />
    </>
  );
}

// Apply the bucket's CORS rules so the browser is allowed to PUT directly to R2.
export async function getServerSideProps() {
  const config = {
    AllowedHeaders: ["content-type"],
    AllowedMethods: ["PUT"],
    AllowedOrigins: ["*"],
  };
  const bucketParams = {
    Bucket: s3Bucket,
    CORSConfiguration: { CORSRules: [config] },
  };
  const command = new PutBucketCorsCommand(bucketParams);
  const data = await s3Client.send(command);
  console.log({ bucketParams, data });

  return {
    props: {},
  };
}
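The getServerSideProps above re-applies the bucket CORS rules on every page request, which is fine for a POC but only needs to happen once. A one-off script under the same assumptions (run manually, e.g. with ts-node) could do the same job:

// scripts/set-cors.ts — one-time CORS setup (sketch, not part of the original gist)
import { PutBucketCorsCommand } from "@aws-sdk/client-s3";
import { s3Bucket, s3Client } from "../utils/s3";

async function main() {
  const command = new PutBucketCorsCommand({
    Bucket: s3Bucket,
    CORSConfiguration: {
      CORSRules: [
        {
          AllowedHeaders: ["content-type"],
          AllowedMethods: ["PUT"],
          AllowedOrigins: ["*"],
        },
      ],
    },
  });
  const data = await s3Client.send(command);
  console.log({ data });
}

main().catch(console.error);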