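// Migrate all images from a Supabase Storage bucket to a Cloudflare R2 bucket.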
import { PutObjectCommand, S3Client } from "@aws-sdk/client-s3";
import { createClient } from "@supabase/supabase-js";
import {
  destinationBucket,
  r2AccessKey,
  r2AccountId,
  r2SecretKey,
  sourceBucket,
  supabaseKey,
  supabaseUrl,
} from "../constants";
const supabase = createClient(supabaseUrl, supabaseKey);
const s3 = new S3Client({
  region: "auto",
  endpoint: `https://${r2AccountId}.r2.cloudflarestorage.com`,
  credentials: {
    accessKeyId: r2AccessKey,
    secretAccessKey: r2SecretKey,
  },
});
const migrateImages = async () => {
  try {
    // Get all top-level folders in the source bucket
    const { data: folders, error: foldersError } = await supabase.storage
      .from(sourceBucket)
      .list("", { limit: 500 });
    if (foldersError)
      throw new Error(`Failed to list folders: ${foldersError.message}`);
    if (!folders) throw new Error("No folders found");
    console.log(`Processing ${folders.length} folders...`);
    // Process folders sequentially
    let imagesMigrated = 0;
    for (const folder of folders) {
      console.log(`Processing folder: ${folder.name}`);
      const { data: folderData, error: folderError } = await supabase.storage
        .from(sourceBucket)
        .list(folder.name, { limit: 5 });
      if (folderError) {
        console.error(`Error listing folder ${folder.name}:`, folderError);
        continue;
      }
      if (!folderData) continue;
      // Process images in chunks to control concurrency
      const chunkSize = 3;
      for (let i = 0; i < folderData.length; i += chunkSize) {
        const chunk = folderData.slice(i, i + chunkSize);
        await Promise.all(
          chunk.map(async (image) => {
            try {
              const key = `${folder.name}/${image.name}`;
              console.log(`Processing image: ${key}`);
              // Download the image from Supabase Storage
              const { data: imageData, error: imageError } =
                await supabase.storage.from(sourceBucket).download(key);
              if (imageError)
                throw new Error(`Failed to download: ${imageError.message}`);
              if (!imageData) throw new Error("No image data received");
              // Upload the image to the R2 destination bucket under the same key
              await s3.send(
                new PutObjectCommand({
                  Bucket: destinationBucket,
                  Key: key,
                  Body: Buffer.from(await imageData.arrayBuffer()),
                })
              );
              imagesMigrated++;
              console.log(`Successfully migrated: ${key}`);
            } catch (error) {
              console.error(`Failed to process image ${image.name}:`, error);
            }
          })
        );
      }
    }
    console.log("Migration completed successfully");
    console.log(`Number of images migrated: ${imagesMigrated}`);
  } catch (error) {
    console.error("Migration failed:", error);
    throw error;
  }
};
// Execute the migration
migrateImages().catch(console.error);
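
The ../constants module referenced at the top is not included in the gist. A minimal sketch of what it might contain, assuming the values are read from environment variables (the variable names below are assumptions, not part of the original):

// constants.ts (hypothetical): configuration read from environment variables
export const supabaseUrl = process.env.SUPABASE_URL!; // Supabase project URL
export const supabaseKey = process.env.SUPABASE_SERVICE_ROLE_KEY!; // key with read access to storage
export const sourceBucket = process.env.SOURCE_BUCKET!; // Supabase Storage bucket to migrate from
export const destinationBucket = process.env.DESTINATION_BUCKET!; // R2 bucket to migrate to
export const r2AccountId = process.env.R2_ACCOUNT_ID!; // Cloudflare account ID (forms the R2 endpoint)
export const r2AccessKey = process.env.R2_ACCESS_KEY_ID!; // R2 API token access key
export const r2SecretKey = process.env.R2_SECRET_ACCESS_KEY!; // R2 API token secret

With those values in place, the script can be run with any TypeScript runner, for example npx tsx migrate.ts (the runner and file name here are assumptions).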