Skip to content

Instantly share code, notes, and snippets.

@eno-conan
Last active March 31, 2025 10:06
Supabase Local Development Restore Storage Objects
# replace values on your own environment.
# Supabase configuration
SUPABASE_LOCAL_URL=http://127.0.0.1:54321
SUPABASE_LOCAL_SERVICE_KEY=eyJhbCgiOiJIUzI1NiLsInR5cCI1IkpKECJ1.eyJpc1MiOiJzdIQhGmFzZS1kKW1vIiwicm1sZOS1InNlcnZpY1Viwm1sZSIsImV1cCI1MTk4MzgxMjk5Nn0.EGKF31RAZx35lJzdJsyH-qQwv8Hdp7fsn3W0YpN81IU
# Database configuration
DB_HOST=127.0.0.1
DB_PORT=54322
DB_USER=postgres
DB_PASSWORD=postgres
DB_NAME=postgres
DATABASE_URL=postgresql://postgres:postgres@127.0.0.1:54322/postgres
# S3-compatible storage configuration
S3_ENDPOINT=http://127.0.0.1:54321/storage/v1/s3
S3_REGION=local
S3_LOCAL_ACCESS_KEY=929003a08b11bf1b5ff111c111f1d11c
S3_LOCAL_SECRET_KEY=123456e4652dd093b7a15c58ae0d2d34bd487cc0ea1111aed6eda11111111111
# Bucket configuration
LOCAL_BUCKET_NAME=starbucks
# Backup path
BACKUP_PATH=../storage_backup
# SQL file configuration
SQL_FILE_NAME=starbucks_bucket_policy.sql
const fs = require('fs');
const { Pool } = require('pg');
const path = require('path');
require('dotenv').config({ path: path.join(__dirname, '.env') });
/**
* PostgreSQLデータベースにSQLファイルを読み込んで実行する関数
* @param {string} dbUrl - PostgreSQL接続URL
* @param {string} sqlFilePath - 実行するSQLファイルのパス
*/
/**
 * Reads a SQL file and executes its contents against a PostgreSQL database.
 *
 * @param {string} dbUrl - PostgreSQL connection URL.
 * @param {string} sqlFilePath - Path of the SQL file to execute.
 * @returns {Promise<import('pg').QueryResult>} Result of the executed query.
 * @throws {Error} If the SQL file does not exist or execution fails.
 */
async function executeSqlFile(dbUrl, sqlFilePath) {
  // This function is exported (module.exports), so it must not call
  // process.exit(1) and kill the caller's process — throw instead.
  if (!fs.existsSync(sqlFilePath)) {
    throw new Error(`SQLファイルが見つかりません: ${sqlFilePath}`);
  }
  console.log(`SQLファイルを読み込んでいます: ${path.basename(sqlFilePath)}`);
  const sqlContent = fs.readFileSync(sqlFilePath, 'utf8');
  // One pool per call; always closed in `finally` so connections never leak.
  const pool = new Pool({
    connectionString: dbUrl
  });
  try {
    console.log('データベースに接続しています...');
    console.log('SQLを実行しています...');
    const result = await pool.query(sqlContent);
    console.log('SQLファイルの実行が完了しました');
    return result;
  } catch (error) {
    console.error('SQLファイルの実行中にエラーが発生しました:', error.message);
    throw error;
  } finally {
    await pool.end();
    console.log('データベース接続を終了しました');
  }
}
// Resolve the SQL file to run: the name comes from SQL_FILE_NAME (default
// 'starbucks_bucket_policy.sql') and it is expected to live next to this
// script (__dirname), not in the process working directory.
const currentDir = __dirname;
const sqlFileName = process.env.SQL_FILE_NAME || 'starbucks_bucket_policy.sql';
const sqlFilePath = path.join(currentDir, sqlFileName);
// 使用例
/**
 * Entry point: builds the database URL from environment variables (or uses
 * DATABASE_URL directly) and executes the bucket-policy SQL file.
 * Exits the process with status 1 when the file is missing or execution fails.
 */
async function main() {
  const dbHost = process.env.DB_HOST || '127.0.0.1';
  const dbPort = process.env.DB_PORT || 54322;
  const dbUser = process.env.DB_USER || 'postgres';
  const dbPassword = process.env.DB_PASSWORD || 'postgres';
  const dbName = process.env.DB_NAME || 'postgres';
  const dbUrl = process.env.DATABASE_URL ||
    `postgresql://${dbUser}:${dbPassword}@${dbHost}:${dbPort}/${dbName}`;
  console.log(`検索しているSQLファイルのパス: ${sqlFilePath}`);
  try {
    // Check for the SQL file directly. The previous version listed the whole
    // directory with readdirSync and iterated it with an empty forEach (dead
    // code), then searched the listing; fs.existsSync is equivalent and simpler.
    if (fs.existsSync(sqlFilePath)) {
      console.log(`${path.basename(sqlFilePath)}が見つかりました。実行を開始します。`);
      await executeSqlFile(dbUrl, sqlFilePath);
      console.log('処理が正常に完了しました');
    } else {
      console.error(`${path.basename(sqlFilePath)}がディレクトリ内に見つかりません。`);
      process.exit(1);
    }
  } catch (error) {
    console.error('エラーが発生しました:', error);
    process.exit(1);
  }
}
// スクリプトが直接実行された場合に実行
// Run only when executed directly (`node scripts/applyBucketPolicy.js`).
// The .catch is a safety net for the floating promise, mirroring the
// backupStorage.js entry point; main() already handles its own errors.
if (require.main === module) {
  main().catch((error) => {
    console.error('エラーが発生しました:', error);
    process.exit(1);
  });
}

module.exports = { executeSqlFile };
const { S3Client, ListObjectsV2Command, GetObjectCommand } = require('@aws-sdk/client-s3');
const fs = require('fs');
const path = require('path');
const readline = require('readline');
require('dotenv').config({ path: path.join(__dirname, '.env') });
// S3 settings read from environment variables (.env), with local-dev defaults.
const S3_ENDPOINT = process.env.S3_ENDPOINT || 'http://127.0.0.1:54321/storage/v1/s3';
const S3_REGION = process.env.S3_REGION || 'local';
// NOTE(review): `||` treats an empty-string env var the same as unset.
const S3_LOCAL_ACCESS_KEY = process.env.S3_LOCAL_ACCESS_KEY || '';
const S3_LOCAL_SECRET_KEY = process.env.S3_LOCAL_SECRET_KEY || '';
const LOCAL_BUCKET_NAME = process.env.LOCAL_BUCKET_NAME || 'starbucks';
// Default backup target is <script dir>/../storage_backup when unset.
const BACKUP_PATH = process.env.BACKUP_PATH || path.join(__dirname, '../storage_backup');
// Client for Supabase's local S3-compatible storage API. forcePathStyle puts
// the bucket in the URL path — presumably required because the local endpoint
// does not serve virtual-hosted (bucket-subdomain) URLs; confirm if changed.
const client = new S3Client({
  endpoint: S3_ENDPOINT,
  region: S3_REGION,
  credentials: {
    accessKeyId: S3_LOCAL_ACCESS_KEY,
    secretAccessKey: S3_LOCAL_SECRET_KEY
  },
  forcePathStyle: true
});
// ユーザー入力を取得するための関数
function askQuestion(query) {
const rl = readline.createInterface({
input: process.stdin,
output: process.stdout,
});
return new Promise(resolve => rl.question(query, ans => {
rl.close();
resolve(ans);
}));
}
/**
 * Downloads every object in LOCAL_BUCKET_NAME from the local Supabase S3
 * endpoint into BACKUP_PATH, preserving the object-key directory structure.
 *
 * If the bucket cannot be listed (e.g. it does not exist yet), the user is
 * asked interactively whether to keep using the existing backup folder;
 * answering anything but 'y' aborts with an error.
 *
 * @returns {Promise<void>}
 * @throws {Error} If a download fails or the user cancels at the prompt.
 */
async function backupStorage() {
  const backupDir = BACKUP_PATH;
  if (!fs.existsSync(backupDir)) {
    fs.mkdirSync(backupDir, { recursive: true });
  }
  try {
    const listParams = { Bucket: LOCAL_BUCKET_NAME };
    try {
      // NOTE(review): ListObjectsV2 returns at most 1000 keys per call; a
      // larger bucket would need ContinuationToken paging — confirm sizes.
      const data = await client.send(new ListObjectsV2Command(listParams));
      if (!data.Contents || data.Contents.length === 0) {
        console.log(`バケット '${LOCAL_BUCKET_NAME}' は空です。バックアップするものがありません。`);
        return;
      }
      console.log(`バケット '${LOCAL_BUCKET_NAME}' から ${data.Contents.length} 個のオブジェクトをバックアップします...`);
      for (const item of data.Contents) {
        const getParams = { Bucket: LOCAL_BUCKET_NAME, Key: item.Key };
        const fileData = await client.send(new GetObjectCommand(getParams));
        // Normalize backslashes so every key maps to forward-slash paths.
        const safeKey = item.Key.replace(/\\/g, '/');
        const filePath = path.join(backupDir, safeKey);
        const dirname = path.dirname(filePath);
        if (!fs.existsSync(dirname)) {
          fs.mkdirSync(dirname, { recursive: true });
        }
        // AWS SDK v3 on Node returns Body as a readable stream; pipe to disk.
        const writeStream = fs.createWriteStream(filePath);
        fileData.Body.pipe(writeStream);
        // Await stream completion so downloads run sequentially and errors
        // propagate to the outer catch instead of being lost.
        await new Promise((resolve, reject) => {
          writeStream.on('finish', () => {
            console.log(`Backed up: ${safeKey}`);
            resolve();
          });
          writeStream.on('error', (err) => {
            console.error(`Error backing up ${safeKey}:`, err);
            reject(err);
          });
        });
      }
    } catch (error) {
      // Bucket missing or inaccessible: offer to reuse the existing backup
      // folder contents instead of failing outright.
      console.error(`バケットアクセス中にエラーが発生しました:`, error.message);
      const answer = await askQuestion('現在のバックアップフォルダの内容を適用しますか? (y/n): ');
      if (answer.toLowerCase() === 'y') {
        console.log('バックアップフォルダの内容を適用します。処理を正常終了します。');
        return;
      } else {
        throw new Error('ユーザーが操作をキャンセルしました。');
      }
    }
  } catch (error) {
    console.error(`バックアップ時にエラーが発生しました:`, error);
    throw error;
  }
  console.log('🎉 Storage backup completed successfully!');
}
// Run the backup only when this script is executed directly
// (`node scripts/backupStorage.js`), not when required as a module.
if (require.main === module) {
  backupStorage().catch(error => {
    console.error('❌ Backup failed:', error);
    process.exit(1);
  });
}

module.exports = { backupStorage };
{
"devDependencies": {
"@aws-sdk/client-s3": "^3.777.0",
"npm-run-all": "^4.1.5",
"supabase": "^2.20.5"
},
"scripts": {
"start": "supabase start",
"stop": "supabase stop",
"login": "supabase login",
"link": "supabase link --project-ref",
"migrate:new": "supabase migration new",
"db:diff": "supabase db diff",
"db:diff:name": "supabase db diff --use-migra -f",
"db:push": "supabase db push",
"db:reset": "run-s db:backup:storage db:do:reset db:restore:storage db:restore:policy",
"db:restore": "run-s db:restore:storage db:restore:policy",
"db:do:reset": "supabase db reset",
"db:backup:storage": "node scripts/backupStorage.js",
"db:restore:storage": "node scripts/restoreStorage.js",
"db:restore:policy": "node scripts/applyBucketPolicy.js"
},
"dependencies": {
"@supabase/supabase-js": "^2.49.3",
"mime-types": "^3.0.1",
"pg": "^8.14.1"
}
}
const { createClient } = require('@supabase/supabase-js');
const fs = require('fs');
const path = require('path');
const mime = require('mime-types');
const { Client } = require('pg');
require('dotenv').config({ path: path.join(__dirname, '.env') });
// Environment variables from .env (local Supabase defaults).
const SUPABASE_LOCAL_URL = process.env.SUPABASE_LOCAL_URL || 'http://127.0.0.1:54321';
const SERVICE_KEY = process.env.SUPABASE_LOCAL_SERVICE_KEY;
// NOTE(review): the DB_* values are loaded but unused in this script — kept
// for parity with the sibling scripts; confirm before removing.
const DB_HOST = process.env.DB_HOST || '127.0.0.1';
const DB_PORT = process.env.DB_PORT || 54322;
const DB_USER = process.env.DB_USER || 'postgres';
const DB_PASSWORD = process.env.DB_PASSWORD || 'postgres';
const DB_NAME = process.env.DB_NAME || 'postgres';
const LOCAL_BUCKET_NAME = process.env.LOCAL_BUCKET_NAME || 'starbucks';

// Fail fast with a clear message: SERVICE_KEY has no sensible default, and
// without this check createClient fails with an opaque error downstream.
if (!SERVICE_KEY) {
  console.error('SUPABASE_LOCAL_SERVICE_KEY is not set. Add it to .env.');
  process.exit(1);
}

// Admin client used to bypass RLS (service-role key); a one-shot script
// needs neither token refresh nor session persistence.
const supabaseAdmin = createClient(SUPABASE_LOCAL_URL, SERVICE_KEY, {
  auth: {
    autoRefreshToken: false,
    persistSession: false
  }
});
/**
 * Ensures the given storage bucket exists, creating it when absent.
 *
 * @param {string} bucketName - Name of the bucket to check/create.
 * @throws Rethrows any list/create error after logging it.
 */
async function createBucketIfNotExists(bucketName) {
  try {
    const listResult = await supabaseAdmin.storage.listBuckets();
    if (listResult.error) throw listResult.error;

    const alreadyThere = listResult.data.some((bucket) => bucket.name === bucketName);
    if (alreadyThere) {
      console.log(`Bucket '${bucketName}' already exists`);
      return;
    }

    // Private bucket restricted to PNG uploads, capped at 10MB per file.
    const createResult = await supabaseAdmin.storage.createBucket(bucketName, {
      public: false,
      allowedMimeTypes: ['image/png'],
      fileSizeLimit: '10MB'
    });
    if (createResult.error) throw createResult.error;

    console.log(`Bucket '${bucketName}' created successfully`);
  } catch (error) {
    console.error('Bucket creation error:', error);
    throw error;
  }
}
/**
 * Uploads one local file into the bucket at the given object path,
 * detecting the MIME type from the file extension.
 *
 * @param {string} bucketName - Target storage bucket.
 * @param {string} filePath - Local path of the file to upload.
 * @param {string} objectPath - Destination key inside the bucket.
 * @throws Rethrows upload errors, except "already exists" (logged and skipped).
 */
async function uploadFile(bucketName, filePath, objectPath) {
  const body = fs.readFileSync(filePath);
  const contentType = mime.lookup(filePath) || 'application/octet-stream';

  const { error } = await supabaseAdmin
    .storage
    .from(bucketName)
    .upload(objectPath, body, {
      contentType,
      upsert: true,
      cacheControl: '3600'
    });

  if (!error) {
    console.log(`Uploaded: ${objectPath} (${contentType})`);
    return;
  }
  // "already exists" is tolerated; anything else is a real failure.
  if (!error.message.includes('already exists')) {
    throw error;
  }
  console.log(`Skipped (exists): ${objectPath}`);
}
/**
 * Restores every file under the backup directory into LOCAL_BUCKET_NAME,
 * creating the bucket first if needed. Local directory structure is
 * preserved as object-key prefixes. Exits with status 1 on failure.
 */
async function restoreStorage() {
  const bucketName = LOCAL_BUCKET_NAME;
  // Honor BACKUP_PATH exactly like scripts/backupStorage.js does, so backup
  // and restore always target the same directory; the previous hard-coded
  // path silently diverged when BACKUP_PATH was customized.
  const backupDir = process.env.BACKUP_PATH || path.join(__dirname, '../storage_backup');
  try {
    await createBucketIfNotExists(bucketName);
    if (!fs.existsSync(backupDir)) {
      console.log('No backup found to restore');
      return;
    }
    // Recursively walk the backup tree, uploading files as they are found.
    const processDirectory = async (directory, relativePath = '') => {
      const items = fs.readdirSync(directory, { withFileTypes: true });
      for (const item of items) {
        const fullPath = path.join(directory, item.name);
        const objectPath = path.join(relativePath, item.name)
          .replace(/\\/g, '/')   // Windows separators -> S3 key slashes
          .replace(/\/+/g, '/'); // collapse duplicate slashes
        if (item.isDirectory()) {
          await processDirectory(fullPath, objectPath);
        } else {
          await uploadFile(bucketName, fullPath, objectPath);
        }
      }
    };
    await processDirectory(backupDir);
    console.log('🎉 Storage restore completed successfully!');
  } catch (error) {
    console.error('❌ Restore failed:', error);
    process.exit(1);
  }
}
// Run only when executed directly, mirroring backupStorage.js and
// applyBucketPolicy.js; previously the restore ran unconditionally even
// when this file was require()d. Export for programmatic reuse.
if (require.main === module) {
  restoreStorage().catch((error) => {
    console.error('❌ Restore failed:', error);
    process.exit(1);
  });
}

module.exports = { restoreStorage };
-- replace values on your own environment.
-- RLS policy: allow authenticated users to read objects in the 'starbucks'
-- bucket. NOTE: the original used '#' for the header comment, which is not
-- valid PostgreSQL comment syntax and makes applyBucketPolicy.js fail with
-- a syntax error when it sends this file to the server; '--' is required.
CREATE POLICY "Authenticated users can read starbucks bucket"
ON storage.objects
FOR SELECT
USING (
  bucket_id = 'starbucks' AND auth.role() = 'authenticated'
);
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment