Reference: https://supabase.com/docs/guides/platform/migrating-and-upgrading-projects#migrate-storage-objects
// storage_restore.js
const { createClient } = require("@supabase/supabase-js");
const fs = require("fs");

const PROJECT_URL = "http://localhost:8100";
const PROJECT_SERVICE_KEY = "service_role_key";

(async () => {
  console.log("started at: ", new Date().toISOString());

  // Client that reads the storage schema directly through PostgREST
  const supabaseRestClient = createClient(PROJECT_URL, PROJECT_SERVICE_KEY, {
    db: { schema: "storage" },
  });
  // Regular client used for the storage upload API
  const supabaseClient = createClient(PROJECT_URL, PROJECT_SERVICE_KEY);

  // make sure you update max_rows in the PostgREST settings if you have a lot
  // of objects, or paginate here (see the pagination sketch after this script)
  const { data: objects, error } = await supabaseRestClient
    .from("objects")
    .select();
  // .eq("bucket_id", "manuals");

  if (error) {
    console.log("error getting objects from old bucket");
    throw error;
  }

  for (const objectData of objects) {
    console.log(`moving ${objectData.id}`);
    try {
      // On self-hosted storage, each object version is stored on disk at
      // <bucket_id>/<name>/<version> under the storage volume
      const data = fs.readFileSync(
        `./storage/stub/stub/${objectData.bucket_id}/${objectData.name}/${objectData.version}`
      );
      const { error: uploadObjectError } = await supabaseClient.storage
        .from(objectData.bucket_id)
        .upload(objectData.name, data, {
          upsert: true,
          contentType: objectData.metadata.mimetype,
          cacheControl: objectData.metadata.cacheControl,
        });
      if (uploadObjectError) {
        throw uploadObjectError;
      }
    } catch (err) {
      console.log("error moving ", objectData);
      console.log(err);
    }
  }

  console.log("finished at: ", new Date().toISOString());
})();
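If the project holds more objects than PostgREST's max_rows setting allows per response, the single select() above silently truncates the list. A minimal pagination sketch that could replace that query; the PAGE_SIZE of 1000 is an assumption, not a required value, and it must not exceed your max_rows setting:

// Fetch all rows from storage.objects in pages instead of one select().
// PAGE_SIZE is an arbitrary choice; keep it at or below PostgREST's
// max_rows so a short page reliably means the last page was reached.
const PAGE_SIZE = 1000;
let objects = [];
for (let from = 0; ; from += PAGE_SIZE) {
  const { data: page, error } = await supabaseRestClient
    .from("objects")
    .select()
    .range(from, from + PAGE_SIZE - 1); // inclusive start/end row indices
  if (error) throw error;
  objects = objects.concat(page);
  if (page.length < PAGE_SIZE) break; // last (partial) page reached
}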
Copy this file into the Supabase Studio container.
docker cp storage_restore.js container_id:/app/
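Here container_id is the ID (or name) of the Supabase Studio container; docker ps lists the running containers. In a default self-hosted compose setup the container name contains "studio" (an assumption about your setup):

docker ps --filter "name=studio"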
Copy the existing storage folder into the Supabase Studio container as well.
docker cp storage container_id:/app/
Enter the container and run the script.
docker exec -it container_id bash
cd app
node storage_restore.js
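After the script finishes, a quick way to spot-check the restore is to list one bucket through the storage API. A minimal sketch, assuming the same PROJECT_URL and service key as above; "manuals" is just the example bucket id from the commented-out filter in the script, so substitute a real bucket:

// verify_restore.js - list a bucket to spot-check the restored objects
const { createClient } = require("@supabase/supabase-js");

const supabase = createClient("http://localhost:8100", "service_role_key");

(async () => {
  // list() with no arguments returns the top-level entries of the bucket,
  // up to its default page size (100 items)
  const { data, error } = await supabase.storage.from("manuals").list();
  if (error) throw error;
  console.log(`entries found: ${data.length}`);
  data.forEach((o) => console.log(o.name));
})();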