
feat: move and copy objects across buckets (#197)
fenos committed Apr 18, 2024
1 parent 68a6197 commit adb58bb
Showing 11 changed files with 82 additions and 20 deletions.
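In short: `move()` and `copy()` on `StorageFileApi` now take an optional `DestinationOptions` argument whose `destinationBucket` field lets the object land in a different bucket; omitting it keeps the old same-bucket behavior. A minimal sketch of the new surface, assuming a running storage server and an existing destination bucket (names are illustrative, borrowed from the tests below):

import { StorageClient } from '@supabase/storage-js'

const KEY = '<jwt>' // any JWT the storage server accepts, as in the test files

const storage = new StorageClient('http://localhost:8000/storage/v1', {
  Authorization: `Bearer ${KEY}`,
})

// Move an object into another bucket, optionally under a new path.
await storage.from('bucket').move('folder/image.png', 'folder/image.png', {
  destinationBucket: 'bucket-move',
})
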
15 changes: 15 additions & 0 deletions infra/docker-compose.yml
@@ -43,6 +43,7 @@ services:
- assets-volume:/tmp/storage
healthcheck:
test: ['CMD-SHELL', 'curl -f -LI http://localhost:5000/status']
interval: 2s
db:
build:
context: ./postgres
@@ -63,6 +64,20 @@ services:
timeout: 5s
retries: 5

dummy_data:
build:
context: ./postgres
depends_on:
storage:
condition: service_healthy
volumes:
- ./postgres:/sql
command:
- psql
- "postgresql://postgres:postgres@db:5432/postgres"
- -f
- /sql/dummy-data.sql

imgproxy:
image: darthsim/imgproxy
ports:
1 change: 0 additions & 1 deletion infra/postgres/Dockerfile
@@ -3,7 +3,6 @@ FROM supabase/postgres:0.13.0
COPY 00-initial-schema.sql /docker-entrypoint-initdb.d/00-initial-schema.sql
COPY auth-schema.sql /docker-entrypoint-initdb.d/01-auth-schema.sql
COPY storage-schema.sql /docker-entrypoint-initdb.d/02-storage-schema.sql
COPY dummy-data.sql /docker-entrypoint-initdb.d/03-dummy-data.sql

# Build time defaults
ARG build_POSTGRES_DB=postgres
3 changes: 2 additions & 1 deletion infra/postgres/dummy-data.sql
@@ -9,7 +9,8 @@ INSERT INTO "storage"."buckets" ("id", "name", "owner", "created_at", "updated_a
('bucket2', 'bucket2', '4d56e902-f0a0-4662-8448-a4d9e643c142', '2021-02-17 04:43:32.770206+00', '2021-02-17 04:43:32.770206+00'),
('bucket3', 'bucket3', '4d56e902-f0a0-4662-8448-a4d9e643c142', '2021-02-17 04:43:32.770206+00', '2021-02-17 04:43:32.770206+00'),
('bucket4', 'bucket4', '317eadce-631a-4429-a0bb-f19a7a517b4a', '2021-02-25 09:23:01.58385+00', '2021-02-25 09:23:01.58385+00'),
('bucket5', 'bucket5', '317eadce-631a-4429-a0bb-f19a7a517b4a', '2021-02-27 03:04:25.6386+00', '2021-02-27 03:04:25.6386+00');
('bucket5', 'bucket5', '317eadce-631a-4429-a0bb-f19a7a517b4a', '2021-02-27 03:04:25.6386+00', '2021-02-27 03:04:25.6386+00'),
('bucket-move', 'bucket-move', '317eadce-631a-4429-a0bb-f19a7a517b4a', '2021-02-27 03:04:25.6386+00', '2021-02-27 03:04:25.6386+00');


-- insert objects
7 changes: 3 additions & 4 deletions infra/postgres/storage-schema.sql
@@ -28,7 +28,6 @@ CREATE TABLE "storage"."objects" (
"last_accessed_at" timestamptz DEFAULT now(),
"metadata" jsonb,
CONSTRAINT "objects_bucketId_fkey" FOREIGN KEY ("bucket_id") REFERENCES "storage"."buckets"("id"),
CONSTRAINT "objects_owner_fkey" FOREIGN KEY ("owner") REFERENCES "auth"."users"("id"),
PRIMARY KEY ("id")
);
CREATE UNIQUE INDEX "bucketid_objname" ON "storage"."objects" USING BTREE ("bucket_id","name");
@@ -86,7 +85,7 @@ CREATE OR REPLACE FUNCTION storage.search(prefix text, bucketname text, limits i
LANGUAGE plpgsql
AS $function$
BEGIN
return query
return query
with files_folders as (
select ((string_to_array(objects.name, '/'))[levels]) as folder
from objects
@@ -95,8 +94,8 @@ BEGIN
GROUP by folder
limit limits
offset offsets
)
select files_folders.folder as name, objects.id, objects.updated_at, objects.created_at, objects.last_accessed_at, objects.metadata from files_folders
)
select files_folders.folder as name, objects.id, objects.updated_at, objects.created_at, objects.last_accessed_at, objects.metadata from files_folders
left join objects
on prefix || files_folders.folder = objects.name and objects.bucket_id=bucketname;
END
2 changes: 1 addition & 1 deletion infra/storage/Dockerfile
@@ -1,3 +1,3 @@
FROM supabase/storage-api:v0.35.1
FROM supabase/storage-api:v1.0.10

RUN apk add curl --no-cache
2 changes: 1 addition & 1 deletion package.json
Expand Up @@ -30,7 +30,7 @@
"types-generate": "dts-gen -m '@supabase/storage-js' -s",
"test": "run-s test:clean test:infra test:suite test:clean",
"test:suite": "jest --runInBand",
"test:infra": "cd infra && docker-compose down && docker-compose up -d && sleep 10",
"test:infra": "cd infra && docker-compose down && docker-compose up -d --build && sleep 10",
"test:clean": "cd infra && docker-compose down --remove-orphans",
"docs": "typedoc --entryPoints src/index.ts --out docs/v2 --entryPoints src/packages/* --excludePrivate --excludeProtected",
"docs:json": "typedoc --json docs/v2/spec.json --entryPoints src/index.ts --entryPoints src/packages/* --excludePrivate --excludeExternals --excludeProtected"
4 changes: 4 additions & 0 deletions src/lib/types.ts
@@ -45,6 +45,10 @@ export interface FileOptions {
duplex?: string
}

export interface DestinationOptions {
destinationBucket?: string
}

export interface SearchOptions {
/**
* The number of files you want to be returned.
29 changes: 22 additions & 7 deletions src/packages/StorageFileApi.ts
@@ -7,6 +7,7 @@ import {
SearchOptions,
FetchParameters,
TransformOptions,
DestinationOptions,
} from '../lib/types'

const DEFAULT_SEARCH_OPTIONS = {
@@ -68,7 +69,7 @@ export default class StorageFileApi {
fileOptions?: FileOptions
): Promise<
| {
data: { id: string, path: string, fullPath: string }
data: { id: string; path: string; fullPath: string }
error: null
}
| {
@@ -138,7 +139,7 @@ export default class StorageFileApi {
fileOptions?: FileOptions
): Promise<
| {
data: { id: string, path: string, fullPath: string }
data: { id: string; path: string; fullPath: string }
error: null
}
| {
@@ -282,7 +283,7 @@ export default class StorageFileApi {
fileOptions?: FileOptions
): Promise<
| {
data: { id: string, path: string, fullPath: string }
data: { id: string; path: string; fullPath: string }
error: null
}
| {
@@ -298,10 +299,12 @@
*
* @param fromPath The original file path, including the current file name. For example `folder/image.png`.
* @param toPath The new file path, including the new file name. For example `folder/image-new.png`.
* @param options The destination options.
*/
async move(
fromPath: string,
toPath: string
toPath: string,
options?: DestinationOptions
): Promise<
| {
data: { message: string }
@@ -316,7 +319,12 @@
const data = await post(
this.fetch,
`${this.url}/object/move`,
{ bucketId: this.bucketId, sourceKey: fromPath, destinationKey: toPath },
{
bucketId: this.bucketId,
sourceKey: fromPath,
destinationKey: toPath,
destinationBucket: options?.destinationBucket,
},
{ headers: this.headers }
)
return { data, error: null }
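
The request body now forwards `destinationBucket` when given; when the option is omitted the field is `undefined` and the move stays inside the source bucket, as before. A call sketch for the updated signature (names are illustrative; the success message matches the tests below):

const { data, error } = await storage
  .from('bucket')
  .move('folder/old.txt', 'folder/new.txt', { destinationBucket: 'bucket-move' })

if (error) {
  console.error(error) // e.g. the destination bucket does not exist
} else {
  console.log(data.message) // 'Successfully moved'
}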
@@ -334,10 +342,12 @@
*
* @param fromPath The original file path, including the current file name. For example `folder/image.png`.
* @param toPath The new file path, including the new file name. For example `folder/image-copy.png`.
* @param options The destination options.
*/
async copy(
fromPath: string,
toPath: string
toPath: string,
options?: DestinationOptions
): Promise<
| {
data: { path: string }
@@ -352,7 +362,12 @@
const data = await post(
this.fetch,
`${this.url}/object/copy`,
{ bucketId: this.bucketId, sourceKey: fromPath, destinationKey: toPath },
{
bucketId: this.bucketId,
sourceKey: fromPath,
destinationKey: toPath,
destinationBucket: options?.destinationBucket,
},
{ headers: this.headers }
)
return { data: { path: data.Key }, error: null }
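
`copy()` resolves with the object's full key, so with `destinationBucket` set the returned `path` is prefixed by the destination bucket rather than the source one; the new test below asserts exactly `${newBucketName}/${newPath}`. A sketch under the same assumptions:

const { data, error } = await storage
  .from('bucket')
  .copy('a/file.txt', 'a/file.txt', { destinationBucket: 'bucket-move' })

if (!error) {
  console.log(data.path) // e.g. 'bucket-move/a/file.txt'
}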
2 changes: 1 addition & 1 deletion test/__snapshots__/storageApi.test.ts.snap
@@ -4,7 +4,7 @@ exports[`bucket api Get bucket by id 1`] = `
{
"allowed_mime_types": null,
"created_at": "2021-02-17T04:43:32.770Z",
"file_size_limit": 0,
"file_size_limit": null,
"id": "bucket2",
"name": "bucket2",
"owner": "4d56e902-f0a0-4662-8448-a4d9e643c142",
2 changes: 1 addition & 1 deletion test/storageApi.test.ts
@@ -3,7 +3,7 @@ import { StorageClient } from '../src/index'
// TODO: need to setup storage-api server for this test
const URL = 'http://localhost:8000/storage/v1'
const KEY =
'eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJpc3MiOiJPbmxpbmUgSldUIEJ1aWxkZXIiLCJpYXQiOjE2ODA5NjcxMTUsImV4cCI6MTcxMjUwMzI1MywiYXVkIjoiIiwic3ViIjoiMzE3ZWFkY2UtNjMxYS00NDI5LWEwYmItZjE5YTdhNTE3YjRhIiwicm9sZSI6ImF1dGhlbnRpY2F0ZWQifQ.NNzc54y9cZ2QLUHVSrCPOcGE2E0i8ouldc-AaWLsI08'
'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJyb2xlIjoiYXV0aGVudGljYXRlZCIsInN1YiI6IjMxN2VhZGNlLTYzMWEtNDQyOS1hMGJiLWYxOWE3YTUxN2I0YSIsImlhdCI6MTcxMzQzMzgwMCwiZXhwIjoyMDI5MDA5ODAwfQ.jVFIR-MB7rNfUuJaUH-_CyDFZEHezzXiqcRcdrGd29o'

const storage = new StorageClient(URL, { Authorization: `Bearer ${KEY}` })
const newBucketName = `my-new-bucket-${Date.now()}`
35 changes: 32 additions & 3 deletions test/storageFileApi.test.ts
@@ -10,7 +10,7 @@ import fetch from '@supabase/node-fetch'
// TODO: need to setup storage-api server for this test
const URL = 'http://localhost:8000/storage/v1'
const KEY =
'eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJpc3MiOiJPbmxpbmUgSldUIEJ1aWxkZXIiLCJpYXQiOjE2ODA5NjcxMTUsImV4cCI6MTcxMjUwMzI1MywiYXVkIjoiIiwic3ViIjoiMzE3ZWFkY2UtNjMxYS00NDI5LWEwYmItZjE5YTdhNTE3YjRhIiwicm9sZSI6ImF1dGhlbnRpY2F0ZWQifQ.NNzc54y9cZ2QLUHVSrCPOcGE2E0i8ouldc-AaWLsI08'
'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJyb2xlIjoiYXV0aGVudGljYXRlZCIsInN1YiI6IjMxN2VhZGNlLTYzMWEtNDQyOS1hMGJiLWYxOWE3YTUxN2I0YSIsImlhdCI6MTcxMzQzMzgwMCwiZXhwIjoyMDI5MDA5ODAwfQ.jVFIR-MB7rNfUuJaUH-_CyDFZEHezzXiqcRcdrGd29o'

const storage = new StorageClient(URL, { Authorization: `Bearer ${KEY}` })

@@ -214,8 +214,8 @@ describe('Object API', () => {
})
expect(res.error).toEqual({
error: 'invalid_mime_type',
message: 'mime type not supported',
statusCode: '422',
message: 'mime type image/jpeg is not supported',
statusCode: '415',
})
})

@@ -288,6 +288,23 @@ describe('Object API', () => {
expect(res.data?.message).toEqual(`Successfully moved`)
})

test('move object across buckets in different path', async () => {
const newBucketName = 'bucket-move'

const newPath = `testpath/file-to-move-${Date.now()}.txt`
const upload = await storage.from(bucketName).upload(uploadPath, file)

const res = await storage.from(bucketName).move(uploadPath, newPath, {
destinationBucket: newBucketName,
})

expect(res.error).toBeNull()
expect(res.data?.message).toEqual(`Successfully moved`)

const { error } = await storage.from(newBucketName).download(newPath)
expect(error).toBeNull()
})

test('copy object to different path', async () => {
const newPath = `testpath/file-copied-${Date.now()}.txt`
await storage.from(bucketName).upload(uploadPath, file)
@@ -297,6 +314,18 @@
expect(res.data?.path).toEqual(`${bucketName}/${newPath}`)
})

test('copy object across buckets to different path', async () => {
const newBucketName = 'bucket-move'
const newPath = `testpath/file-copied-${Date.now()}.txt`
await storage.from(bucketName).upload(uploadPath, file)
const res = await storage.from(bucketName).copy(uploadPath, newPath, {
destinationBucket: newBucketName,
})

expect(res.error).toBeNull()
expect(res.data?.path).toEqual(`${newBucketName}/${newPath}`)
})

test('downloads an object', async () => {
await storage.from(bucketName).upload(uploadPath, file)
const res = await storage.from(bucketName).download(uploadPath)
