From 88eb33532ff4ba04db740fc868d7ceed9bc0e18f Mon Sep 17 00:00:00 2001 From: "joel@joellee.org" Date: Mon, 12 Dec 2022 14:36:12 +0530 Subject: [PATCH 01/20] chore: port over infra from storage-js --- infra/docker-compose.yml | 88 ++++++++++++++++++++++ infra/kong/Dockerfile | 15 ++++ infra/kong/kong.yml | 29 ++++++++ infra/postgres/00-initial-schema.sql | 23 ++++++ infra/postgres/Dockerfile | 20 +++++ infra/postgres/auth-schema.sql | 89 ++++++++++++++++++++++ infra/postgres/dummy-data.sql | 56 ++++++++++++++ infra/postgres/storage-schema.sql | 107 +++++++++++++++++++++++++++ infra/storage/Dockerfile | 3 + 9 files changed, 430 insertions(+) create mode 100644 infra/docker-compose.yml create mode 100644 infra/kong/Dockerfile create mode 100644 infra/kong/kong.yml create mode 100644 infra/postgres/00-initial-schema.sql create mode 100644 infra/postgres/Dockerfile create mode 100644 infra/postgres/auth-schema.sql create mode 100644 infra/postgres/dummy-data.sql create mode 100644 infra/postgres/storage-schema.sql create mode 100644 infra/storage/Dockerfile diff --git a/infra/docker-compose.yml b/infra/docker-compose.yml new file mode 100644 index 00000000..ea54bf1d --- /dev/null +++ b/infra/docker-compose.yml @@ -0,0 +1,88 @@ +# docker-compose.yml + +version: '3.6' +services: + kong: + container_name: supabase-kong + build: + context: ./kong + environment: + KONG_DECLARATIVE_CONFIG: /var/lib/kong/kong.yml + KONG_PLUGINS: request-transformer,cors,key-auth,http-log + ports: + - 8000:8000/tcp + - 8443:8443/tcp + rest: + image: postgrest/postgrest:latest + ports: + - '3000:3000' + depends_on: + storage: + condition: service_healthy + restart: always + environment: + PGRST_DB_URI: postgres://postgres:postgres@db:5432/postgres + PGRST_DB_SCHEMA: public, storage + PGRST_DB_ANON_ROLE: postgres + PGRST_JWT_SECRET: super-secret-jwt-token-with-at-least-32-characters-long + storage: + build: + context: ./storage + ports: + - '5000:5000' + depends_on: + db: + condition: service_healthy + restart: always + environment: + ANON_KEY: eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJyb2xlIjoiYW5vbiIsImlhdCI6MTYxMzUzMTk4NSwiZXhwIjoxOTI5MTA3OTg1fQ.ReNhHIoXIOa-8tL1DO3e26mJmOTnYuvdgobwIYGzrLQ + SERVICE_KEY: eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJyb2xlIjoic2VydmljZV9yb2xlIiwiaWF0IjoxNjEzNTMxOTg1LCJleHAiOjE5MjkxMDc5ODV9.FhK1kZdHmWdCIEZELt0QDCw6FIlCS8rVmp4RzaeI2LM + PROJECT_REF: bjwdssmqcnupljrqypxz # can be any random string + REGION: us-east-1 # region where your bucket is located + POSTGREST_URL: http://rest:3000 + GLOBAL_S3_BUCKET: supa-storage-testing # name of s3 bucket where you want to store objects + PGRST_JWT_SECRET: super-secret-jwt-token-with-at-least-32-characters-long + DATABASE_URL: postgres://postgres:postgres@db:5432/postgres + PGOPTIONS: "-c search_path=storage" + AWS_ACCESS_KEY_ID: replace-with-your-aws-key + AWS_SECRET_ACCESS_KEY: replace-with-your-aws-secret + FILE_SIZE_LIMIT: 52428800 + STORAGE_BACKEND: file + FILE_STORAGE_BACKEND_PATH: /tmp/storage + ENABLE_IMAGE_TRANSFORMATION: "true" + IMGPROXY_URL: http://imgproxy:8080 + volumes: + - assets-volume:/tmp/storage + healthcheck: + test: ['CMD-SHELL', 'curl -f -LI http://localhost:5000/status'] + db: + build: + context: ./postgres + ports: + - 5432:5432 + command: + - postgres + - -c + - wal_level=logical + environment: + POSTGRES_DB: postgres + POSTGRES_USER: postgres + POSTGRES_PASSWORD: postgres + POSTGRES_PORT: 5432 + healthcheck: + test: [ "CMD-SHELL", "pg_isready" ] + interval: 10s + timeout: 5s + retries: 5 + + imgproxy: + image: darthsim/imgproxy 
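+    # imgproxy performs the actual image resizing; the storage service above
+    # reaches it through its IMGPROXY_URL environment variable.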
+    ports:
+      - 50020:8080
+    volumes:
+      - assets-volume:/tmp/storage
+    environment:
+      - IMGPROXY_LOCAL_FILESYSTEM_ROOT=/
+      - IMGPROXY_USE_ETAG=true
+volumes:
+  assets-volume:
\ No newline at end of file
diff --git a/infra/kong/Dockerfile b/infra/kong/Dockerfile
new file mode 100644
index 00000000..4597f903
--- /dev/null
+++ b/infra/kong/Dockerfile
@@ -0,0 +1,15 @@
+FROM kong:2.1
+
+COPY kong.yml /var/lib/kong/kong.yml
+
+# Build time defaults
+ARG build_KONG_DATABASE=off
+ARG build_KONG_PLUGINS=request-transformer,cors,key-auth
+ARG build_KONG_DECLARATIVE_CONFIG=/var/lib/kong/kong.yml
+
+# Run time values
+ENV KONG_DATABASE=$build_KONG_DATABASE
+ENV KONG_PLUGINS=$build_KONG_PLUGINS
+ENV KONG_DECLARATIVE_CONFIG=$build_KONG_DECLARATIVE_CONFIG
+
+EXPOSE 8000
diff --git a/infra/kong/kong.yml b/infra/kong/kong.yml
new file mode 100644
index 00000000..118003ce
--- /dev/null
+++ b/infra/kong/kong.yml
@@ -0,0 +1,29 @@
+_format_version: '1.1'
+services:
+  - name: rest-v1
+    _comment: 'PostgREST: /rest/v1/* -> http://rest:3000/*'
+    url: http://rest:3000/
+    routes:
+      - name: rest-v1-all
+        strip_path: true
+        paths:
+          - /rest/v1/
+    plugins:
+      - name: cors
+      - name: key-auth
+        config:
+          hide_credentials: true
+  - name: storage-v1
+    _comment: 'Storage: /storage/v1/* -> http://storage-api:5000/*'
+    url: http://storage:5000/
+    routes:
+      - name: storage-v1-all
+        strip_path: true
+        paths:
+          - /storage/v1/
+    plugins:
+      - name: cors
+consumers:
+  - username: 'private-key'
+    keyauth_credentials:
+      - key: eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJpc3MiOiJzdXBhYmFzZSIsImlhdCI6MTYwMzk2ODgzNCwiZXhwIjoyNTUwNjUzNjM0LCJhdWQiOiIiLCJzdWIiOiIiLCJSb2xlIjoicG9zdGdyZXMifQ.magCcozTMKNrl76Tj2dsM7XTl_YH0v0ilajzAvIlw3U
diff --git a/infra/postgres/00-initial-schema.sql b/infra/postgres/00-initial-schema.sql
new file mode 100644
index 00000000..737d8bb6
--- /dev/null
+++ b/infra/postgres/00-initial-schema.sql
@@ -0,0 +1,23 @@
+-- Set up realtime
+create publication supabase_realtime for all tables;
+
+-- Extension namespacing
+create schema extensions;
+create extension if not exists "uuid-ossp" with schema extensions;
+create extension if not exists pgcrypto with schema extensions;
+create extension if not exists pgjwt with schema extensions;
+
+-- Developer roles
+create role anon nologin noinherit;
+create role authenticated nologin noinherit; -- "logged in" user: web_user, app_user, etc.
+create role service_role nologin noinherit bypassrls; -- allow developers to create JWTs that bypass their policies
+
+create user authenticator noinherit;
+grant anon to authenticator;
+grant authenticated to authenticator;
+grant service_role to authenticator;
+
+grant usage on schema public to postgres, anon, authenticated, service_role;
+alter default privileges in schema public grant all on tables to postgres, anon, authenticated, service_role;
+alter default privileges in schema public grant all on functions to postgres, anon, authenticated, service_role;
+alter default privileges in schema public grant all on sequences to postgres, anon, authenticated, service_role;
\ No newline at end of file
diff --git a/infra/postgres/Dockerfile b/infra/postgres/Dockerfile
new file mode 100644
index 00000000..bb2198b8
--- /dev/null
+++ b/infra/postgres/Dockerfile
@@ -0,0 +1,20 @@
+FROM supabase/postgres:0.13.0
+
+COPY 00-initial-schema.sql /docker-entrypoint-initdb.d/00-initial-schema.sql
+COPY auth-schema.sql /docker-entrypoint-initdb.d/01-auth-schema.sql
+COPY storage-schema.sql /docker-entrypoint-initdb.d/02-storage-schema.sql
+COPY dummy-data.sql
/docker-entrypoint-initdb.d/03-dummy-data.sql + +# Build time defaults +ARG build_POSTGRES_DB=postgres +ARG build_POSTGRES_USER=postgres +ARG build_POSTGRES_PASSWORD=postgres +ARG build_POSTGRES_PORT=5432 + +# Run time values +ENV POSTGRES_DB=$build_POSTGRES_DB +ENV POSTGRES_USER=$build_POSTGRES_USER +ENV POSTGRES_PASSWORD=$build_POSTGRES_PASSWORD +ENV POSTGRES_PORT=$build_POSTGRES_PORT + +EXPOSE 5432 \ No newline at end of file diff --git a/infra/postgres/auth-schema.sql b/infra/postgres/auth-schema.sql new file mode 100644 index 00000000..7a88c4b1 --- /dev/null +++ b/infra/postgres/auth-schema.sql @@ -0,0 +1,89 @@ +CREATE SCHEMA IF NOT EXISTS auth AUTHORIZATION postgres; + +-- auth.users definition +CREATE TABLE auth.users ( + instance_id uuid NULL, + id uuid NOT NULL, + aud varchar(255) NULL, + "role" varchar(255) NULL, + email varchar(255) NULL, + encrypted_password varchar(255) NULL, + confirmed_at timestamptz NULL, + invited_at timestamptz NULL, + confirmation_token varchar(255) NULL, + confirmation_sent_at timestamptz NULL, + recovery_token varchar(255) NULL, + recovery_sent_at timestamptz NULL, + email_change_token varchar(255) NULL, + email_change varchar(255) NULL, + email_change_sent_at timestamptz NULL, + last_sign_in_at timestamptz NULL, + raw_app_meta_data jsonb NULL, + raw_user_meta_data jsonb NULL, + is_super_admin bool NULL, + created_at timestamptz NULL, + updated_at timestamptz NULL, + CONSTRAINT users_pkey PRIMARY KEY (id) +); +CREATE INDEX users_instance_id_email_idx ON auth.users USING btree (instance_id, email); +CREATE INDEX users_instance_id_idx ON auth.users USING btree (instance_id); +-- auth.refresh_tokens definition +CREATE TABLE auth.refresh_tokens ( + instance_id uuid NULL, + id bigserial NOT NULL, + "token" varchar(255) NULL, + user_id varchar(255) NULL, + revoked bool NULL, + created_at timestamptz NULL, + updated_at timestamptz NULL, + CONSTRAINT refresh_tokens_pkey PRIMARY KEY (id) +); +CREATE INDEX refresh_tokens_instance_id_idx ON auth.refresh_tokens USING btree (instance_id); +CREATE INDEX refresh_tokens_instance_id_user_id_idx ON auth.refresh_tokens USING btree (instance_id, user_id); +CREATE INDEX refresh_tokens_token_idx ON auth.refresh_tokens USING btree (token); +-- auth.instances definition +CREATE TABLE auth.instances ( + id uuid NOT NULL, + uuid uuid NULL, + raw_base_config text NULL, + created_at timestamptz NULL, + updated_at timestamptz NULL, + CONSTRAINT instances_pkey PRIMARY KEY (id) +); +-- auth.audit_log_entries definition +CREATE TABLE auth.audit_log_entries ( + instance_id uuid NULL, + id uuid NOT NULL, + payload json NULL, + created_at timestamptz NULL, + CONSTRAINT audit_log_entries_pkey PRIMARY KEY (id) +); +CREATE INDEX audit_logs_instance_id_idx ON auth.audit_log_entries USING btree (instance_id); +-- auth.schema_migrations definition +CREATE TABLE auth.schema_migrations ( + "version" varchar(255) NOT NULL, + CONSTRAINT schema_migrations_pkey PRIMARY KEY ("version") +); +INSERT INTO auth.schema_migrations (version) +VALUES ('20171026211738'), + ('20171026211808'), + ('20171026211834'), + ('20180103212743'), + ('20180108183307'), + ('20180119214651'), + ('20180125194653'); +-- Gets the User ID from the request cookie +create or replace function auth.uid() returns uuid as $$ + select nullif(current_setting('request.jwt.claim.sub', true), '')::uuid; +$$ language sql stable; +-- Gets the User Role from the request cookie +create or replace function auth.role() returns text as $$ + select 
nullif(current_setting('request.jwt.claim.role', true), '')::text; +$$ language sql stable; +-- Gets the User Email from the request cookie +create or replace function auth.email() returns text as $$ + select nullif(current_setting('request.jwt.claim.email', true), '')::text; +$$ language sql stable; +GRANT ALL PRIVILEGES ON SCHEMA auth TO postgres; +GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA auth TO postgres; +GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA auth TO postgres; \ No newline at end of file diff --git a/infra/postgres/dummy-data.sql b/infra/postgres/dummy-data.sql new file mode 100644 index 00000000..8aef8529 --- /dev/null +++ b/infra/postgres/dummy-data.sql @@ -0,0 +1,56 @@ +-- insert users +INSERT INTO "auth"."users" ("instance_id", "id", "aud", "role", "email", "encrypted_password", "confirmed_at", "invited_at", "confirmation_token", "confirmation_sent_at", "recovery_token", "recovery_sent_at", "email_change_token", "email_change", "email_change_sent_at", "last_sign_in_at", "raw_app_meta_data", "raw_user_meta_data", "is_super_admin", "created_at", "updated_at") VALUES +('00000000-0000-0000-0000-000000000000', '317eadce-631a-4429-a0bb-f19a7a517b4a', 'authenticated', 'authenticated', 'inian+user2@supabase.io', '', NULL, '2021-02-17 04:41:13.408828+00', '541rn7rTZPGeGCYsp0a38g', '2021-02-17 04:41:13.408828+00', '', NULL, '', '', NULL, NULL, '{"provider": "email"}', 'null', 'f', '2021-02-17 04:41:13.406912+00', '2021-02-17 04:41:13.406919+00'), +('00000000-0000-0000-0000-000000000000', '4d56e902-f0a0-4662-8448-a4d9e643c142', 'authenticated', 'authenticated', 'inian+user1@supabase.io', '', NULL, '2021-02-17 04:40:58.570482+00', 'U1HvzExEO3l7JzP-4tTxJA', '2021-02-17 04:40:58.570482+00', '', NULL, '', '', NULL, NULL, '{"provider": "email"}', 'null', 'f', '2021-02-17 04:40:58.568637+00', '2021-02-17 04:40:58.568642+00'), +('00000000-0000-0000-0000-000000000000', 'd8c7bce9-cfeb-497b-bd61-e66ce2cbdaa2', 'authenticated', 'authenticated', 'inian+admin@supabase.io', '', NULL, '2021-02-17 04:40:42.901743+00', '3EG99GjT_e3NC4eGEBXOjw', '2021-02-17 04:40:42.901743+00', '', NULL, '', '', NULL, NULL, '{"provider": "email"}', 'null', 'f', '2021-02-17 04:40:42.890632+00', '2021-02-17 04:40:42.890637+00'); + +-- insert buckets +INSERT INTO "storage"."buckets" ("id", "name", "owner", "created_at", "updated_at") VALUES +('bucket2', 'bucket2', '4d56e902-f0a0-4662-8448-a4d9e643c142', '2021-02-17 04:43:32.770206+00', '2021-02-17 04:43:32.770206+00'), +('bucket3', 'bucket3', '4d56e902-f0a0-4662-8448-a4d9e643c142', '2021-02-17 04:43:32.770206+00', '2021-02-17 04:43:32.770206+00'), +('bucket4', 'bucket4', '317eadce-631a-4429-a0bb-f19a7a517b4a', '2021-02-25 09:23:01.58385+00', '2021-02-25 09:23:01.58385+00'), +('bucket5', 'bucket5', '317eadce-631a-4429-a0bb-f19a7a517b4a', '2021-02-27 03:04:25.6386+00', '2021-02-27 03:04:25.6386+00'); + + +-- insert objects +INSERT INTO "storage"."objects" ("id", "bucket_id", "name", "owner", "created_at", "updated_at", "last_accessed_at", "metadata") VALUES +('03e458f9-892f-4db2-8cb9-d3401a689e25', 'bucket2', 'public/sadcat-upload23.png', '317eadce-631a-4429-a0bb-f19a7a517b4a', '2021-03-04 08:26:08.553748+00', '2021-03-04 08:26:08.553748+00', '2021-03-04 08:26:08.553748+00', '{"mimetype": "image/svg+xml", "size": 1234}'), +('070825af-a11d-44fe-9f1d-abdc76f686f2', 'bucket2', 'public/sadcat-upload.png', '317eadce-631a-4429-a0bb-f19a7a517b4a', '2021-03-02 16:31:11.115996+00', '2021-03-02 16:31:11.115996+00', '2021-03-02 16:31:11.115996+00', '{"mimetype": "image/png", 
"size": 1234}'), +('0cac5609-11e1-4f21-b486-d0eeb60909f6', 'bucket2', 'curlimage.jpg', 'd8c7bce9-cfeb-497b-bd61-e66ce2cbdaa2', '2021-02-23 11:05:16.625075+00', '2021-02-23 11:05:16.625075+00', '2021-02-23 11:05:16.625075+00', '{"size": 1234}'), +('147c6795-94d5-4008-9d81-f7ba3b4f8a9f', 'bucket2', 'folder/only_uid.jpg', 'd8c7bce9-cfeb-497b-bd61-e66ce2cbdaa2', '2021-02-17 10:36:01.504227+00', '2021-02-17 11:03:03.049618+00', '2021-02-17 10:36:01.504227+00', '{"size": 1234}'), +('65a3aa9c-0ff2-4adc-85d0-eab673c27443', 'bucket2', 'authenticated/casestudy.png', 'd8c7bce9-cfeb-497b-bd61-e66ce2cbdaa2', '2021-02-17 10:42:19.366559+00', '2021-02-17 11:03:30.025116+00', '2021-02-17 10:42:19.366559+00', '{"size": 1234}'), +('10ABE273-D77A-4BDA-B410-6FC0CA3E6ADC', 'bucket2', 'authenticated/cat.jpg', 'd8c7bce9-cfeb-497b-bd61-e66ce2cbdaa2', '2021-02-17 10:42:19.366559+00', '2021-02-17 11:03:30.025116+00', '2021-02-17 10:42:19.366559+00', '{"size": 1234}'), +('1edccac7-0876-4e9f-89da-a08d2a5f654b', 'bucket2', 'authenticated/delete.png', '317eadce-631a-4429-a0bb-f19a7a517b4a', '2021-03-02 16:31:11.115996+00', '2021-03-02 16:31:11.115996+00', '2021-03-02 16:31:11.115996+00', '{"mimetype": "image/png", "size": 1234}'), +('1a911f3c-8c1d-4661-93c1-8e065e4d757e', 'bucket2', 'authenticated/delete1.png', '317eadce-631a-4429-a0bb-f19a7a517b4a', '2021-02-22 22:29:15.14732+00', '2021-02-22 22:29:15.14732+00', '2021-03-02 09:32:17.116+00', '{"mimetype": "image/png", "size": 1234}'), +('372d5d74-e24d-49dc-abe8-47d7eb226a2e', 'bucket2', 'authenticated/delete-multiple1.png', '317eadce-631a-4429-a0bb-f19a7a517b4a', '2021-02-22 22:29:15.14732+00', '2021-02-22 22:29:15.14732+00', '2021-03-02 09:32:17.116+00', '{"mimetype": "image/png", "size": 1234}'), +('34811c1b-85e5-4eb6-a5e3-d607b2f6986e', 'bucket2', 'authenticated/delete-multiple2.png', '317eadce-631a-4429-a0bb-f19a7a517b4a', '2021-02-22 22:29:15.14732+00', '2021-02-22 22:29:15.14732+00', '2021-03-02 09:32:17.116+00', '{"mimetype": "image/png", "size": 1234}'), +('45950ff2-d3a8-4add-8e49-bafc01198340', 'bucket2', 'authenticated/delete-multiple3.png', '317eadce-631a-4429-a0bb-f19a7a517b4a', '2021-02-22 22:29:15.14732+00', '2021-02-22 22:29:15.14732+00', '2021-03-02 09:32:17.116+00', '{"mimetype": "image/png", "size": 1234}'), +('469b0216-5419-41f6-9a37-2abfd7fad29c', 'bucket2', 'authenticated/delete-multiple4.png', '317eadce-631a-4429-a0bb-f19a7a517b4a', '2021-02-22 22:29:15.14732+00', '2021-02-22 22:29:15.14732+00', '2021-03-02 09:32:17.116+00', '{"mimetype": "image/png", "size": 1234}'), +('55930619-a668-4dbc-aea3-b93dfe101e7f', 'bucket2', 'authenticated/delete-multiple7.png', '317eadce-631a-4429-a0bb-f19a7a517b4a', '2021-02-22 22:29:15.14732+00', '2021-02-22 22:29:15.14732+00', '2021-03-02 09:32:17.116+00', '{"mimetype": "image/png", "size": 1234}'), +('D1CE4E4F-03E2-473D-858B-301D7989B581', 'bucket2', 'authenticated/move-orig.png', '317eadce-631a-4429-a0bb-f19a7a517b4a', '2021-02-22 22:29:15.14732+00', '2021-02-22 22:29:15.14732+00', '2021-03-02 09:32:17.116+00', '{"mimetype": "image/png", "size": 1234}'), +('222b3d1e-bc17-414c-b336-47894aa4d697', 'bucket2', 'authenticated/move-orig-2.png', '317eadce-631a-4429-a0bb-f19a7a517b4a', '2021-02-22 22:29:15.14732+00', '2021-02-22 22:29:15.14732+00', '2021-03-02 09:32:17.116+00', '{"mimetype": "image/png", "size": 1234}'), +('8f7d643d-1e82-4d39-ae39-d9bd6b0cfe9c', 'bucket2', 'authenticated/move-orig-3.png', '317eadce-631a-4429-a0bb-f19a7a517b4a', '2021-02-22 22:29:15.14732+00', '2021-02-22 22:29:15.14732+00', '2021-03-02 
09:32:17.116+00', '{"mimetype": "image/png", "size": 1234}'),
('8377527d-3518-4dc8-8290-c6926470e795', 'bucket2', 'folder/subfolder/public-all-permissions.png', 'd8c7bce9-cfeb-497b-bd61-e66ce2cbdaa2', '2021-02-17 10:26:42.791214+00', '2021-02-17 11:03:30.025116+00', '2021-02-17 10:26:42.791214+00', '{"size": 1234}'),
('b39ae4ab-802b-4c42-9271-3f908c34363c', 'bucket2', 'private/sadcat-upload3.png', '317eadce-631a-4429-a0bb-f19a7a517b4a', '2021-03-01 08:53:29.567975+00', '2021-03-01 08:53:29.567975+00', '2021-03-01 08:53:29.567975+00', '{"mimetype": "image/svg+xml", "size": 1234}'),
('8098E1AC-C744-4368-86DF-71B60CCDE221', 'bucket3', 'sadcat-upload3.png', '317eadce-631a-4429-a0bb-f19a7a517b4a', '2021-03-01 08:53:29.567975+00', '2021-03-01 08:53:29.567975+00', '2021-03-01 08:53:29.567975+00', '{"mimetype": "image/svg+xml", "size": 1234}'),
('D3EB488E-94F4-46CD-86D3-242C13B95BAC', 'bucket3', 'sadcat-upload2.png', '317eadce-631a-4429-a0bb-f19a7a517b4a', '2021-03-01 08:53:29.567975+00', '2021-03-01 08:53:29.567975+00', '2021-03-01 08:53:29.567975+00', '{"mimetype": "image/svg+xml", "size": 1234}');

-- add policies
-- allows a given user to CRUD all buckets
CREATE POLICY crud_buckets ON storage.buckets for all USING (auth.uid() = '317eadce-631a-4429-a0bb-f19a7a517b4a');
-- allow public CRUD access to the public folder in bucket2
CREATE POLICY crud_public_folder ON storage.objects for all USING (bucket_id='bucket2' and (storage.foldername(name))[1] = 'public');
-- allow public CRUD access to a particular file in bucket2
CREATE POLICY crud_public_file ON storage.objects for all USING (bucket_id='bucket2' and name = 'folder/subfolder/public-all-permissions.png');
-- allow CRUD access to a folder in bucket2 to a user with a given id
CREATE POLICY crud_uid_folder ON storage.objects for all USING (bucket_id='bucket2' and (storage.foldername(name))[1] = 'only_uid' and auth.uid() = 'd8c7bce9-cfeb-497b-bd61-e66ce2cbdaa2');
-- allow CRUD access to a file in bucket2 to a user with a given id
CREATE POLICY crud_uid_file ON storage.objects for all USING (bucket_id='bucket2' and name = 'folder/only_uid.jpg' and auth.uid() = 'd8c7bce9-cfeb-497b-bd61-e66ce2cbdaa2');
-- allow CRUD access to a folder in bucket2 to all authenticated users
CREATE POLICY authenticated_folder ON storage.objects for all USING (bucket_id='bucket2' and (storage.foldername(name))[1] = 'authenticated' and auth.role() = 'authenticated');
-- allow CRUD access to a folder in bucket2 to its owners
CREATE POLICY crud_owner_only ON storage.objects for all USING (bucket_id='bucket2' and (storage.foldername(name))[1] = 'only_owner' and owner = auth.uid());
-- allow CRUD access to bucket4
CREATE POLICY open_all_update ON storage.objects for all WITH CHECK (bucket_id='bucket4');

CREATE POLICY crud_my_bucket ON storage.objects for all USING (bucket_id='my-private-bucket' and auth.uid()::text = '317eadce-631a-4429-a0bb-f19a7a517b4a');
\ No newline at end of file
diff --git a/infra/postgres/storage-schema.sql b/infra/postgres/storage-schema.sql
new file mode 100644
index 00000000..b0c9e2a6
--- /dev/null
+++ b/infra/postgres/storage-schema.sql
@@ -0,0 +1,107 @@
+CREATE SCHEMA IF NOT EXISTS storage AUTHORIZATION postgres;
+
+grant usage on schema storage to postgres, anon, authenticated, service_role;
+alter default privileges in schema storage grant all on tables to postgres, anon, authenticated, service_role;
+alter default privileges in schema storage grant all on functions to postgres, anon, authenticated,
service_role; +alter default privileges in schema storage grant all on sequences to postgres, anon, authenticated, service_role; + +DROP TABLE IF EXISTS "storage"."buckets"; +CREATE TABLE "storage"."buckets" ( + "id" text not NULL, + "name" text NOT NULL, + "owner" uuid, + "created_at" timestamptz DEFAULT now(), + "updated_at" timestamptz DEFAULT now(), + CONSTRAINT "buckets_owner_fkey" FOREIGN KEY ("owner") REFERENCES "auth"."users"("id"), + PRIMARY KEY ("id") +); +CREATE UNIQUE INDEX "bname" ON "storage"."buckets" USING BTREE ("name"); + +DROP TABLE IF EXISTS "storage"."objects"; +CREATE TABLE "storage"."objects" ( + "id" uuid NOT NULL DEFAULT extensions.uuid_generate_v4(), + "bucket_id" text, + "name" text, + "owner" uuid, + "created_at" timestamptz DEFAULT now(), + "updated_at" timestamptz DEFAULT now(), + "last_accessed_at" timestamptz DEFAULT now(), + "metadata" jsonb, + CONSTRAINT "objects_bucketId_fkey" FOREIGN KEY ("bucket_id") REFERENCES "storage"."buckets"("id"), + CONSTRAINT "objects_owner_fkey" FOREIGN KEY ("owner") REFERENCES "auth"."users"("id"), + PRIMARY KEY ("id") +); +CREATE UNIQUE INDEX "bucketid_objname" ON "storage"."objects" USING BTREE ("bucket_id","name"); +CREATE INDEX name_prefix_search ON storage.objects(name text_pattern_ops); + +ALTER TABLE storage.objects ENABLE ROW LEVEL SECURITY; + +CREATE OR REPLACE FUNCTION storage.foldername(name text) + RETURNS text[] + LANGUAGE plpgsql +AS $function$ +DECLARE +_parts text[]; +BEGIN + select string_to_array(name, '/') into _parts; + return _parts[1:array_length(_parts,1)-1]; +END +$function$; + +CREATE OR REPLACE FUNCTION storage.filename(name text) + RETURNS text + LANGUAGE plpgsql +AS $function$ +DECLARE +_parts text[]; +BEGIN + select string_to_array(name, '/') into _parts; + return _parts[array_length(_parts,1)]; +END +$function$; + +CREATE OR REPLACE FUNCTION storage.extension(name text) + RETURNS text + LANGUAGE plpgsql +AS $function$ +DECLARE +_parts text[]; +_filename text; +BEGIN + select string_to_array(name, '/') into _parts; + select _parts[array_length(_parts,1)] into _filename; + return split_part(_filename, '.', 2); +END +$function$; + +CREATE OR REPLACE FUNCTION storage.search(prefix text, bucketname text, limits int DEFAULT 100, levels int DEFAULT 1, offsets int DEFAULT 0) + RETURNS TABLE ( + name text, + id uuid, + updated_at TIMESTAMPTZ, + created_at TIMESTAMPTZ, + last_accessed_at TIMESTAMPTZ, + metadata jsonb + ) + LANGUAGE plpgsql +AS $function$ +BEGIN + return query + with files_folders as ( + select ((string_to_array(objects.name, '/'))[levels]) as folder + from objects + where objects.name ilike prefix || '%' + and bucket_id = bucketname + GROUP by folder + limit limits + offset offsets + ) + select files_folders.folder as name, objects.id, objects.updated_at, objects.created_at, objects.last_accessed_at, objects.metadata from files_folders + left join objects + on prefix || files_folders.folder = objects.name and objects.bucket_id=bucketname; +END +$function$; + +GRANT ALL PRIVILEGES ON SCHEMA storage TO postgres; +GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA storage TO postgres; +GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA storage TO postgres; \ No newline at end of file diff --git a/infra/storage/Dockerfile b/infra/storage/Dockerfile new file mode 100644 index 00000000..31d66538 --- /dev/null +++ b/infra/storage/Dockerfile @@ -0,0 +1,3 @@ +FROM supabase/storage-api:v0.25.1 + +RUN apk add curl --no-cache \ No newline at end of file From 6be51ee4a5bedabce6a25d05b9a38ae65f40efef Mon Sep 17 
00:00:00 2001 From: "joel@joellee.org" Date: Tue, 13 Dec 2022 14:52:04 +0530 Subject: [PATCH 02/20] feat: add copy and transform option type --- storage3/_async/file_api.py | 38 ++++++++++++++++++++++++++++++++-- storage3/_sync/bucket.py | 2 +- storage3/_sync/file_api.py | 41 ++++++++++++++++++++++++++++++++++--- storage3/types.py | 11 +++++++++- storage3/utils.py | 1 + tests/_sync/test_client.py | 4 +++- 6 files changed, 89 insertions(+), 8 deletions(-) diff --git a/storage3/_async/file_api.py b/storage3/_async/file_api.py index 065c51ef..ab4db266 100644 --- a/storage3/_async/file_api.py +++ b/storage3/_async/file_api.py @@ -8,7 +8,7 @@ from httpx import HTTPError, Response from ..constants import DEFAULT_FILE_OPTIONS, DEFAULT_SEARCH_OPTIONS -from ..types import BaseBucket, ListBucketFilesOptions, RequestMethod +from ..types import BaseBucket, ListBucketFilesOptions, CreateSignedURLOptions, RequestMethod from ..utils import AsyncClient, StorageException __all__ = ["AsyncBucket"] @@ -44,7 +44,7 @@ async def _request( return response - async def create_signed_url(self, path: str, expires_in: int) -> dict[str, str]: + async def create_signed_url(self, path: str, expires_in: int, options: CreateSignedURLOptions = {}) -> dict[str, str]: """ Parameters ---------- @@ -65,6 +65,16 @@ async def create_signed_url(self, path: str, expires_in: int) -> dict[str, str]: ] = f"{self._client.base_url}{cast(str, data['signedURL']).lstrip('/')}" return data + async def create_signed_urls(self, paths: List[str], expires_in: int, options: dict[str, str]) ->dict[str, str]: + response = await self._request("POST", + f"/object/sign/{self.bucket_id}",json={ + "expires_in": expires_in, + "paths": paths}) + # TODO(joel): add support for download option + return response.json() + + pass + async def get_public_url(self, path: str) -> str: """ Parameters @@ -97,6 +107,29 @@ async def move(self, from_path: str, to_path: str) -> dict[str, str]: ) return res.json() + async def copy(self, from_path: str, to_path: str) -> dict[str, str]: + """ + Copies an existing file to a new path in the same bucket. + + Parameters + ---------- + from_path + The original file path, including the current file name. For example `folder/image.png`. + to_path + The new file path, including the new file name. For example `folder/image-copy.png`. 
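+
+        Example
+        -------
+        A minimal usage sketch, assuming ``bucket`` is an ``AsyncBucket``
+        obtained from the async storage client::
+
+            res = await bucket.copy("folder/image.png", "folder/image-copy.png")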
+ """ + res = await self._request( + "POST", + "/object/copy", + json={ + "bucketId": self.id, + "sourceKey": from_path, + "destinationKey": to_path + } + ) + return res.json() + + async def remove(self, paths: list) -> dict[str, str]: """ Deletes files within the same bucket @@ -200,6 +233,7 @@ def _get_final_path(self, path: str) -> str: return f"{self.id}/{path}" + # this class is returned by methods that fetch buckets, for example StorageBucketAPI.get_bucket # adding this mixin on the BaseBucket means that those bucket objects can also be used to # run methods like `upload` and `download` diff --git a/storage3/_sync/bucket.py b/storage3/_sync/bucket.py index efbd6cd8..1dbfd81b 100644 --- a/storage3/_sync/bucket.py +++ b/storage3/_sync/bucket.py @@ -5,7 +5,7 @@ from httpx import HTTPError, Response from ..types import RequestMethod -from ..utils import StorageException, SyncClient +from ..utils import SyncClient, StorageException from .file_api import SyncBucket __all__ = ["SyncStorageBucketAPI"] diff --git a/storage3/_sync/file_api.py b/storage3/_sync/file_api.py index ca046e98..7c21b2ef 100644 --- a/storage3/_sync/file_api.py +++ b/storage3/_sync/file_api.py @@ -8,8 +8,8 @@ from httpx import HTTPError, Response from ..constants import DEFAULT_FILE_OPTIONS, DEFAULT_SEARCH_OPTIONS -from ..types import BaseBucket, ListBucketFilesOptions, RequestMethod -from ..utils import StorageException, SyncClient +from ..types import BaseBucket, ListBucketFilesOptions, CreateSignedURLOptions, RequestMethod +from ..utils import SyncClient, StorageException __all__ = ["SyncBucket"] @@ -44,7 +44,7 @@ def _request( return response - def create_signed_url(self, path: str, expires_in: int) -> dict[str, str]: + def create_signed_url(self, path: str, expires_in: int, options: CreateSignedURLOptions = {}) -> dict[str, str]: """ Parameters ---------- @@ -65,6 +65,16 @@ def create_signed_url(self, path: str, expires_in: int) -> dict[str, str]: ] = f"{self._client.base_url}{cast(str, data['signedURL']).lstrip('/')}" return data + def create_signed_urls(self, paths: List[str], expires_in: int, options: dict[str, str]) ->dict[str, str]: + response = self._request("POST", + f"/object/sign/{self.bucket_id}",json={ + "expires_in": expires_in, + "paths": paths}) + # TODO(joel): add support for download option + return response.json() + + pass + def get_public_url(self, path: str) -> str: """ Parameters @@ -97,6 +107,29 @@ def move(self, from_path: str, to_path: str) -> dict[str, str]: ) return res.json() + def copy(self, from_path: str, to_path: str) -> dict[str, str]: + """ + Copies an existing file to a new path in the same bucket. + + Parameters + ---------- + from_path + The original file path, including the current file name. For example `folder/image.png`. + to_path + The new file path, including the new file name. For example `folder/image-copy.png`. 
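+
+        Example
+        -------
+        A minimal usage sketch, assuming ``bucket`` is a ``SyncBucket``
+        obtained from the sync storage client::
+
+            res = bucket.copy("folder/image.png", "folder/image-copy.png")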
+ """ + res = self._request( + "POST", + "/object/copy", + json={ + "bucketId": self.id, + "sourceKey": from_path, + "destinationKey": to_path + } + ) + return res.json() + + def remove(self, paths: list) -> dict[str, str]: """ Deletes files within the same bucket @@ -188,6 +221,7 @@ def upload( files = {"file": (filename, open(file, "rb"), headers.pop("content-type"))} _path = self._get_final_path(path) + return self._request( "POST", f"/object/{_path}", @@ -199,6 +233,7 @@ def _get_final_path(self, path: str) -> str: return f"{self.id}/{path}" + # this class is returned by methods that fetch buckets, for example StorageBucketAPI.get_bucket # adding this mixin on the BaseBucket means that those bucket objects can also be used to # run methods like `upload` and `download` diff --git a/storage3/types.py b/storage3/types.py index 02da520b..abe55374 100644 --- a/storage3/types.py +++ b/storage3/types.py @@ -2,7 +2,7 @@ from datetime import datetime import dateutil.parser -from typing_extensions import Literal, TypedDict +from typing_extensions import Literal, TypedDict, Union, Optional RequestMethod = Literal["GET", "POST", "DELETE", "PUT", "HEAD"] @@ -35,3 +35,12 @@ class ListBucketFilesOptions(TypedDict): limit: int offset: int sortBy: _sortByType + +class TransformOptions(TypedDict): + height: Optional[float] + width: Optional[float] + resize: Optional[Literal['cover'] | Literal['contain'] | Literal['fill']] + +class CreateSignedURLOptions(TypedDict): + download: Optional[str | bool] + transform: Optional[TransformOptions] diff --git a/storage3/utils.py b/storage3/utils.py index 28fa9ea5..221dcff0 100644 --- a/storage3/utils.py +++ b/storage3/utils.py @@ -9,5 +9,6 @@ def aclose(self) -> None: self.close() + class StorageException(Exception): """Error raised when an operation on the storage API fails.""" diff --git a/tests/_sync/test_client.py b/tests/_sync/test_client.py index d3622128..30154626 100644 --- a/tests/_sync/test_client.py +++ b/tests/_sync/test_client.py @@ -79,7 +79,9 @@ def bucket(storage: SyncStorageClient, uuid_factory: Callable[[], str]) -> str: @pytest.fixture(scope="module") -def public_bucket(storage: SyncStorageClient, uuid_factory: Callable[[], str]) -> str: +def public_bucket( + storage: SyncStorageClient, uuid_factory: Callable[[], str] +) -> str: """Creates a test public bucket which will be used in the whole storage tests run and deleted at the end""" bucket_id = uuid_factory() From 7352f6128a1bd52e863dea0cf8db6f53519e6c78 Mon Sep 17 00:00:00 2001 From: "joel@joellee.org" Date: Tue, 13 Dec 2022 18:45:02 +0530 Subject: [PATCH 03/20] fix: add transform options on public url and download --- storage3/_async/file_api.py | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/storage3/_async/file_api.py b/storage3/_async/file_api.py index ab4db266..04b2a304 100644 --- a/storage3/_async/file_api.py +++ b/storage3/_async/file_api.py @@ -1,4 +1,5 @@ from __future__ import annotations +import urllib.parse from dataclasses import dataclass, field from io import BufferedReader, FileIO @@ -8,7 +9,7 @@ from httpx import HTTPError, Response from ..constants import DEFAULT_FILE_OPTIONS, DEFAULT_SEARCH_OPTIONS -from ..types import BaseBucket, ListBucketFilesOptions, CreateSignedURLOptions, RequestMethod +from ..types import BaseBucket, ListBucketFilesOptions, CreateSignedURLOptions, TransformOptions, RequestMethod from ..utils import AsyncClient, StorageException __all__ = ["AsyncBucket"] @@ -75,15 +76,18 @@ async def create_signed_urls(self, paths: 
List[str], expires_in: int, options: d
         pass
 
-    async def get_public_url(self, path: str) -> str:
+    async def get_public_url(self, path: str, options: TransformOptions = {}) -> str:
         """
         Parameters
         ----------
         path
             file path, including the path and file name. For example `folder/image.png`.
         """
+        render_path = 'render/image/authenticated' if options.get('transform') else 'object'
+        transformation_query = urllib.parse.urlencode(options)
+        query_string = f"?{transformation_query}" if transformation_query else ''
         _path = self._get_final_path(path)
-        return f"{self._client.base_url}object/public/{_path}"
+        return f"{self._client.base_url}{render_path}/public/{_path}{query_string}"
 
     async def move(self, from_path: str, to_path: str) -> dict[str, str]:
         """
@@ -172,7 +176,7 @@ async def list(
         )
         return response.json()
 
-    async def download(self, path: str) -> bytes:
+    async def download(self, path: str, options: TransformOptions={}) -> bytes:
         """
         Downloads a file.
 
@@ -181,10 +185,14 @@ async def download(self, path: str) -> bytes:
         path
             The file path to be downloaded, including the path and file name. For example `folder/image.png`.
         """
+        render_path = 'render/image/authenticated' if options.get('transform') else 'object'
+        transformation_query = urllib.parse.urlencode(options)
+        query_string = f"?{transformation_query}" if transformation_query else ''
+
         _path = self._get_final_path(path)
         response = await self._request(
             "GET",
-            f"/object/{_path}",
+            f"{render_path}/{_path}{query_string}",
         )
         return response.content

From 122b2a3403dfa1fa33dd384b2a7c42e9f3094f9e Mon Sep 17 00:00:00 2001
From: "joel@joellee.org"
Date: Tue, 13 Dec 2022 18:46:03 +0530
Subject: [PATCH 04/20] feat: add transform options to signed_url, download,
 and public_url

---
 storage3/_async/file_api.py | 57 ++++++++++++++++++++-------------
 storage3/_sync/file_api.py  | 63 ++++++++++++++++++++++++-------------
 storage3/types.py           |  4 ++-
 storage3/utils.py           |  1 -
 tests/_sync/test_client.py  |  4 +--
 5 files changed, 81 insertions(+), 48 deletions(-)

diff --git a/storage3/_async/file_api.py b/storage3/_async/file_api.py
index 04b2a304..812070f1 100644
--- a/storage3/_async/file_api.py
+++ b/storage3/_async/file_api.py
@@ -9,7 +9,13 @@ from httpx import HTTPError, Response
 
 from ..constants import DEFAULT_FILE_OPTIONS, DEFAULT_SEARCH_OPTIONS
-from ..types import BaseBucket, ListBucketFilesOptions, CreateSignedURLOptions, TransformOptions, RequestMethod
+from ..types import (
+    BaseBucket,
+    ListBucketFilesOptions,
+    CreateSignedURLOptions,
+    TransformOptions,
+    RequestMethod,
+)
 from ..utils import AsyncClient, StorageException
 
 __all__ = ["AsyncBucket"]
@@ -45,7 +51,9 @@ async def _request(
 
         return response
 
-    async def create_signed_url(self, path: str, expires_in: int, options: CreateSignedURLOptions = {}) -> dict[str, str]:
+    async def create_signed_url(
+        self, path: str, expires_in: int, options: CreateSignedURLOptions = {}
+    ) -> dict[str, str]:
         """
         Parameters
         ----------
@@ -66,11 +74,14 @@ async def create_signed_url(self, path: str, expires_in: int, options: CreateSig
         ] = f"{self._client.base_url}{cast(str, data['signedURL']).lstrip('/')}"
         return data
 
-    async def create_signed_urls(self, paths: List[str], expires_in: int, options: dict[str, str]) ->dict[str, str]:
-        response = await self._request("POST",
-            f"/object/sign/{self.bucket_id}",json={
-            "expires_in": expires_in,
-            "paths": paths})
+    async def create_signed_urls(
+        self, paths: List[str], expires_in: int, options: dict[str, str]
+    ) -> dict[str, str]:
+        response = await 
self._request(
+            "POST",
+            f"/object/sign/{self.id}",
+            json={"expires_in": expires_in, "paths": paths},
+        )
         # TODO(joel): add support for download option
         return response.json()
 
@@ -83,9 +94,11 @@ async def get_public_url(self, path: str, options: TransformOptions = {}) -> str
         path
             file path, including the path and file name. For example `folder/image.png`.
         """
-        render_path = 'render/image/authenticated' if options.get('transform') else 'object'
+        render_path = (
+            "render/image/authenticated" if options.get("transform") else "object"
+        )
         transformation_query = urllib.parse.urlencode(options)
-        query_string = f"?{transformation_query}" if transformation_query else ''
+        query_string = f"?{transformation_query}" if transformation_query else ""
         _path = self._get_final_path(path)
         return f"{self._client.base_url}{render_path}/public/{_path}{query_string}"
 
@@ -123,17 +136,16 @@ async def copy(self, from_path: str, to_path: str) -> dict[str, str]:
             The new file path, including the new file name. For example `folder/image-copy.png`.
         """
         res = await self._request(
-            "POST",
-            "/object/copy",
-            json={
-                "bucketId": self.id,
-                "sourceKey": from_path,
-                "destinationKey": to_path
-            }
-        )
+            "POST",
+            "/object/copy",
+            json={
+                "bucketId": self.id,
+                "sourceKey": from_path,
+                "destinationKey": to_path,
+            },
+        )
         return res.json()
 
-
     async def remove(self, paths: list) -> dict[str, str]:
         """
         Deletes files within the same bucket
@@ -176,7 +188,7 @@ async def list(
         )
         return response.json()
 
-    async def download(self, path: str, options: TransformOptions={}) -> bytes:
+    async def download(self, path: str, options: TransformOptions = {}) -> bytes:
         """
         Downloads a file.
 
@@ -185,10 +197,14 @@ async def download(self, path: str, options: TransformOptions={}) -> bytes:
         path
             The file path to be downloaded, including the path and file name. For example `folder/image.png`.
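+
+        Example
+        -------
+        A minimal usage sketch, assuming ``bucket`` is an ``AsyncBucket``::
+
+            data = await bucket.download("folder/image.png")
+            with open("image.png", "wb") as f:
+                f.write(data)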
""" - render_path = 'render/image/authenticated' if options.get('transform') else 'object' + render_path = ( + "render/image/authenticated" if options.get("transform") else "object" + ) transformation_query = urllib.parse.urlencode(options) - query_string = f"?{transformation_query}" if transformation_query else '' + query_string = f"?{transformation_query}" if transformation_query else "" _path = self._get_final_path(path) response = await self._request( @@ -241,7 +255,6 @@ def _get_final_path(self, path: str) -> str: return f"{self.id}/{path}" - # this class is returned by methods that fetch buckets, for example StorageBucketAPI.get_bucket # adding this mixin on the BaseBucket means that those bucket objects can also be used to # run methods like `upload` and `download` diff --git a/storage3/_sync/file_api.py b/storage3/_sync/file_api.py index 7c21b2ef..35f4f740 100644 --- a/storage3/_sync/file_api.py +++ b/storage3/_sync/file_api.py @@ -1,4 +1,5 @@ from __future__ import annotations +import urllib.parse from dataclasses import dataclass, field from io import BufferedReader, FileIO @@ -8,7 +9,13 @@ from httpx import HTTPError, Response from ..constants import DEFAULT_FILE_OPTIONS, DEFAULT_SEARCH_OPTIONS -from ..types import BaseBucket, ListBucketFilesOptions, CreateSignedURLOptions, RequestMethod +from ..types import ( + BaseBucket, + ListBucketFilesOptions, + CreateSignedURLOptions, + TransformOptions, + RequestMethod, +) from ..utils import SyncClient, StorageException __all__ = ["SyncBucket"] @@ -44,7 +51,9 @@ def _request( return response - def create_signed_url(self, path: str, expires_in: int, options: CreateSignedURLOptions = {}) -> dict[str, str]: + def create_signed_url( + self, path: str, expires_in: int, options: CreateSignedURLOptions = {} + ) -> dict[str, str]: """ Parameters ---------- @@ -65,25 +74,33 @@ def create_signed_url(self, path: str, expires_in: int, options: CreateSignedURL ] = f"{self._client.base_url}{cast(str, data['signedURL']).lstrip('/')}" return data - def create_signed_urls(self, paths: List[str], expires_in: int, options: dict[str, str]) ->dict[str, str]: - response = self._request("POST", - f"/object/sign/{self.bucket_id}",json={ - "expires_in": expires_in, - "paths": paths}) + def create_signed_urls( + self, paths: List[str], expires_in: int, options: dict[str, str] + ) -> dict[str, str]: + response = self._request( + "POST", + f"/object/sign/{self.bucket_id}", + json={"expires_in": expires_in, "paths": paths}, + ) # TODO(joel): add support for download option return response.json() pass - def get_public_url(self, path: str) -> str: + def get_public_url(self, path: str, options: TransformOptions = {}) -> str: """ Parameters ---------- path file path, including the path and file name. For example `folder/image.png`. """ + render_path = ( + "render/image/authenticated" if options.get("transform") else "object" + ) + transformation_query = urllib.parse.urlencode(options) + query_string = f"?{transformation_query}" if transformation_query else "" _path = self._get_final_path(path) - return f"{self._client.base_url}object/public/{_path}" + return f"{self._client.base_url}{render_path}/public/{_path}/${query_string}" def move(self, from_path: str, to_path: str) -> dict[str, str]: """ @@ -119,17 +136,16 @@ def copy(self, from_path: str, to_path: str) -> dict[str, str]: The new file path, including the new file name. For example `folder/image-copy.png`. 
""" res = self._request( - "POST", - "/object/copy", - json={ - "bucketId": self.id, - "sourceKey": from_path, - "destinationKey": to_path - } - ) + "POST", + "/object/copy", + json={ + "bucketId": self.id, + "sourceKey": from_path, + "destinationKey": to_path, + }, + ) return res.json() - def remove(self, paths: list) -> dict[str, str]: """ Deletes files within the same bucket @@ -172,7 +188,7 @@ def list( ) return response.json() - def download(self, path: str) -> bytes: + def download(self, path: str, options: TransformOptions = {}) -> bytes: """ Downloads a file. @@ -181,10 +197,16 @@ def download(self, path: str) -> bytes: path The file path to be downloaded, including the path and file name. For example `folder/image.png`. """ + render_path = ( + "render/image/authenticated" if options.get("transform") else "object" + ) + transformation_query = urllib.parse.urlencode(options) + query_string = f"?{transformation_query}" if transformation_query else "" + _path = self._get_final_path(path) response = self._request( "GET", - f"/object/{_path}", + f"{render_path}/{_path}{query_string}", ) return response.content @@ -233,7 +255,6 @@ def _get_final_path(self, path: str) -> str: return f"{self.id}/{path}" - # this class is returned by methods that fetch buckets, for example StorageBucketAPI.get_bucket # adding this mixin on the BaseBucket means that those bucket objects can also be used to # run methods like `upload` and `download` diff --git a/storage3/types.py b/storage3/types.py index abe55374..2d82d6e0 100644 --- a/storage3/types.py +++ b/storage3/types.py @@ -36,10 +36,12 @@ class ListBucketFilesOptions(TypedDict): offset: int sortBy: _sortByType + class TransformOptions(TypedDict): height: Optional[float] width: Optional[float] - resize: Optional[Literal['cover'] | Literal['contain'] | Literal['fill']] + resize: Optional[Literal["cover"] | Literal["contain"] | Literal["fill"]] + class CreateSignedURLOptions(TypedDict): download: Optional[str | bool] diff --git a/storage3/utils.py b/storage3/utils.py index 221dcff0..28fa9ea5 100644 --- a/storage3/utils.py +++ b/storage3/utils.py @@ -9,6 +9,5 @@ def aclose(self) -> None: self.close() - class StorageException(Exception): """Error raised when an operation on the storage API fails.""" diff --git a/tests/_sync/test_client.py b/tests/_sync/test_client.py index 30154626..d3622128 100644 --- a/tests/_sync/test_client.py +++ b/tests/_sync/test_client.py @@ -79,9 +79,7 @@ def bucket(storage: SyncStorageClient, uuid_factory: Callable[[], str]) -> str: @pytest.fixture(scope="module") -def public_bucket( - storage: SyncStorageClient, uuid_factory: Callable[[], str] -) -> str: +def public_bucket(storage: SyncStorageClient, uuid_factory: Callable[[], str]) -> str: """Creates a test public bucket which will be used in the whole storage tests run and deleted at the end""" bucket_id = uuid_factory() From 9b967ce41d3598a68b08cbad45435a26fc0d5f0b Mon Sep 17 00:00:00 2001 From: "joel@joellee.org" Date: Fri, 6 Jan 2023 10:51:20 +0800 Subject: [PATCH 05/20] fix: omit infra changes --- infra/docker-compose.yml | 88 ---------------------- infra/kong/Dockerfile | 15 ---- infra/kong/kong.yml | 29 -------- infra/postgres/00-initial-schema.sql | 23 ------ infra/postgres/Dockerfile | 20 ----- infra/postgres/auth-schema.sql | 89 ---------------------- infra/postgres/dummy-data.sql | 56 -------------- infra/postgres/storage-schema.sql | 107 --------------------------- infra/storage/Dockerfile | 3 - storage3/_async/file_api.py | 2 - 10 files changed, 432 deletions(-) 
delete mode 100644 infra/docker-compose.yml delete mode 100644 infra/kong/Dockerfile delete mode 100644 infra/kong/kong.yml delete mode 100644 infra/postgres/00-initial-schema.sql delete mode 100644 infra/postgres/Dockerfile delete mode 100644 infra/postgres/auth-schema.sql delete mode 100644 infra/postgres/dummy-data.sql delete mode 100644 infra/postgres/storage-schema.sql delete mode 100644 infra/storage/Dockerfile diff --git a/infra/docker-compose.yml b/infra/docker-compose.yml deleted file mode 100644 index ea54bf1d..00000000 --- a/infra/docker-compose.yml +++ /dev/null @@ -1,88 +0,0 @@ -# docker-compose.yml - -version: '3.6' -services: - kong: - container_name: supabase-kong - build: - context: ./kong - environment: - KONG_DECLARATIVE_CONFIG: /var/lib/kong/kong.yml - KONG_PLUGINS: request-transformer,cors,key-auth,http-log - ports: - - 8000:8000/tcp - - 8443:8443/tcp - rest: - image: postgrest/postgrest:latest - ports: - - '3000:3000' - depends_on: - storage: - condition: service_healthy - restart: always - environment: - PGRST_DB_URI: postgres://postgres:postgres@db:5432/postgres - PGRST_DB_SCHEMA: public, storage - PGRST_DB_ANON_ROLE: postgres - PGRST_JWT_SECRET: super-secret-jwt-token-with-at-least-32-characters-long - storage: - build: - context: ./storage - ports: - - '5000:5000' - depends_on: - db: - condition: service_healthy - restart: always - environment: - ANON_KEY: eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJyb2xlIjoiYW5vbiIsImlhdCI6MTYxMzUzMTk4NSwiZXhwIjoxOTI5MTA3OTg1fQ.ReNhHIoXIOa-8tL1DO3e26mJmOTnYuvdgobwIYGzrLQ - SERVICE_KEY: eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJyb2xlIjoic2VydmljZV9yb2xlIiwiaWF0IjoxNjEzNTMxOTg1LCJleHAiOjE5MjkxMDc5ODV9.FhK1kZdHmWdCIEZELt0QDCw6FIlCS8rVmp4RzaeI2LM - PROJECT_REF: bjwdssmqcnupljrqypxz # can be any random string - REGION: us-east-1 # region where your bucket is located - POSTGREST_URL: http://rest:3000 - GLOBAL_S3_BUCKET: supa-storage-testing # name of s3 bucket where you want to store objects - PGRST_JWT_SECRET: super-secret-jwt-token-with-at-least-32-characters-long - DATABASE_URL: postgres://postgres:postgres@db:5432/postgres - PGOPTIONS: "-c search_path=storage" - AWS_ACCESS_KEY_ID: replace-with-your-aws-key - AWS_SECRET_ACCESS_KEY: replace-with-your-aws-secret - FILE_SIZE_LIMIT: 52428800 - STORAGE_BACKEND: file - FILE_STORAGE_BACKEND_PATH: /tmp/storage - ENABLE_IMAGE_TRANSFORMATION: "true" - IMGPROXY_URL: http://imgproxy:8080 - volumes: - - assets-volume:/tmp/storage - healthcheck: - test: ['CMD-SHELL', 'curl -f -LI http://localhost:5000/status'] - db: - build: - context: ./postgres - ports: - - 5432:5432 - command: - - postgres - - -c - - wal_level=logical - environment: - POSTGRES_DB: postgres - POSTGRES_USER: postgres - POSTGRES_PASSWORD: postgres - POSTGRES_PORT: 5432 - healthcheck: - test: [ "CMD-SHELL", "pg_isready" ] - interval: 10s - timeout: 5s - retries: 5 - - imgproxy: - image: darthsim/imgproxy - ports: - - 50020:8080 - volumes: - - assets-volume:/tmp/storage - environment: - - IMGPROXY_LOCAL_FILESYSTEM_ROOT=/ - - IMGPROXY_USE_ETAG=true -volumes: - assets-volume: \ No newline at end of file diff --git a/infra/kong/Dockerfile b/infra/kong/Dockerfile deleted file mode 100644 index 4597f903..00000000 --- a/infra/kong/Dockerfile +++ /dev/null @@ -1,15 +0,0 @@ -FROM kong:2.1 - -COPY kong.yml /var/lib/kong/kong.yml - -# Build time defaults -ARG build_KONG_DATABASE=off -ARG build_KONG_PLUGINS=request-transformer,cors,key-auth -ARG build_KONG_DECLARATIVE_CONFIG=/var/lib/kong/kong.yml - -# Run time values -ENV 
KONG_DATABASE=$build_KONG_DATABASE
-ENV KONG_PLUGINS=$build_KONG_PLUGINS
-ENV KONG_DECLARATIVE_CONFIG=$build_KONG_DECLARATIVE_CONFIG
-
-EXPOSE 8000
diff --git a/infra/kong/kong.yml b/infra/kong/kong.yml
deleted file mode 100644
index 118003ce..00000000
--- a/infra/kong/kong.yml
+++ /dev/null
@@ -1,29 +0,0 @@
-_format_version: '1.1'
-services:
-  - name: rest-v1
-    _comment: 'PostgREST: /rest/v1/* -> http://rest:3000/*'
-    url: http://rest:3000/
-    routes:
-      - name: rest-v1-all
-        strip_path: true
-        paths:
-          - /rest/v1/
-    plugins:
-      - name: cors
-      - name: key-auth
-        config:
-          hide_credentials: true
-  - name: storage-v1
-    _comment: 'Storage: /storage/v1/* -> http://storage-api:5000/*'
-    url: http://storage:5000/
-    routes:
-      - name: storage-v1-all
-        strip_path: true
-        paths:
-          - /storage/v1/
-    plugins:
-      - name: cors
-consumers:
-  - username: 'private-key'
-    keyauth_credentials:
-      - key: eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJpc3MiOiJzdXBhYmFzZSIsImlhdCI6MTYwMzk2ODgzNCwiZXhwIjoyNTUwNjUzNjM0LCJhdWQiOiIiLCJzdWIiOiIiLCJSb2xlIjoicG9zdGdyZXMifQ.magCcozTMKNrl76Tj2dsM7XTl_YH0v0ilajzAvIlw3U
diff --git a/infra/postgres/00-initial-schema.sql b/infra/postgres/00-initial-schema.sql
deleted file mode 100644
index 737d8bb6..00000000
--- a/infra/postgres/00-initial-schema.sql
+++ /dev/null
@@ -1,23 +0,0 @@
--- Set up realtime
-create publication supabase_realtime for all tables;
-
--- Extension namespacing
-create schema extensions;
-create extension if not exists "uuid-ossp" with schema extensions;
-create extension if not exists pgcrypto with schema extensions;
-create extension if not exists pgjwt with schema extensions;
-
--- Developer roles
-create role anon nologin noinherit;
-create role authenticated nologin noinherit; -- "logged in" user: web_user, app_user, etc.
-create role service_role nologin noinherit bypassrls; -- allow developers to create JWTs that bypass their policies
-
-create user authenticator noinherit;
-grant anon to authenticator;
-grant authenticated to authenticator;
-grant service_role to authenticator;
-
-grant usage on schema public to postgres, anon, authenticated, service_role;
-alter default privileges in schema public grant all on tables to postgres, anon, authenticated, service_role;
-alter default privileges in schema public grant all on functions to postgres, anon, authenticated, service_role;
-alter default privileges in schema public grant all on sequences to postgres, anon, authenticated, service_role;
\ No newline at end of file
diff --git a/infra/postgres/Dockerfile b/infra/postgres/Dockerfile
deleted file mode 100644
index bb2198b8..00000000
--- a/infra/postgres/Dockerfile
+++ /dev/null
@@ -1,20 +0,0 @@
-FROM supabase/postgres:0.13.0
-
-COPY 00-initial-schema.sql /docker-entrypoint-initdb.d/00-initial-schema.sql
-COPY auth-schema.sql /docker-entrypoint-initdb.d/01-auth-schema.sql
-COPY storage-schema.sql /docker-entrypoint-initdb.d/02-storage-schema.sql
-COPY dummy-data.sql /docker-entrypoint-initdb.d/03-dummy-data.sql
-
-# Build time defaults
-ARG build_POSTGRES_DB=postgres
-ARG build_POSTGRES_USER=postgres
-ARG build_POSTGRES_PASSWORD=postgres
-ARG build_POSTGRES_PORT=5432
-
-# Run time values
-ENV POSTGRES_DB=$build_POSTGRES_DB
-ENV POSTGRES_USER=$build_POSTGRES_USER
-ENV POSTGRES_PASSWORD=$build_POSTGRES_PASSWORD
-ENV POSTGRES_PORT=$build_POSTGRES_PORT
-
-EXPOSE 5432
\ No newline at end of file
diff --git a/infra/postgres/auth-schema.sql b/infra/postgres/auth-schema.sql
deleted file mode 100644
index 7a88c4b1..00000000
--- a/infra/postgres/auth-schema.sql
+++ 
/dev/null @@ -1,89 +0,0 @@ -CREATE SCHEMA IF NOT EXISTS auth AUTHORIZATION postgres; - --- auth.users definition -CREATE TABLE auth.users ( - instance_id uuid NULL, - id uuid NOT NULL, - aud varchar(255) NULL, - "role" varchar(255) NULL, - email varchar(255) NULL, - encrypted_password varchar(255) NULL, - confirmed_at timestamptz NULL, - invited_at timestamptz NULL, - confirmation_token varchar(255) NULL, - confirmation_sent_at timestamptz NULL, - recovery_token varchar(255) NULL, - recovery_sent_at timestamptz NULL, - email_change_token varchar(255) NULL, - email_change varchar(255) NULL, - email_change_sent_at timestamptz NULL, - last_sign_in_at timestamptz NULL, - raw_app_meta_data jsonb NULL, - raw_user_meta_data jsonb NULL, - is_super_admin bool NULL, - created_at timestamptz NULL, - updated_at timestamptz NULL, - CONSTRAINT users_pkey PRIMARY KEY (id) -); -CREATE INDEX users_instance_id_email_idx ON auth.users USING btree (instance_id, email); -CREATE INDEX users_instance_id_idx ON auth.users USING btree (instance_id); --- auth.refresh_tokens definition -CREATE TABLE auth.refresh_tokens ( - instance_id uuid NULL, - id bigserial NOT NULL, - "token" varchar(255) NULL, - user_id varchar(255) NULL, - revoked bool NULL, - created_at timestamptz NULL, - updated_at timestamptz NULL, - CONSTRAINT refresh_tokens_pkey PRIMARY KEY (id) -); -CREATE INDEX refresh_tokens_instance_id_idx ON auth.refresh_tokens USING btree (instance_id); -CREATE INDEX refresh_tokens_instance_id_user_id_idx ON auth.refresh_tokens USING btree (instance_id, user_id); -CREATE INDEX refresh_tokens_token_idx ON auth.refresh_tokens USING btree (token); --- auth.instances definition -CREATE TABLE auth.instances ( - id uuid NOT NULL, - uuid uuid NULL, - raw_base_config text NULL, - created_at timestamptz NULL, - updated_at timestamptz NULL, - CONSTRAINT instances_pkey PRIMARY KEY (id) -); --- auth.audit_log_entries definition -CREATE TABLE auth.audit_log_entries ( - instance_id uuid NULL, - id uuid NOT NULL, - payload json NULL, - created_at timestamptz NULL, - CONSTRAINT audit_log_entries_pkey PRIMARY KEY (id) -); -CREATE INDEX audit_logs_instance_id_idx ON auth.audit_log_entries USING btree (instance_id); --- auth.schema_migrations definition -CREATE TABLE auth.schema_migrations ( - "version" varchar(255) NOT NULL, - CONSTRAINT schema_migrations_pkey PRIMARY KEY ("version") -); -INSERT INTO auth.schema_migrations (version) -VALUES ('20171026211738'), - ('20171026211808'), - ('20171026211834'), - ('20180103212743'), - ('20180108183307'), - ('20180119214651'), - ('20180125194653'); --- Gets the User ID from the request cookie -create or replace function auth.uid() returns uuid as $$ - select nullif(current_setting('request.jwt.claim.sub', true), '')::uuid; -$$ language sql stable; --- Gets the User Role from the request cookie -create or replace function auth.role() returns text as $$ - select nullif(current_setting('request.jwt.claim.role', true), '')::text; -$$ language sql stable; --- Gets the User Email from the request cookie -create or replace function auth.email() returns text as $$ - select nullif(current_setting('request.jwt.claim.email', true), '')::text; -$$ language sql stable; -GRANT ALL PRIVILEGES ON SCHEMA auth TO postgres; -GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA auth TO postgres; -GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA auth TO postgres; \ No newline at end of file diff --git a/infra/postgres/dummy-data.sql b/infra/postgres/dummy-data.sql deleted file mode 100644 index 8aef8529..00000000 --- 
a/infra/postgres/dummy-data.sql +++ /dev/null @@ -1,56 +0,0 @@ --- insert users -INSERT INTO "auth"."users" ("instance_id", "id", "aud", "role", "email", "encrypted_password", "confirmed_at", "invited_at", "confirmation_token", "confirmation_sent_at", "recovery_token", "recovery_sent_at", "email_change_token", "email_change", "email_change_sent_at", "last_sign_in_at", "raw_app_meta_data", "raw_user_meta_data", "is_super_admin", "created_at", "updated_at") VALUES -('00000000-0000-0000-0000-000000000000', '317eadce-631a-4429-a0bb-f19a7a517b4a', 'authenticated', 'authenticated', 'inian+user2@supabase.io', '', NULL, '2021-02-17 04:41:13.408828+00', '541rn7rTZPGeGCYsp0a38g', '2021-02-17 04:41:13.408828+00', '', NULL, '', '', NULL, NULL, '{"provider": "email"}', 'null', 'f', '2021-02-17 04:41:13.406912+00', '2021-02-17 04:41:13.406919+00'), -('00000000-0000-0000-0000-000000000000', '4d56e902-f0a0-4662-8448-a4d9e643c142', 'authenticated', 'authenticated', 'inian+user1@supabase.io', '', NULL, '2021-02-17 04:40:58.570482+00', 'U1HvzExEO3l7JzP-4tTxJA', '2021-02-17 04:40:58.570482+00', '', NULL, '', '', NULL, NULL, '{"provider": "email"}', 'null', 'f', '2021-02-17 04:40:58.568637+00', '2021-02-17 04:40:58.568642+00'), -('00000000-0000-0000-0000-000000000000', 'd8c7bce9-cfeb-497b-bd61-e66ce2cbdaa2', 'authenticated', 'authenticated', 'inian+admin@supabase.io', '', NULL, '2021-02-17 04:40:42.901743+00', '3EG99GjT_e3NC4eGEBXOjw', '2021-02-17 04:40:42.901743+00', '', NULL, '', '', NULL, NULL, '{"provider": "email"}', 'null', 'f', '2021-02-17 04:40:42.890632+00', '2021-02-17 04:40:42.890637+00'); - --- insert buckets -INSERT INTO "storage"."buckets" ("id", "name", "owner", "created_at", "updated_at") VALUES -('bucket2', 'bucket2', '4d56e902-f0a0-4662-8448-a4d9e643c142', '2021-02-17 04:43:32.770206+00', '2021-02-17 04:43:32.770206+00'), -('bucket3', 'bucket3', '4d56e902-f0a0-4662-8448-a4d9e643c142', '2021-02-17 04:43:32.770206+00', '2021-02-17 04:43:32.770206+00'), -('bucket4', 'bucket4', '317eadce-631a-4429-a0bb-f19a7a517b4a', '2021-02-25 09:23:01.58385+00', '2021-02-25 09:23:01.58385+00'), -('bucket5', 'bucket5', '317eadce-631a-4429-a0bb-f19a7a517b4a', '2021-02-27 03:04:25.6386+00', '2021-02-27 03:04:25.6386+00'); - - --- insert objects -INSERT INTO "storage"."objects" ("id", "bucket_id", "name", "owner", "created_at", "updated_at", "last_accessed_at", "metadata") VALUES -('03e458f9-892f-4db2-8cb9-d3401a689e25', 'bucket2', 'public/sadcat-upload23.png', '317eadce-631a-4429-a0bb-f19a7a517b4a', '2021-03-04 08:26:08.553748+00', '2021-03-04 08:26:08.553748+00', '2021-03-04 08:26:08.553748+00', '{"mimetype": "image/svg+xml", "size": 1234}'), -('070825af-a11d-44fe-9f1d-abdc76f686f2', 'bucket2', 'public/sadcat-upload.png', '317eadce-631a-4429-a0bb-f19a7a517b4a', '2021-03-02 16:31:11.115996+00', '2021-03-02 16:31:11.115996+00', '2021-03-02 16:31:11.115996+00', '{"mimetype": "image/png", "size": 1234}'), -('0cac5609-11e1-4f21-b486-d0eeb60909f6', 'bucket2', 'curlimage.jpg', 'd8c7bce9-cfeb-497b-bd61-e66ce2cbdaa2', '2021-02-23 11:05:16.625075+00', '2021-02-23 11:05:16.625075+00', '2021-02-23 11:05:16.625075+00', '{"size": 1234}'), -('147c6795-94d5-4008-9d81-f7ba3b4f8a9f', 'bucket2', 'folder/only_uid.jpg', 'd8c7bce9-cfeb-497b-bd61-e66ce2cbdaa2', '2021-02-17 10:36:01.504227+00', '2021-02-17 11:03:03.049618+00', '2021-02-17 10:36:01.504227+00', '{"size": 1234}'), -('65a3aa9c-0ff2-4adc-85d0-eab673c27443', 'bucket2', 'authenticated/casestudy.png', 'd8c7bce9-cfeb-497b-bd61-e66ce2cbdaa2', '2021-02-17 10:42:19.366559+00', 
'2021-02-17 11:03:30.025116+00', '2021-02-17 10:42:19.366559+00', '{"size": 1234}'), -('10ABE273-D77A-4BDA-B410-6FC0CA3E6ADC', 'bucket2', 'authenticated/cat.jpg', 'd8c7bce9-cfeb-497b-bd61-e66ce2cbdaa2', '2021-02-17 10:42:19.366559+00', '2021-02-17 11:03:30.025116+00', '2021-02-17 10:42:19.366559+00', '{"size": 1234}'), -('1edccac7-0876-4e9f-89da-a08d2a5f654b', 'bucket2', 'authenticated/delete.png', '317eadce-631a-4429-a0bb-f19a7a517b4a', '2021-03-02 16:31:11.115996+00', '2021-03-02 16:31:11.115996+00', '2021-03-02 16:31:11.115996+00', '{"mimetype": "image/png", "size": 1234}'), -('1a911f3c-8c1d-4661-93c1-8e065e4d757e', 'bucket2', 'authenticated/delete1.png', '317eadce-631a-4429-a0bb-f19a7a517b4a', '2021-02-22 22:29:15.14732+00', '2021-02-22 22:29:15.14732+00', '2021-03-02 09:32:17.116+00', '{"mimetype": "image/png", "size": 1234}'), -('372d5d74-e24d-49dc-abe8-47d7eb226a2e', 'bucket2', 'authenticated/delete-multiple1.png', '317eadce-631a-4429-a0bb-f19a7a517b4a', '2021-02-22 22:29:15.14732+00', '2021-02-22 22:29:15.14732+00', '2021-03-02 09:32:17.116+00', '{"mimetype": "image/png", "size": 1234}'), -('34811c1b-85e5-4eb6-a5e3-d607b2f6986e', 'bucket2', 'authenticated/delete-multiple2.png', '317eadce-631a-4429-a0bb-f19a7a517b4a', '2021-02-22 22:29:15.14732+00', '2021-02-22 22:29:15.14732+00', '2021-03-02 09:32:17.116+00', '{"mimetype": "image/png", "size": 1234}'), -('45950ff2-d3a8-4add-8e49-bafc01198340', 'bucket2', 'authenticated/delete-multiple3.png', '317eadce-631a-4429-a0bb-f19a7a517b4a', '2021-02-22 22:29:15.14732+00', '2021-02-22 22:29:15.14732+00', '2021-03-02 09:32:17.116+00', '{"mimetype": "image/png", "size": 1234}'), -('469b0216-5419-41f6-9a37-2abfd7fad29c', 'bucket2', 'authenticated/delete-multiple4.png', '317eadce-631a-4429-a0bb-f19a7a517b4a', '2021-02-22 22:29:15.14732+00', '2021-02-22 22:29:15.14732+00', '2021-03-02 09:32:17.116+00', '{"mimetype": "image/png", "size": 1234}'), -('55930619-a668-4dbc-aea3-b93dfe101e7f', 'bucket2', 'authenticated/delete-multiple7.png', '317eadce-631a-4429-a0bb-f19a7a517b4a', '2021-02-22 22:29:15.14732+00', '2021-02-22 22:29:15.14732+00', '2021-03-02 09:32:17.116+00', '{"mimetype": "image/png", "size": 1234}'), -('D1CE4E4F-03E2-473D-858B-301D7989B581', 'bucket2', 'authenticated/move-orig.png', '317eadce-631a-4429-a0bb-f19a7a517b4a', '2021-02-22 22:29:15.14732+00', '2021-02-22 22:29:15.14732+00', '2021-03-02 09:32:17.116+00', '{"mimetype": "image/png", "size": 1234}'), -('222b3d1e-bc17-414c-b336-47894aa4d697', 'bucket2', 'authenticated/move-orig-2.png', '317eadce-631a-4429-a0bb-f19a7a517b4a', '2021-02-22 22:29:15.14732+00', '2021-02-22 22:29:15.14732+00', '2021-03-02 09:32:17.116+00', '{"mimetype": "image/png", "size": 1234}'), -('8f7d643d-1e82-4d39-ae39-d9bd6b0cfe9c', 'bucket2', 'authenticated/move-orig-3.png', '317eadce-631a-4429-a0bb-f19a7a517b4a', '2021-02-22 22:29:15.14732+00', '2021-02-22 22:29:15.14732+00', '2021-03-02 09:32:17.116+00', '{"mimetype": "image/png", "size": 1234}'), -('8377527d-3518-4dc8-8290-c6926470e795', 'bucket2', 'folder/subfolder/public-all-permissions.png', 'd8c7bce9-cfeb-497b-bd61-e66ce2cbdaa2', '2021-02-17 10:26:42.791214+00', '2021-02-17 11:03:30.025116+00', '2021-02-17 10:26:42.791214+00', '{"size": 1234}'), -('b39ae4ab-802b-4c42-9271-3f908c34363c', 'bucket2', 'private/sadcat-upload3.png', '317eadce-631a-4429-a0bb-f19a7a517b4a', '2021-03-01 08:53:29.567975+00', '2021-03-01 08:53:29.567975+00', '2021-03-01 08:53:29.567975+00', '{"mimetype": "image/svg+xml", "size": 1234}'), -('8098E1AC-C744-4368-86DF-71B60CCDE221', 
'bucket3', 'sadcat-upload3.png', '317eadce-631a-4429-a0bb-f19a7a517b4a', '2021-03-01 08:53:29.567975+00', '2021-03-01 08:53:29.567975+00', '2021-03-01 08:53:29.567975+00', '{"mimetype": "image/svg+xml", "size": 1234}'), -('D3EB488E-94F4-46CD-86D3-242C13B95BAC', 'bucket3', 'sadcat-upload2.png', '317eadce-631a-4429-a0bb-f19a7a517b4a', '2021-03-01 08:53:29.567975+00', '2021-03-01 08:53:29.567975+00', '2021-03-01 08:53:29.567975+00', '{"mimetype": "image/svg+xml", "size": 1234}'); - --- add policies --- allows user to CRUD all buckets -CREATE POLICY crud_buckets ON storage.buckets for all USING (auth.uid() = '317eadce-631a-4429-a0bb-f19a7a517b4a'); --- allow public CRUD access to the public folder in bucket2 -CREATE POLICY crud_public_folder ON storage.objects for all USING (bucket_id='bucket2' and (storage.foldername(name))[1] = 'public'); --- allow public CRUD access to a particular file in bucket2 -CREATE POLICY crud_public_file ON storage.objects for all USING (bucket_id='bucket2' and name = 'folder/subfolder/public-all-permissions.png'); --- allow public CRUD access to a folder in bucket2 to a user with a given id -CREATE POLICY crud_uid_folder ON storage.objects for all USING (bucket_id='bucket2' and (storage.foldername(name))[1] = 'only_uid' and auth.uid() = 'd8c7bce9-cfeb-497b-bd61-e66ce2cbdaa2'); --- allow public CRUD access to a file in bucket2 to a user with a given id -CREATE POLICY crud_uid_file ON storage.objects for all USING (bucket_id='bucket2' and name = 'folder/only_uid.jpg' and auth.uid() = 'd8c7bce9-cfeb-497b-bd61-e66ce2cbdaa2'); --- allow CRUD access to a folder in bucket2 to all authenticated users -CREATE POLICY authenticated_folder ON storage.objects for all USING (bucket_id='bucket2' and (storage.foldername(name))[1] = 'authenticated' and auth.role() = 'authenticated'); --- allow CRUD access to a folder in bucket2 to its owners -CREATE POLICY crud_owner_only ON storage.objects for all USING (bucket_id='bucket2' and (storage.foldername(name))[1] = 'only_owner' and owner = auth.uid()); --- allow CRUD access to bucket4 -CREATE POLICY open_all_update ON storage.objects for all WITH CHECK (bucket_id='bucket4'); - -CREATE POLICY crud_my_bucket ON storage.objects for all USING (bucket_id='my-private-bucket' and auth.uid()::text = '317eadce-631a-4429-a0bb-f19a7a517b4a'); \ No newline at end of file diff --git a/infra/postgres/storage-schema.sql b/infra/postgres/storage-schema.sql deleted file mode 100644 index b0c9e2a6..00000000 --- a/infra/postgres/storage-schema.sql +++ /dev/null @@ -1,107 +0,0 @@ -CREATE SCHEMA IF NOT EXISTS storage AUTHORIZATION postgres; - -grant usage on schema storage to postgres, anon, authenticated, service_role; -alter default privileges in schema storage grant all on tables to postgres, anon, authenticated, service_role; -alter default privileges in schema storage grant all on functions to postgres, anon, authenticated, service_role; -alter default privileges in schema storage grant all on sequences to postgres, anon, authenticated, service_role; - -DROP TABLE IF EXISTS "storage"."buckets"; -CREATE TABLE "storage"."buckets" ( - "id" text not NULL, - "name" text NOT NULL, - "owner" uuid, - "created_at" timestamptz DEFAULT now(), - "updated_at" timestamptz DEFAULT now(), - CONSTRAINT "buckets_owner_fkey" FOREIGN KEY ("owner") REFERENCES "auth"."users"("id"), - PRIMARY KEY ("id") -); -CREATE UNIQUE INDEX "bname" ON "storage"."buckets" USING BTREE ("name"); - -DROP TABLE IF EXISTS "storage"."objects"; -CREATE TABLE "storage"."objects" ( - "id"
uuid NOT NULL DEFAULT extensions.uuid_generate_v4(), - "bucket_id" text, - "name" text, - "owner" uuid, - "created_at" timestamptz DEFAULT now(), - "updated_at" timestamptz DEFAULT now(), - "last_accessed_at" timestamptz DEFAULT now(), - "metadata" jsonb, - CONSTRAINT "objects_bucketId_fkey" FOREIGN KEY ("bucket_id") REFERENCES "storage"."buckets"("id"), - CONSTRAINT "objects_owner_fkey" FOREIGN KEY ("owner") REFERENCES "auth"."users"("id"), - PRIMARY KEY ("id") -); -CREATE UNIQUE INDEX "bucketid_objname" ON "storage"."objects" USING BTREE ("bucket_id","name"); -CREATE INDEX name_prefix_search ON storage.objects(name text_pattern_ops); - -ALTER TABLE storage.objects ENABLE ROW LEVEL SECURITY; - -CREATE OR REPLACE FUNCTION storage.foldername(name text) - RETURNS text[] - LANGUAGE plpgsql -AS $function$ -DECLARE -_parts text[]; -BEGIN - select string_to_array(name, '/') into _parts; - return _parts[1:array_length(_parts,1)-1]; -END -$function$; - -CREATE OR REPLACE FUNCTION storage.filename(name text) - RETURNS text - LANGUAGE plpgsql -AS $function$ -DECLARE -_parts text[]; -BEGIN - select string_to_array(name, '/') into _parts; - return _parts[array_length(_parts,1)]; -END -$function$; - -CREATE OR REPLACE FUNCTION storage.extension(name text) - RETURNS text - LANGUAGE plpgsql -AS $function$ -DECLARE -_parts text[]; -_filename text; -BEGIN - select string_to_array(name, '/') into _parts; - select _parts[array_length(_parts,1)] into _filename; - return split_part(_filename, '.', 2); -END -$function$; - -CREATE OR REPLACE FUNCTION storage.search(prefix text, bucketname text, limits int DEFAULT 100, levels int DEFAULT 1, offsets int DEFAULT 0) - RETURNS TABLE ( - name text, - id uuid, - updated_at TIMESTAMPTZ, - created_at TIMESTAMPTZ, - last_accessed_at TIMESTAMPTZ, - metadata jsonb - ) - LANGUAGE plpgsql -AS $function$ -BEGIN - return query - with files_folders as ( - select ((string_to_array(objects.name, '/'))[levels]) as folder - from objects - where objects.name ilike prefix || '%' - and bucket_id = bucketname - GROUP by folder - limit limits - offset offsets - ) - select files_folders.folder as name, objects.id, objects.updated_at, objects.created_at, objects.last_accessed_at, objects.metadata from files_folders - left join objects - on prefix || files_folders.folder = objects.name and objects.bucket_id=bucketname; -END -$function$; - -GRANT ALL PRIVILEGES ON SCHEMA storage TO postgres; -GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA storage TO postgres; -GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA storage TO postgres; \ No newline at end of file diff --git a/infra/storage/Dockerfile b/infra/storage/Dockerfile deleted file mode 100644 index 31d66538..00000000 --- a/infra/storage/Dockerfile +++ /dev/null @@ -1,3 +0,0 @@ -FROM supabase/storage-api:v0.25.1 - -RUN apk add curl --no-cache \ No newline at end of file diff --git a/storage3/_async/file_api.py b/storage3/_async/file_api.py index 812070f1..ae8d28ea 100644 --- a/storage3/_async/file_api.py +++ b/storage3/_async/file_api.py @@ -82,10 +82,8 @@ async def create_signed_urls( f"/object/sign/{self.bucket_id}", json={"expires_in": expires_in, "paths": paths}, ) - # TODO(joel): add support for download option return response.json() - pass async def get_public_url(self, path: str, options: TransformOptions = {}) -> str: """ From b43434f6e947ce8a5251c042c19a90f8b6d86a6b Mon Sep 17 00:00:00 2001 From: "joel@joellee.org" Date: Fri, 6 Jan 2023 11:01:12 +0800 Subject: [PATCH 06/20] refactor: remove create_signed_urls --- 
storage3/_async/file_api.py | 11 ----------- storage3/_sync/file_api.py | 13 ------------- tests/_sync/test_client.py | 4 +++- 3 files changed, 3 insertions(+), 25 deletions(-) diff --git a/storage3/_async/file_api.py b/storage3/_async/file_api.py index ae8d28ea..92616b8e 100644 --- a/storage3/_async/file_api.py +++ b/storage3/_async/file_api.py @@ -74,17 +74,6 @@ async def create_signed_url( ] = f"{self._client.base_url}{cast(str, data['signedURL']).lstrip('/')}" return data - async def create_signed_urls( - self, paths: List[str], expires_in: int, options: dict[str, str] - ) -> dict[str, str]: - response = await self._request( - "POST", - f"/object/sign/{self.bucket_id}", - json={"expires_in": expires_in, "paths": paths}, - ) - return response.json() - - async def get_public_url(self, path: str, options: TransformOptions = {}) -> str: """ Parameters diff --git a/storage3/_sync/file_api.py b/storage3/_sync/file_api.py index 35f4f740..dd2da046 100644 --- a/storage3/_sync/file_api.py +++ b/storage3/_sync/file_api.py @@ -74,19 +74,6 @@ def create_signed_url( ] = f"{self._client.base_url}{cast(str, data['signedURL']).lstrip('/')}" return data - def create_signed_urls( - self, paths: List[str], expires_in: int, options: dict[str, str] - ) -> dict[str, str]: - response = self._request( - "POST", - f"/object/sign/{self.bucket_id}", - json={"expires_in": expires_in, "paths": paths}, - ) - # TODO(joel): add support for download option - return response.json() - - pass - def get_public_url(self, path: str, options: TransformOptions = {}) -> str: """ Parameters diff --git a/tests/_sync/test_client.py b/tests/_sync/test_client.py index d3622128..30154626 100644 --- a/tests/_sync/test_client.py +++ b/tests/_sync/test_client.py @@ -79,7 +79,9 @@ def bucket(storage: SyncStorageClient, uuid_factory: Callable[[], str]) -> str: @pytest.fixture(scope="module") -def public_bucket(storage: SyncStorageClient, uuid_factory: Callable[[], str]) -> str: +def public_bucket( + storage: SyncStorageClient, uuid_factory: Callable[[], str] +) -> str: """Creates a test public bucket which will be used in the whole storage tests run and deleted at the end""" bucket_id = uuid_factory() From c5e5abaa8f7830cbfcd1caeafcd5d3c2140f2f0d Mon Sep 17 00:00:00 2001 From: "joel@joellee.org" Date: Fri, 6 Jan 2023 11:07:30 +0800 Subject: [PATCH 07/20] fix: import Union, Optional from typing instead of typing-extensions --- storage3/types.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/storage3/types.py b/storage3/types.py index 2d82d6e0..a8a6cbe3 100644 --- a/storage3/types.py +++ b/storage3/types.py @@ -2,7 +2,8 @@ from datetime import datetime import dateutil.parser -from typing_extensions import Literal, TypedDict, Union, Optional +from typing_extensions import Literal, TypedDict +from typing import Union, Optional RequestMethod = Literal["GET", "POST", "DELETE", "PUT", "HEAD"] From f4005fd672bc86bb694f2ddf50e202be7beb6370 Mon Sep 17 00:00:00 2001 From: "joel@joellee.org" Date: Fri, 6 Jan 2023 11:11:29 +0800 Subject: [PATCH 08/20] fix: switch from | to Union --- storage3/types.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/storage3/types.py b/storage3/types.py index a8a6cbe3..a4038d38 100644 --- a/storage3/types.py +++ b/storage3/types.py @@ -41,9 +41,9 @@ class ListBucketFilesOptions(TypedDict): class TransformOptions(TypedDict): height: Optional[float] width: Optional[float] - resize: Optional[Literal["cover"] | Literal["contain"] | Literal["fill"]] + resize: 
Optional[Union[Literal["cover"], Literal["contain"], Literal["fill"]]] class CreateSignedURLOptions(TypedDict): - download: Optional[str | bool] + download: Optional[Union[str,bool]] transform: Optional[TransformOptions] From f0c8fdcf69cd397e31c24091ae10550d65e0cb97 Mon Sep 17 00:00:00 2001 From: "joel@joellee.org" Date: Fri, 6 Jan 2023 11:19:40 +0800 Subject: [PATCH 09/20] fix: remove stray $ --- storage3/_async/file_api.py | 2 +- storage3/_sync/file_api.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/storage3/_async/file_api.py b/storage3/_async/file_api.py index 92616b8e..296a3d4e 100644 --- a/storage3/_async/file_api.py +++ b/storage3/_async/file_api.py @@ -87,7 +87,7 @@ async def get_public_url(self, path: str, options: TransformOptions = {}) -> str transformation_query = urllib.parse.urlencode(options) query_string = f"?{transformation_query}" if transformation_query else "" _path = self._get_final_path(path) - return f"{self._client.base_url}{render_path}/public/{_path}/${query_string}" + return f"{self._client.base_url}{render_path}/public/{_path}/{query_string}" async def move(self, from_path: str, to_path: str) -> dict[str, str]: """ diff --git a/storage3/_sync/file_api.py b/storage3/_sync/file_api.py index dd2da046..74d8f7f7 100644 --- a/storage3/_sync/file_api.py +++ b/storage3/_sync/file_api.py @@ -87,7 +87,7 @@ def get_public_url(self, path: str, options: TransformOptions = {}) -> str: transformation_query = urllib.parse.urlencode(options) query_string = f"?{transformation_query}" if transformation_query else "" _path = self._get_final_path(path) - return f"{self._client.base_url}{render_path}/public/{_path}/${query_string}" + return f"{self._client.base_url}{render_path}/public/{_path}/{query_string}" def move(self, from_path: str, to_path: str) -> dict[str, str]: """ From ef32ae7e5137c54c8ea35ad3b8c2f2b965a8aecd Mon Sep 17 00:00:00 2001 From: "joel@joellee.org" Date: Fri, 6 Jan 2023 11:27:53 +0800 Subject: [PATCH 10/20] chore: set TransformOptions to None --- storage3/_async/file_api.py | 2 +- storage3/_sync/file_api.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/storage3/_async/file_api.py b/storage3/_async/file_api.py index 296a3d4e..15d73b0a 100644 --- a/storage3/_async/file_api.py +++ b/storage3/_async/file_api.py @@ -74,7 +74,7 @@ async def create_signed_url( ] = f"{self._client.base_url}{cast(str, data['signedURL']).lstrip('/')}" return data - async def get_public_url(self, path: str, options: TransformOptions = {}) -> str: + async def get_public_url(self, path: str, options: TransformOptions = None) -> str: """ Parameters ---------- diff --git a/storage3/_sync/file_api.py b/storage3/_sync/file_api.py index 74d8f7f7..1ce95d8c 100644 --- a/storage3/_sync/file_api.py +++ b/storage3/_sync/file_api.py @@ -74,7 +74,7 @@ def create_signed_url( ] = f"{self._client.base_url}{cast(str, data['signedURL']).lstrip('/')}" return data - def get_public_url(self, path: str, options: TransformOptions = {}) -> str: + def get_public_url(self, path: str, options: TransformOptions = None) -> str: """ Parameters ---------- From 686b7fa03007ee6dddc2bd96492a171bddf7de82 Mon Sep 17 00:00:00 2001 From: "joel@joellee.org" Date: Fri, 6 Jan 2023 11:31:31 +0800 Subject: [PATCH 11/20] fix: strip out transformation changes --- storage3/_async/file_api.py | 13 +++++++------ storage3/_sync/file_api.py | 13 +++++++------ 2 files changed, 14 insertions(+), 12 deletions(-) diff --git a/storage3/_async/file_api.py b/storage3/_async/file_api.py index 
15d73b0a..1b39b8be 100644 --- a/storage3/_async/file_api.py +++ b/storage3/_async/file_api.py @@ -74,18 +74,19 @@ async def create_signed_url( ] = f"{self._client.base_url}{cast(str, data['signedURL']).lstrip('/')}" return data - async def get_public_url(self, path: str, options: TransformOptions = None) -> str: + async def get_public_url(self, path: str, options: TransformOptions = {}) -> str: """ Parameters ---------- path file path, including the path and file name. For example `folder/image.png`. """ - render_path = ( - "render/image/authenticated" if options.get("transform") else "object" - ) - transformation_query = urllib.parse.urlencode(options) - query_string = f"?{transformation_query}" if transformation_query else "" + render_path = "object" + # render_path = ( + # "render/image/authenticated" if options.get("transform") else "object" + # ) + # transformation_query = urllib.parse.urlencode(options) + # query_string = f"?{transformation_query}" if transformation_query else "" _path = self._get_final_path(path) return f"{self._client.base_url}{render_path}/public/{_path}/{query_string}" diff --git a/storage3/_sync/file_api.py b/storage3/_sync/file_api.py index 1ce95d8c..c02c5745 100644 --- a/storage3/_sync/file_api.py +++ b/storage3/_sync/file_api.py @@ -74,18 +74,19 @@ def create_signed_url( ] = f"{self._client.base_url}{cast(str, data['signedURL']).lstrip('/')}" return data - def get_public_url(self, path: str, options: TransformOptions = None) -> str: + def get_public_url(self, path: str, options: TransformOptions = {}) -> str: """ Parameters ---------- path file path, including the path and file name. For example `folder/image.png`. """ - render_path = ( - "render/image/authenticated" if options.get("transform") else "object" - ) - transformation_query = urllib.parse.urlencode(options) - query_string = f"?{transformation_query}" if transformation_query else "" + render_path = "object" + # render_path = ( + # "render/image/authenticated" if options.get("transform") else "object" + # ) + # transformation_query = urllib.parse.urlencode(options) + # query_string = f"?{transformation_query}" if transformation_query else "" _path = self._get_final_path(path) return f"{self._client.base_url}{render_path}/public/{_path}/{query_string}" From 72d299d4257265099e44c6b9e821fa3ca86d19ab Mon Sep 17 00:00:00 2001 From: "joel@joellee.org" Date: Fri, 6 Jan 2023 11:38:06 +0800 Subject: [PATCH 12/20] fix: add query string param --- storage3/_async/file_api.py | 1 + storage3/_sync/file_api.py | 1 + 2 files changed, 2 insertions(+) diff --git a/storage3/_async/file_api.py b/storage3/_async/file_api.py index 1b39b8be..a43ad80e 100644 --- a/storage3/_async/file_api.py +++ b/storage3/_async/file_api.py @@ -87,6 +87,7 @@ async def get_public_url(self, path: str, options: TransformOptions = {}) -> str # ) # transformation_query = urllib.parse.urlencode(options) # query_string = f"?{transformation_query}" if transformation_query else "" + query_string = "" _path = self._get_final_path(path) return f"{self._client.base_url}{render_path}/public/{_path}/{query_string}" diff --git a/storage3/_sync/file_api.py b/storage3/_sync/file_api.py index c02c5745..168897f0 100644 --- a/storage3/_sync/file_api.py +++ b/storage3/_sync/file_api.py @@ -87,6 +87,7 @@ def get_public_url(self, path: str, options: TransformOptions = {}) -> str: # ) # transformation_query = urllib.parse.urlencode(options) # query_string = f"?{transformation_query}" if transformation_query else "" + query_string = "" _path = 
self._get_final_path(path) return f"{self._client.base_url}{render_path}/public/{_path}/{query_string}" From 1feb82590a9f3ca025da41da82e8089f7835783f Mon Sep 17 00:00:00 2001 From: "joel@joellee.org" Date: Fri, 6 Jan 2023 11:42:21 +0800 Subject: [PATCH 13/20] fix: remove query params --- storage3/_async/file_api.py | 2 +- storage3/_sync/file_api.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/storage3/_async/file_api.py b/storage3/_async/file_api.py index a43ad80e..082f87cc 100644 --- a/storage3/_async/file_api.py +++ b/storage3/_async/file_api.py @@ -89,7 +89,7 @@ async def get_public_url(self, path: str, options: TransformOptions = {}) -> str # query_string = f"?{transformation_query}" if transformation_query else "" query_string = "" _path = self._get_final_path(path) - return f"{self._client.base_url}{render_path}/public/{_path}/{query_string}" + return f"{self._client.base_url}{render_path}/public/{_path}" async def move(self, from_path: str, to_path: str) -> dict[str, str]: """ diff --git a/storage3/_sync/file_api.py b/storage3/_sync/file_api.py index 168897f0..85fbc51f 100644 --- a/storage3/_sync/file_api.py +++ b/storage3/_sync/file_api.py @@ -89,7 +89,7 @@ def get_public_url(self, path: str, options: TransformOptions = {}) -> str: # query_string = f"?{transformation_query}" if transformation_query else "" query_string = "" _path = self._get_final_path(path) - return f"{self._client.base_url}{render_path}/public/{_path}/{query_string}" + return f"{self._client.base_url}{render_path}/public/{_path}" def move(self, from_path: str, to_path: str) -> dict[str, str]: """ From 604e804e73583cd396829d41ecb8f3baf789f754 Mon Sep 17 00:00:00 2001 From: "joel@joellee.org" Date: Fri, 6 Jan 2023 11:51:46 +0800 Subject: [PATCH 14/20] fix: handle stray / --- storage3/_async/file_api.py | 14 ++++++-------- storage3/_sync/file_api.py | 14 ++++++-------- 2 files changed, 12 insertions(+), 16 deletions(-) diff --git a/storage3/_async/file_api.py b/storage3/_async/file_api.py index 082f87cc..f5f8a7cb 100644 --- a/storage3/_async/file_api.py +++ b/storage3/_async/file_api.py @@ -81,15 +81,13 @@ async def get_public_url(self, path: str, options: TransformOptions = {}) -> str path file path, including the path and file name. For example `folder/image.png`. """ - render_path = "object" - # render_path = ( - # "render/image/authenticated" if options.get("transform") else "object" - # ) - # transformation_query = urllib.parse.urlencode(options) - # query_string = f"?{transformation_query}" if transformation_query else "" - query_string = "" + render_path = ( + "render/image/authenticated" if options.get("transform") else "object" + ) + transformation_query = urllib.parse.urlencode(options) + query_string = f"/?{transformation_query}" if transformation_query else "" _path = self._get_final_path(path) - return f"{self._client.base_url}{render_path}/public/{_path}" + return f"{self._client.base_url}{render_path}/public/{_path}{query_string}" async def move(self, from_path: str, to_path: str) -> dict[str, str]: """ diff --git a/storage3/_sync/file_api.py b/storage3/_sync/file_api.py index 85fbc51f..bd3059d9 100644 --- a/storage3/_sync/file_api.py +++ b/storage3/_sync/file_api.py @@ -81,15 +81,13 @@ def get_public_url(self, path: str, options: TransformOptions = {}) -> str: path file path, including the path and file name. For example `folder/image.png`. 
""" - render_path = "object" - # render_path = ( - # "render/image/authenticated" if options.get("transform") else "object" - # ) - # transformation_query = urllib.parse.urlencode(options) - # query_string = f"?{transformation_query}" if transformation_query else "" - query_string = "" + render_path = ( + "render/image/authenticated" if options.get("transform") else "object" + ) + transformation_query = urllib.parse.urlencode(options) + query_string = f"/?{transformation_query}" if transformation_query else "" _path = self._get_final_path(path) - return f"{self._client.base_url}{render_path}/public/{_path}" + return f"{self._client.base_url}{render_path}/public/{_path}{query_string}" def move(self, from_path: str, to_path: str) -> dict[str, str]: """ From dd72fd6758f975e8d1f1ba56765c21ac846cf62c Mon Sep 17 00:00:00 2001 From: "joel@joellee.org" Date: Fri, 6 Jan 2023 11:52:13 +0800 Subject: [PATCH 15/20] fix: handle stray / --- storage3/_async/file_api.py | 8 ++++---- storage3/_sync/file_api.py | 8 ++++---- storage3/types.py | 4 ++-- 3 files changed, 10 insertions(+), 10 deletions(-) diff --git a/storage3/_async/file_api.py b/storage3/_async/file_api.py index f5f8a7cb..bf47f717 100644 --- a/storage3/_async/file_api.py +++ b/storage3/_async/file_api.py @@ -1,6 +1,6 @@ from __future__ import annotations -import urllib.parse +import urllib.parse from dataclasses import dataclass, field from io import BufferedReader, FileIO from pathlib import Path @@ -11,10 +11,10 @@ from ..constants import DEFAULT_FILE_OPTIONS, DEFAULT_SEARCH_OPTIONS from ..types import ( BaseBucket, - ListBucketFilesOptions, CreateSignedURLOptions, - TransformOptions, + ListBucketFilesOptions, RequestMethod, + TransformOptions, ) from ..utils import AsyncClient, StorageException @@ -82,7 +82,7 @@ async def get_public_url(self, path: str, options: TransformOptions = {}) -> str file path, including the path and file name. For example `folder/image.png`. """ render_path = ( - "render/image/authenticated" if options.get("transform") else "object" + "render/image/authenticated" if options.get("transform") else "object" ) transformation_query = urllib.parse.urlencode(options) query_string = f"/?{transformation_query}" if transformation_query else "" diff --git a/storage3/_sync/file_api.py b/storage3/_sync/file_api.py index bd3059d9..38ea28ce 100644 --- a/storage3/_sync/file_api.py +++ b/storage3/_sync/file_api.py @@ -1,6 +1,6 @@ from __future__ import annotations -import urllib.parse +import urllib.parse from dataclasses import dataclass, field from io import BufferedReader, FileIO from pathlib import Path @@ -11,10 +11,10 @@ from ..constants import DEFAULT_FILE_OPTIONS, DEFAULT_SEARCH_OPTIONS from ..types import ( BaseBucket, - ListBucketFilesOptions, CreateSignedURLOptions, - TransformOptions, + ListBucketFilesOptions, RequestMethod, + TransformOptions, ) from ..utils import SyncClient, StorageException @@ -82,7 +82,7 @@ def get_public_url(self, path: str, options: TransformOptions = {}) -> str: file path, including the path and file name. For example `folder/image.png`. 
""" render_path = ( - "render/image/authenticated" if options.get("transform") else "object" + "render/image/authenticated" if options.get("transform") else "object" ) transformation_query = urllib.parse.urlencode(options) query_string = f"/?{transformation_query}" if transformation_query else "" diff --git a/storage3/types.py b/storage3/types.py index a4038d38..50b429aa 100644 --- a/storage3/types.py +++ b/storage3/types.py @@ -1,9 +1,9 @@ from dataclasses import dataclass from datetime import datetime +from typing import Optional, Union import dateutil.parser from typing_extensions import Literal, TypedDict -from typing import Union, Optional RequestMethod = Literal["GET", "POST", "DELETE", "PUT", "HEAD"] @@ -45,5 +45,5 @@ class TransformOptions(TypedDict): class CreateSignedURLOptions(TypedDict): - download: Optional[Union[str,bool]] + download: Optional[Union[str, bool]] transform: Optional[TransformOptions] From 42a9ed3f8f4e3d8e66f065f2ec64936a95b422b5 Mon Sep 17 00:00:00 2001 From: "joel@joellee.org" Date: Fri, 6 Jan 2023 11:53:12 +0800 Subject: [PATCH 16/20] fix: run black --- storage3/_sync/bucket.py | 2 +- storage3/_sync/file_api.py | 2 +- tests/_sync/test_client.py | 4 +--- 3 files changed, 3 insertions(+), 5 deletions(-) diff --git a/storage3/_sync/bucket.py b/storage3/_sync/bucket.py index 1dbfd81b..efbd6cd8 100644 --- a/storage3/_sync/bucket.py +++ b/storage3/_sync/bucket.py @@ -5,7 +5,7 @@ from httpx import HTTPError, Response from ..types import RequestMethod -from ..utils import SyncClient, StorageException +from ..utils import StorageException, SyncClient from .file_api import SyncBucket __all__ = ["SyncStorageBucketAPI"] diff --git a/storage3/_sync/file_api.py b/storage3/_sync/file_api.py index 38ea28ce..0e8cca5e 100644 --- a/storage3/_sync/file_api.py +++ b/storage3/_sync/file_api.py @@ -16,7 +16,7 @@ RequestMethod, TransformOptions, ) -from ..utils import SyncClient, StorageException +from ..utils import StorageException, SyncClient __all__ = ["SyncBucket"] diff --git a/tests/_sync/test_client.py b/tests/_sync/test_client.py index 30154626..d3622128 100644 --- a/tests/_sync/test_client.py +++ b/tests/_sync/test_client.py @@ -79,9 +79,7 @@ def bucket(storage: SyncStorageClient, uuid_factory: Callable[[], str]) -> str: @pytest.fixture(scope="module") -def public_bucket( - storage: SyncStorageClient, uuid_factory: Callable[[], str] -) -> str: +def public_bucket(storage: SyncStorageClient, uuid_factory: Callable[[], str]) -> str: """Creates a test public bucket which will be used in the whole storage tests run and deleted at the end""" bucket_id = uuid_factory() From 0272f1b5dd787b8a51727c1c9de3116c15b124b6 Mon Sep 17 00:00:00 2001 From: "joel@joellee.org" Date: Sun, 8 Jan 2023 17:43:30 +0800 Subject: [PATCH 17/20] fix: update render_path for get_public_url --- storage3/_async/file_api.py | 4 ++-- storage3/_sync/bucket.py | 2 +- storage3/_sync/file_api.py | 6 +++--- tests/_sync/test_client.py | 4 +++- 4 files changed, 9 insertions(+), 7 deletions(-) diff --git a/storage3/_async/file_api.py b/storage3/_async/file_api.py index bf47f717..96d4ad8f 100644 --- a/storage3/_async/file_api.py +++ b/storage3/_async/file_api.py @@ -82,12 +82,12 @@ async def get_public_url(self, path: str, options: TransformOptions = {}) -> str file path, including the path and file name. For example `folder/image.png`. 
""" render_path = ( - "render/image/authenticated" if options.get("transform") else "object" + "render/image" if options.get("transform") else "object" ) transformation_query = urllib.parse.urlencode(options) query_string = f"/?{transformation_query}" if transformation_query else "" _path = self._get_final_path(path) - return f"{self._client.base_url}{render_path}/public/{_path}{query_string}" + return f"{self._client.base_url}{render_path}/public{_path}{query_string}" async def move(self, from_path: str, to_path: str) -> dict[str, str]: """ diff --git a/storage3/_sync/bucket.py b/storage3/_sync/bucket.py index efbd6cd8..1dbfd81b 100644 --- a/storage3/_sync/bucket.py +++ b/storage3/_sync/bucket.py @@ -5,7 +5,7 @@ from httpx import HTTPError, Response from ..types import RequestMethod -from ..utils import StorageException, SyncClient +from ..utils import SyncClient, StorageException from .file_api import SyncBucket __all__ = ["SyncStorageBucketAPI"] diff --git a/storage3/_sync/file_api.py b/storage3/_sync/file_api.py index 0e8cca5e..01ee3870 100644 --- a/storage3/_sync/file_api.py +++ b/storage3/_sync/file_api.py @@ -16,7 +16,7 @@ RequestMethod, TransformOptions, ) -from ..utils import StorageException, SyncClient +from ..utils import SyncClient, StorageException __all__ = ["SyncBucket"] @@ -82,12 +82,12 @@ def get_public_url(self, path: str, options: TransformOptions = {}) -> str: file path, including the path and file name. For example `folder/image.png`. """ render_path = ( - "render/image/authenticated" if options.get("transform") else "object" + "render/image" if options.get("transform") else "object" ) transformation_query = urllib.parse.urlencode(options) query_string = f"/?{transformation_query}" if transformation_query else "" _path = self._get_final_path(path) - return f"{self._client.base_url}{render_path}/public/{_path}{query_string}" + return f"{self._client.base_url}{render_path}/public{_path}{query_string}" def move(self, from_path: str, to_path: str) -> dict[str, str]: """ diff --git a/tests/_sync/test_client.py b/tests/_sync/test_client.py index d3622128..30154626 100644 --- a/tests/_sync/test_client.py +++ b/tests/_sync/test_client.py @@ -79,7 +79,9 @@ def bucket(storage: SyncStorageClient, uuid_factory: Callable[[], str]) -> str: @pytest.fixture(scope="module") -def public_bucket(storage: SyncStorageClient, uuid_factory: Callable[[], str]) -> str: +def public_bucket( + storage: SyncStorageClient, uuid_factory: Callable[[], str] +) -> str: """Creates a test public bucket which will be used in the whole storage tests run and deleted at the end""" bucket_id = uuid_factory() From 216cf3667479380fb893e16b4bd47d31f5a2b641 Mon Sep 17 00:00:00 2001 From: "joel@joellee.org" Date: Tue, 10 Jan 2023 22:16:59 +0800 Subject: [PATCH 18/20] fix: remove stray / --- storage3/_async/file_api.py | 2 +- storage3/_sync/file_api.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/storage3/_async/file_api.py b/storage3/_async/file_api.py index 96d4ad8f..4c3ddda7 100644 --- a/storage3/_async/file_api.py +++ b/storage3/_async/file_api.py @@ -85,7 +85,7 @@ async def get_public_url(self, path: str, options: TransformOptions = {}) -> str "render/image" if options.get("transform") else "object" ) transformation_query = urllib.parse.urlencode(options) - query_string = f"/?{transformation_query}" if transformation_query else "" + query_string = f"?{transformation_query}" if transformation_query else "" _path = self._get_final_path(path) return 
f"{self._client.base_url}{render_path}/public{_path}{query_string}" diff --git a/storage3/_sync/file_api.py b/storage3/_sync/file_api.py index 01ee3870..791a423d 100644 --- a/storage3/_sync/file_api.py +++ b/storage3/_sync/file_api.py @@ -85,7 +85,7 @@ def get_public_url(self, path: str, options: TransformOptions = {}) -> str: "render/image" if options.get("transform") else "object" ) transformation_query = urllib.parse.urlencode(options) - query_string = f"/?{transformation_query}" if transformation_query else "" + query_string = f"?{transformation_query}" if transformation_query else "" _path = self._get_final_path(path) return f"{self._client.base_url}{render_path}/public{_path}{query_string}" From 27b6bcd85866a152a1804679cb27f726a50d2bf3 Mon Sep 17 00:00:00 2001 From: "joel@joellee.org" Date: Tue, 10 Jan 2023 22:21:42 +0800 Subject: [PATCH 19/20] fix: add stray / --- storage3/_async/file_api.py | 2 +- storage3/_sync/file_api.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/storage3/_async/file_api.py b/storage3/_async/file_api.py index 4c3ddda7..730929b0 100644 --- a/storage3/_async/file_api.py +++ b/storage3/_async/file_api.py @@ -87,7 +87,7 @@ async def get_public_url(self, path: str, options: TransformOptions = {}) -> str transformation_query = urllib.parse.urlencode(options) query_string = f"?{transformation_query}" if transformation_query else "" _path = self._get_final_path(path) - return f"{self._client.base_url}{render_path}/public{_path}{query_string}" + return f"{self._client.base_url}{render_path}/public/{_path}{query_string}" async def move(self, from_path: str, to_path: str) -> dict[str, str]: """ diff --git a/storage3/_sync/file_api.py b/storage3/_sync/file_api.py index 791a423d..96c722f1 100644 --- a/storage3/_sync/file_api.py +++ b/storage3/_sync/file_api.py @@ -87,7 +87,7 @@ def get_public_url(self, path: str, options: TransformOptions = {}) -> str: transformation_query = urllib.parse.urlencode(options) query_string = f"?{transformation_query}" if transformation_query else "" _path = self._get_final_path(path) - return f"{self._client.base_url}{render_path}/public{_path}{query_string}" + return f"{self._client.base_url}{render_path}/public/{_path}{query_string}" def move(self, from_path: str, to_path: str) -> dict[str, str]: """ From 3e985a9e6b4305af0ec93c58c9af304c639b6df0 Mon Sep 17 00:00:00 2001 From: "joel@joellee.org" Date: Tue, 10 Jan 2023 22:27:20 +0800 Subject: [PATCH 20/20] chore: run black --- storage3/_async/file_api.py | 4 +--- storage3/_sync/bucket.py | 2 +- storage3/_sync/file_api.py | 6 ++---- tests/_sync/test_client.py | 4 +--- 4 files changed, 5 insertions(+), 11 deletions(-) diff --git a/storage3/_async/file_api.py b/storage3/_async/file_api.py index 730929b0..c5b906df 100644 --- a/storage3/_async/file_api.py +++ b/storage3/_async/file_api.py @@ -81,9 +81,7 @@ async def get_public_url(self, path: str, options: TransformOptions = {}) -> str path file path, including the path and file name. For example `folder/image.png`. 
""" - render_path = ( - "render/image" if options.get("transform") else "object" - ) + render_path = "render/image" if options.get("transform") else "object" transformation_query = urllib.parse.urlencode(options) query_string = f"?{transformation_query}" if transformation_query else "" _path = self._get_final_path(path) diff --git a/storage3/_sync/bucket.py b/storage3/_sync/bucket.py index 1dbfd81b..efbd6cd8 100644 --- a/storage3/_sync/bucket.py +++ b/storage3/_sync/bucket.py @@ -5,7 +5,7 @@ from httpx import HTTPError, Response from ..types import RequestMethod -from ..utils import SyncClient, StorageException +from ..utils import StorageException, SyncClient from .file_api import SyncBucket __all__ = ["SyncStorageBucketAPI"] diff --git a/storage3/_sync/file_api.py b/storage3/_sync/file_api.py index 96c722f1..4f5da05f 100644 --- a/storage3/_sync/file_api.py +++ b/storage3/_sync/file_api.py @@ -16,7 +16,7 @@ RequestMethod, TransformOptions, ) -from ..utils import SyncClient, StorageException +from ..utils import StorageException, SyncClient __all__ = ["SyncBucket"] @@ -81,9 +81,7 @@ def get_public_url(self, path: str, options: TransformOptions = {}) -> str: path file path, including the path and file name. For example `folder/image.png`. """ - render_path = ( - "render/image" if options.get("transform") else "object" - ) + render_path = "render/image" if options.get("transform") else "object" transformation_query = urllib.parse.urlencode(options) query_string = f"?{transformation_query}" if transformation_query else "" _path = self._get_final_path(path) diff --git a/tests/_sync/test_client.py b/tests/_sync/test_client.py index 30154626..d3622128 100644 --- a/tests/_sync/test_client.py +++ b/tests/_sync/test_client.py @@ -79,9 +79,7 @@ def bucket(storage: SyncStorageClient, uuid_factory: Callable[[], str]) -> str: @pytest.fixture(scope="module") -def public_bucket( - storage: SyncStorageClient, uuid_factory: Callable[[], str] -) -> str: +def public_bucket(storage: SyncStorageClient, uuid_factory: Callable[[], str]) -> str: """Creates a test public bucket which will be used in the whole storage tests run and deleted at the end""" bucket_id = uuid_factory()