feat(api): partial schema insertion #1057

Open: wants to merge 6 commits into master
33 changes: 33 additions & 0 deletions .air.toml
@@ -0,0 +1,33 @@
# Config file for [Air](https://github.com/cosmtrek/air) in TOML format

# Working directory
# . or absolute path, please note that the directories following must be under root
root = "."
tmp_dir = "/tmp"

[build]
# Just plain old shell command. You could use `make` as well.
cmd = "go build -o ./tmp/app/permify ./cmd/permify"
# Binary file produced by `cmd`.
bin = "/tmp/app"

# Customize the binary invocation.
# This is how the application is started. Since it works like a CLI, it is run with a CLI-style call.
full_bin = "./tmp/app/permify serve --database-engine postgres --database-uri postgres://postgres:secret@database:5432/permify --database-max-open-connections 20"
# This log file is placed in your tmp_dir.
log = "air_errors.log"
# Watch these filename extensions.
include_ext = ["go", "yaml", "env"]
# Ignore these filename extensions or directories.
exclude_dir = ["tmp", "docs"]
# There is no need to trigger a build on every change if files change too frequently.
delay = 500 # ms

[log]
# Show log time
time = true
[color]

[misc]
# Delete tmp directory on exit
clean_on_exit = true
3 changes: 2 additions & 1 deletion .gitignore
@@ -21,4 +21,5 @@
# Dependency directories (remove the comment below to include it)
vendor/dist/
/dist
/config
/config
/tmp
10 changes: 10 additions & 0 deletions Dockerfile.local
@@ -0,0 +1,10 @@
FROM golang:1.21-alpine

RUN apk --no-cache add curl
# Install the air binary so we get live code-reloading when we save files
RUN curl -sSfL https://raw.githubusercontent.com/cosmtrek/air/master/install.sh | sh -s -- -b $(go env GOPATH)/bin

# Run the air command in the directory where our code will live
WORKDIR /app

ENTRYPOINT [ "air" ]
2 changes: 1 addition & 1 deletion Makefile
@@ -8,7 +8,7 @@ help: ## Display this help screen
@awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n make \033[36m<target>\033[0m\n"} /^[a-zA-Z_-]+:.*?##/ { printf " \033[36m%-15s\033[0m %s\n", $$1, $$2 } /^##@/ { printf "\n\033[1m%s\033[0m\n", substr($$0, 5) } ' $(MAKEFILE_LIST)

compose-up: ### Run docker-compose
docker-compose up --build -d postgres && docker-compose logs -f
docker-compose up --build
.PHONY: compose-up

compose-up-integration-test: ### Run docker-compose with integration test
59 changes: 34 additions & 25 deletions docker-compose.yaml
@@ -1,31 +1,40 @@
version: "3.9"
services:
permify:
image: "ghcr.io/permify/permify:latest"
command: "serve --database-engine postgres --database-uri postgres://postgres:secret@database:5432/permify --database-max-open-connections 20"
restart: "always"
ports:
- "3476:3476"
- "3478:3478"
depends_on:
- "database"
permify:
build:
context: .
dockerfile: Dockerfile.local
restart: "always"
ports:
- "3476:3476"
- "3478:3478"
volumes:
- .:/app
depends_on:
- "database"
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:3476/healthz"]
interval: 10s
retries: 10
start_period: 60s

database:
image: "postgres"
ports:
- "5432:5432"
environment:
- "POSTGRES_PASSWORD=secret"
- "POSTGRES_DB=permify"
database:
image: "postgres"
ports:
- "5432:5432"
environment:
- "POSTGRES_PASSWORD=secret"
- "POSTGRES_DB=permify"

integration:
build:
context: .
dockerfile: integration-test/Dockerfile
container_name: integration
image: integration
depends_on:
- permify
integration:
build:
context: .
dockerfile: integration-test/Dockerfile
container_name: integration
image: integration
depends_on:
permify:
condition: service_healthy

volumes:
pg-data:
pg-data:
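Taken together with Dockerfile.local and the new .air.toml, this compose change switches local development to a live-reload workflow: make compose-up now builds the permify service from Dockerfile.local, mounts the source tree at /app, and runs air as the entrypoint, so the server is rebuilt and restarted on save. The integration service now waits for permify's /healthz check to pass instead of starting as soon as the container is up.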
104 changes: 101 additions & 3 deletions docs/apidocs.swagger.json
@@ -1066,6 +1066,60 @@
]
}
},
"/v1/tenants/{tenant_id}/schemas/partial-write": {
"patch": {
"summary": "partially update your authorization model",
"operationId": "schemas.partial-write",
"responses": {
"200": {
"description": "A successful response.",
"schema": {
"$ref": "#/definitions/SchemaPartialWriteResponse"
}
},
"default": {
"description": "An unexpected error response.",
"schema": {
"$ref": "#/definitions/Status"
}
}
},
"parameters": [
{
"name": "tenant_id",
"description": "tenant_id is a string that identifies the tenant. It must match the pattern \"[a-zA-Z0-9-,]+\",\nbe a maximum of 64 bytes, and must not be empty.",
"in": "path",
"required": true,
"type": "string"
},
{
"name": "body",
"in": "body",
"required": true,
"schema": {
"type": "object",
"properties": {
"metadata": {
"$ref": "#/definitions/SchemaPartialWriteRequestMetadata",
"description": "metadata is the additional information needed for the Partial Write request."
},
"entities": {
"type": "object",
"additionalProperties": {
"$ref": "#/definitions/SchemaPartialWriteRequestEntities"
},
"title": "Map of entity name with the values needed to be updated"
}
},
"title": "It contains the tenant_id to identify the tenant and metadata of the schema to be edited,\nwith the corresponding edits to various entities"
}
}
],
"tags": [
"Schema"
]
}
},
"/v1/tenants/{tenant_id}/schemas/read": {
"post": {
"summary": "read your authorization model",
@@ -1240,11 +1294,11 @@
"properties": {
"@type": {
"type": "string",
"description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics."
"description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com. As of May 2023, there are no widely used type server\nimplementations and no plans to implement one.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics."
}
},
"additionalProperties": {},
"description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(\u0026foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := \u0026pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := \u0026pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": \u003cstring\u003e,\n \"lastName\": \u003cstring\u003e\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }"
"description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(\u0026foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n // or ...\n if (any.isSameTypeAs(Foo.getDefaultInstance())) {\n foo = any.unpack(Foo.getDefaultInstance());\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := \u0026pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := \u0026pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": \u003cstring\u003e,\n \"lastName\": \u003cstring\u003e\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }"
},
"Argument": {
"type": "object",
@@ -2044,7 +2098,7 @@
"NULL_VALUE"
],
"default": "NULL_VALUE",
"description": "`NullValue` is a singleton enumeration to represent the null value for the\n`Value` type union.\n\n The JSON representation for `NullValue` is JSON `null`.\n\n - NULL_VALUE: Null value."
"description": "`NullValue` is a singleton enumeration to represent the null value for the\n`Value` type union.\n\nThe JSON representation for `NullValue` is JSON `null`.\n\n - NULL_VALUE: Null value."
},
"PermissionCheckRequestMetadata": {
"type": "object",
@@ -2426,6 +2480,50 @@
"default": "REFERENCE_UNSPECIFIED",
"description": "The Reference enum helps distinguish whether a name corresponds to an entity or a rule.\n\n - REFERENCE_UNSPECIFIED: Default, unspecified reference.\n - REFERENCE_ENTITY: Indicates that the name refers to an entity.\n - REFERENCE_RULE: Indicates that the name refers to a rule."
},
"SchemaPartialWriteRequestEntities": {
"type": "object",
"properties": {
"write": {
"type": "array",
"items": {
"type": "string"
}
},
"delete": {
"type": "array",
"items": {
"type": "string"
}
},
"update": {
"type": "array",
"items": {
"type": "string"
}
}
},
"title": "SchemaPartialWriteRequestEntities contains the write, update and delete definitions\nfor updating a particular entity"
},
"SchemaPartialWriteRequestMetadata": {
"type": "object",
"properties": {
"schema_version": {
"type": "string",
"description": "schema_version is the string that identifies the version of the schema to be read."
}
},
"description": "SchemaPartialWriteRequestMetadata provides additional information for the Schema Partial Write request.\nIt contains schema_version to specify which version of the schema should be read."
},
"SchemaPartialWriteResponse": {
"type": "object",
"properties": {
"schema_version": {
"type": "string",
"description": "schema_version is the string that identifies the version of the written schema."
}
},
"description": "SchemaPartialWriteResponse is the response message for the Parital Write method in the Schema service.\nIt returns the requested schema."
},
"SchemaReadRequestMetadata": {
"type": "object",
"properties": {
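For orientation, here is a minimal sketch of how a client might call the new endpoint, based only on the path, method, and message shapes documented above. The host and port (localhost:3476, the HTTP port from docker-compose.yaml), the tenant id "t1", the schema version value, and the content of the edit strings are illustrative assumptions; the exact format of the write/update/delete entries is not specified in this diff.

package main

import (
    "bytes"
    "encoding/json"
    "fmt"
    "net/http"
)

func main() {
    // Body shaped like the Swagger definition above: metadata.schema_version plus a
    // map of entity name -> SchemaPartialWriteRequestEntities (write/update/delete).
    body := map[string]any{
        "metadata": map[string]any{
            "schema_version": "cv7s8fs1mcrc72cr9mtg", // placeholder version string
        },
        "entities": map[string]any{
            "organization": map[string]any{
                "write":  []string{"relation maintainer @user"}, // placeholder edit
                "update": []string{},
                "delete": []string{},
            },
        },
    }

    payload, err := json.Marshal(body)
    if err != nil {
        panic(err)
    }

    // PATCH /v1/tenants/{tenant_id}/schemas/partial-write, per the path above.
    url := "http://localhost:3476/v1/tenants/t1/schemas/partial-write"
    req, err := http.NewRequest(http.MethodPatch, url, bytes.NewReader(payload))
    if err != nil {
        panic(err)
    }
    req.Header.Set("Content-Type", "application/json")

    resp, err := http.DefaultClient.Do(req)
    if err != nil {
        panic(err)
    }
    defer resp.Body.Close()

    // On success the server returns 200 with a SchemaPartialWriteResponse
    // containing the new schema_version.
    fmt.Println("status:", resp.Status)
}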
15 changes: 15 additions & 0 deletions internal/factories/storage.go
@@ -54,6 +54,21 @@ func SchemaReaderFactory(db database.Database) (repo storage.SchemaReader) {
}
}

// SchemaUpdaterFactory creates and returns a SchemaUpdater based on the database engine type.
func SchemaUpdaterFactory(db database.Database) (repo storage.SchemaUpdater) {
switch db.GetEngineType() {
case "postgres":
// If the database engine is Postgres, create a new SchemaUpdater using the Postgres implementation
return PQRepository.NewSchemaUpdater(db.(*PQDatabase.Postgres))
case "memory":
// If the database engine is in-memory, create a new SchemaUpdater using the in-memory implementation
return MMRepository.NewSchemaUpdater(db.(*MMDatabase.Memory))
default:
// For any other type, use the in-memory implementation as default
return MMRepository.NewSchemaUpdater(db.(*MMDatabase.Memory))
}
}

// WatcherFactory creates and returns a Watcher based on the database engine type.
func WatcherFactory(db database.Database) (repo storage.Watcher) {
switch db.GetEngineType() {
14 changes: 14 additions & 0 deletions internal/servers/error.go
@@ -33,3 +33,17 @@ func GetStatus(err error) codes.Code {
return codes.Internal
}
}

// MultiError represents multiple errors.
type MultiError struct {
Errors []error
}

// Error returns the concatenated error messages.
func (m *MultiError) Error() string {
var errMsg string
for _, err := range m.Errors {
errMsg += err.Error() + "\n"
}
return errMsg
}
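A brief usage sketch of MultiError as defined above: aggregate every failure from a batch of operations and surface them as a single error. The validateEntity helper and the surrounding function are hypothetical and only illustrate the intended pattern; they assume placement alongside error.go in package servers.

package servers

import "fmt"

// validateEntity is a hypothetical helper used only to illustrate MultiError;
// it is not part of this pull request.
func validateEntity(name string) error {
    if name == "" {
        return fmt.Errorf("entity definition must not be empty")
    }
    return nil
}

// validateEntities collects every validation failure instead of stopping at
// the first one, returning them as a single MultiError.
func validateEntities(definitions []string) error {
    multi := &MultiError{}
    for _, def := range definitions {
        if err := validateEntity(def); err != nil {
            multi.Errors = append(multi.Errors, err)
        }
    }
    if len(multi.Errors) > 0 {
        return multi
    }
    return nil
}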