diff --git a/components/BUILD.yaml b/components/BUILD.yaml index 4ec2247113c970..d06555866b15a1 100644 --- a/components/BUILD.yaml +++ b/components/BUILD.yaml @@ -148,7 +148,7 @@ scripts: srcs: - components/**/* script: | - GO_COMPONENTS=( local-app-api content-service-api image-builder-api registry-facade-api supervisor-api ws-daemon-api ws-manager-api ws-manager-bridge-api usage-api ) + GO_COMPONENTS=( local-app-api content-service-api image-builder-api registry-facade-api supervisor-api ws-daemon-api ws-manager-api ws-manager-bridge-api usage-api spicedb ) for COMPONENT in "${GO_COMPONENTS[@]}";do echo "Generating code for component $COMPONENT..." @@ -157,7 +157,7 @@ scripts: popd > /dev/null done - WEB_COMPONENTS=( local-app-api/typescript-grpcweb supervisor-api/typescript-grpc supervisor-api/typescript-grpcweb ) + WEB_COMPONENTS=( local-app-api/typescript-grpcweb supervisor-api/typescript-grpc supervisor-api/typescript-grpcweb spicedb/typescript ) for COMPONENT in "${WEB_COMPONENTS[@]}";do echo "Generating code for component $COMPONENT..." 
pushd $COMPONENT > /dev/null diff --git a/components/server/BUILD.yaml b/components/server/BUILD.yaml index 7cdc9ac41028fe..6aba7302a342bf 100644 --- a/components/server/BUILD.yaml +++ b/components/server/BUILD.yaml @@ -19,6 +19,7 @@ packages: - components/public-api/typescript:lib - components/gitpod-db:dbtest-init - components/spicedb:lib + - components/spicedb/typescript:lib config: packaging: offline-mirror yarnLock: ${coreYarnLockBase}/yarn.lock diff --git a/components/server/package.json b/components/server/package.json index 70599872a8424e..ec5a438588f8ed 100644 --- a/components/server/package.json +++ b/components/server/package.json @@ -56,6 +56,7 @@ "@gitpod/supervisor-api-grpcweb": "0.1.5", "@gitpod/usage-api": "0.1.5", "@gitpod/ws-manager": "0.1.5", + "@gitpod/spicedb-impl": "0.1.5", "@google-cloud/profiler": "^6.0.0", "@improbable-eng/grpc-web-node-http-transport": "^0.14.0", "@jmondi/oauth2-server": "^2.6.1", diff --git a/components/server/src/authorization/authorizer.ts b/components/server/src/authorization/authorizer.ts index 3ec1bc6d3de3c9..f505d92f5d9376 100644 --- a/components/server/src/authorization/authorizer.ts +++ b/components/server/src/authorization/authorizer.ts @@ -69,7 +69,8 @@ export class Authorizer { consistency, }); - return this.authorizer.check(req, { userId }); + const result = await this.authorizer.check(req, { userId }); + return result.permitted; } async checkPermissionOnInstallation(userId: string, permission: InstallationPermission): Promise { @@ -98,7 +99,8 @@ export class Authorizer { consistency, }); - return this.authorizer.check(req, { userId }); + const result = await this.authorizer.check(req, { userId }); + return result.permitted; } async checkPermissionOnOrganization(userId: string, permission: OrganizationPermission, orgId: string) { @@ -128,7 +130,8 @@ export class Authorizer { consistency, }); - return this.authorizer.check(req, { userId }); + const result = await this.authorizer.check(req, { userId }); + return 
result.permitted; } async checkPermissionOnProject(userId: string, permission: ProjectPermission, projectId: string) { @@ -158,7 +161,8 @@ export class Authorizer { consistency, }); - return this.authorizer.check(req, { userId }); + const result = await this.authorizer.check(req, { userId }); + return result.permitted; } async checkPermissionOnUser(userId: string, permission: UserPermission, resourceUserId: string) { @@ -192,7 +196,8 @@ export class Authorizer { consistency, }); - return this.authorizer.check(req, { userId }, forceEnablement); + const result = await this.authorizer.check(req, { userId }, forceEnablement); + return result.permitted; } async checkPermissionOnWorkspace(userId: string, permission: WorkspacePermission, workspaceId: string) { diff --git a/components/server/src/authorization/caching-spicedb-authorizer.spec.db.ts b/components/server/src/authorization/caching-spicedb-authorizer.spec.db.ts new file mode 100644 index 00000000000000..cf3072b716a575 --- /dev/null +++ b/components/server/src/authorization/caching-spicedb-authorizer.spec.db.ts @@ -0,0 +1,234 @@ +/** + * Copyright (c) 2023 Gitpod GmbH. All rights reserved. + * Licensed under the GNU Affero General Public License (AGPL). + * See License.AGPL.txt in the project root for license information. 
+ */ +import { TypeORM } from "@gitpod/gitpod-db/lib"; +import { resetDB } from "@gitpod/gitpod-db/lib/test/reset-db"; +import { CommitContext, Organization, Project, User, WorkspaceConfig } from "@gitpod/gitpod-protocol"; +import { Experiments } from "@gitpod/gitpod-protocol/lib/experiments/configcat-server"; +import * as chai from "chai"; +import { Container } from "inversify"; +import "mocha"; +import { createTestContainer } from "../test/service-testing-container-module"; +import { Authorizer, SYSTEM_USER } from "./authorizer"; +import { OrganizationService } from "../orgs/organization-service"; +import { WorkspaceService } from "../workspace/workspace-service"; +import { UserService } from "../user/user-service"; +import { ZedTokenCache } from "./caching-spicedb-authorizer"; +import { log } from "@gitpod/gitpod-protocol/lib/util/logging"; +import { ConfigProvider } from "../workspace/config-provider"; +import { runWithContext } from "../util/log-context"; + +const expect = chai.expect; + +const withCtx = (p: Promise) => runWithContext("test", {}, () => p); + +describe("CachingSpiceDBAuthorizer", async () => { + let container: Container; + let userSvc: UserService; + let orgSvc: OrganizationService; + let workspaceSvc: WorkspaceService; + let authorizer: Authorizer; + let zedTokenCache: ZedTokenCache; + + beforeEach(async () => { + container = createTestContainer(); + // TODO(gpl) Ideally we should be able to factor this out into the API. But to start somewhere, we'll mock it out here. 
+ container.rebind(ConfigProvider).toConstantValue({ + fetchConfig: () => ({ + config: { + image: "gitpod/workspace-full:latest", + }, + }), + } as any as ConfigProvider); + Experiments.configureTestingClient({ + centralizedPermissions: true, + }); + userSvc = container.get(UserService); + orgSvc = container.get(OrganizationService); + workspaceSvc = container.get(WorkspaceService); + authorizer = container.get(Authorizer); + zedTokenCache = container.get(ZedTokenCache); + }); + + afterEach(async () => { + // Clean-up database + await resetDB(container.get(TypeORM)); + + container.unbindAll(); + }); + + it("should avoid new-enemy after removal", async () => { + // userB and userC are members of org1, userA is owner. + // All users are installation owned. + const org1 = await withCtx(orgSvc.createOrganization(SYSTEM_USER, "org1")); + const userA = await withCtx( + userSvc.createUser({ + organizationId: undefined, + identity: { + authProviderId: "github", + authId: "123", + authName: "userA", + }, + }), + ); + await withCtx(orgSvc.addOrUpdateMember(SYSTEM_USER, org1.id, userA.id, "owner")); + const userB = await withCtx( + userSvc.createUser({ + organizationId: undefined, + identity: { + authProviderId: "github", + authId: "456", + authName: "userB", + }, + }), + ); + await withCtx(orgSvc.addOrUpdateMember(SYSTEM_USER, org1.id, userB.id, "member")); + const userC = await withCtx( + userSvc.createUser({ + organizationId: undefined, + identity: { + authProviderId: "github", + authId: "789", + authName: "userC", + }, + }), + ); + await withCtx(orgSvc.addOrUpdateMember(SYSTEM_USER, org1.id, userC.id, "member")); + + // userA creates a workspace when userB is still member of the org + // All members have "read_info" (derived from membership) + const ws1 = await withCtx(createTestWorkspace(org1, userA)); + + expect( + await withCtx(authorizer.hasPermissionOnWorkspace(userB.id, "read_info", ws1.id)), + "userB should have read_info after removal", + ).to.be.true; + expect( + 
await withCtx(authorizer.hasPermissionOnWorkspace(userA.id, "read_info", ws1.id)), + "userA should have read_info after removal of userB", + ).to.be.true; + expect( + await withCtx(authorizer.hasPermissionOnWorkspace(userC.id, "read_info", ws1.id)), + "userC should have read_info after removal of userB", + ).to.be.true; + + // INTERNALS + async function printTokens(): Promise<{ ws1Token: string | undefined; org1Token: string | undefined }> { + const ws1Token = await zedTokenCache.get({ objectType: "workspace", objectId: ws1.id }); + log.info("ws1Token", ws1Token); + const org1Token = await zedTokenCache.get({ objectType: "organization", objectId: org1.id }); + log.info("org1Token", org1Token); + return { ws1Token, org1Token }; + } + const { org1Token: org1TokenT1 } = await printTokens(); + + // userB is removed from the org + await withCtx(orgSvc.removeOrganizationMember(SYSTEM_USER, org1.id, userB.id)); + + // INTERNALS + const { org1Token: org1TokenT2 } = await printTokens(); + expect(org1TokenT1 === org1TokenT2 && org1TokenT1 !== undefined && org1TokenT2 !== undefined).to.be.false; + + expect( + await withCtx(authorizer.hasPermissionOnWorkspace(userB.id, "read_info", ws1.id)), + "userB should have read_info after removal", + ).to.be.false; + expect( + await withCtx(authorizer.hasPermissionOnWorkspace(userA.id, "read_info", ws1.id)), + "userA should have read_info after removal of userB", + ).to.be.true; + expect( + await withCtx(authorizer.hasPermissionOnWorkspace(userC.id, "read_info", ws1.id)), + "userC should have read_info after removal of userB", + ).to.be.true; + }); + + async function createTestWorkspace(org: Organization, owner: User, project?: Project) { + const ws = await workspaceSvc.createWorkspace( + {}, + owner, + org.id, + project, + { + title: "gitpod", + repository: { + host: "github.com", + owner: "gitpod-io", + name: "gitpod", + cloneUrl: "https://github.com/gitpod-io/gitpod.git", + }, + revision: "asdf", + }, + "github.com/gitpod-io/gitpod", 
+ ); + return ws; + } + + it("should avoid read-your-writes problem when adding a new user", async () => { + // userB and userC are members of org1, userA is owner. + // All users are installation owned. + const org1 = await withCtx(orgSvc.createOrganization(SYSTEM_USER, "org1")); + const userA = await withCtx( + userSvc.createUser({ + organizationId: undefined, + identity: { + authProviderId: "github", + authId: "123", + authName: "userA", + }, + }), + ); + await withCtx(orgSvc.addOrUpdateMember(SYSTEM_USER, org1.id, userA.id, "owner")); + const userC = await withCtx( + userSvc.createUser({ + organizationId: undefined, + identity: { + authProviderId: "github", + authId: "789", + authName: "userC", + }, + }), + ); + await withCtx(orgSvc.addOrUpdateMember(SYSTEM_USER, org1.id, userC.id, "member")); + + // userA creates a workspace before userB is member of the org + const ws1 = await withCtx(createTestWorkspace(org1, userA)); + + expect( + await withCtx(authorizer.hasPermissionOnWorkspace(userA.id, "read_info", ws1.id)), + "userA should have read_info after removal of userB", + ).to.be.true; + expect( + await withCtx(authorizer.hasPermissionOnWorkspace(userC.id, "read_info", ws1.id)), + "userC should have read_info after removal of userB", + ).to.be.true; + + // userB is added to the org + const userB = await withCtx( + userSvc.createUser({ + organizationId: undefined, + identity: { + authProviderId: "github", + authId: "456", + authName: "userB", + }, + }), + ); + await withCtx(orgSvc.addOrUpdateMember(SYSTEM_USER, org1.id, userB.id, "member")); + + expect( + await withCtx(authorizer.hasPermissionOnWorkspace(userB.id, "read_info", ws1.id)), + "userB should have read_info after removal", + ).to.be.true; + expect( + await withCtx(authorizer.hasPermissionOnWorkspace(userA.id, "read_info", ws1.id)), + "userA should have read_info after removal of userB", + ).to.be.true; + expect( + await withCtx(authorizer.hasPermissionOnWorkspace(userC.id, "read_info", ws1.id)), + 
"userC should have read_info after removal of userB", + ).to.be.true; + }); +}); diff --git a/components/server/src/authorization/caching-spicedb-authorizer.ts b/components/server/src/authorization/caching-spicedb-authorizer.ts new file mode 100644 index 00000000000000..4432c2d2e571e0 --- /dev/null +++ b/components/server/src/authorization/caching-spicedb-authorizer.ts @@ -0,0 +1,170 @@ +/** + * Copyright (c) 2023 Gitpod GmbH. All rights reserved. + * Licensed under the GNU Affero General Public License (AGPL). + * See License.AGPL.txt in the project root for license information. + */ + +import { v1 } from "@authzed/authzed-node"; +import { CheckResult, DeletionResult, SpiceDBAuthorizer, SpiceDBAuthorizerImpl } from "./spicedb-authorizer"; +import { log } from "@gitpod/gitpod-protocol/lib/util/logging"; +import { inject, injectable } from "inversify"; +import { clearZedTokenOnContext, getZedTokenFromContext, setZedTokenToContext } from "../util/log-context"; +import { base64decode } from "@jmondi/oauth2-server"; +import { DecodedZedToken } from "@gitpod/spicedb-impl/lib/impl/v1/impl.pb"; + +export type ZedTokenCacheKV = [objectRef: v1.ObjectReference | undefined, token: string | undefined]; +export const ZedTokenCache = Symbol("ZedTokenCache"); +export interface ZedTokenCache { + get(objectRef: v1.ObjectReference): Promise; + set(...kvs: ZedTokenCacheKV[]): Promise; + consistency(resourceRef: v1.ObjectReference | undefined): Promise; +} + +/** + * Works as a caching decorator for SpiceDBAuthorizerImpl. Delegates the actual caching strategy to ZedTokenCache. 
+ */ +@injectable() +export class CachingSpiceDBAuthorizer implements SpiceDBAuthorizer { + constructor( + @inject(SpiceDBAuthorizerImpl) private readonly impl: SpiceDBAuthorizerImpl, + @inject(ZedTokenCache) private readonly tokenCache: ZedTokenCache, + ) {} + + async check( + req: v1.CheckPermissionRequest, + experimentsFields: { userId: string }, + forceEnablement?: boolean | undefined, + ): Promise<CheckResult> { + req.consistency = await this.tokenCache.consistency(req.resource); + const result = await this.impl.check(req, experimentsFields, forceEnablement); + if (result.checkedAt) { + await this.tokenCache.set([req.resource, result.checkedAt]); + } + return result; + } + + async writeRelationships(...updates: v1.RelationshipUpdate[]): Promise<v1.WriteRelationshipsResponse | undefined> { + const result = await this.impl.writeRelationships(...updates); + const writtenAt = result?.writtenAt?.token; + await this.tokenCache.set( + ...updates.map<ZedTokenCacheKV>((u) => [ + u.relationship?.resource, + writtenAt, // Make sure that in case we don't get a writtenAt token here, we at least invalidate the cache + ]), + ); + return result; + } + + async deleteRelationships(req: v1.DeleteRelationshipsRequest): Promise<DeletionResult> { + const result = await this.impl.deleteRelationships(req); + log.info(`[spicedb] Deletion result`, { result }); + const deletedAt = result?.deletedAt; + if (deletedAt) { + await this.tokenCache.set( + ...result.relationships.map<ZedTokenCacheKV>((r) => [r.relationship?.resource, deletedAt]), + ); + } + return result; + } + + async readRelationships(req: v1.ReadRelationshipsRequest): Promise<v1.ReadRelationshipsResponse[]> { + // pass through with given consistency/caching for now + return this.impl.readRelationships(req); + } +} + +/** + * This is a simple implementation of the ZedTokenCache that uses the local context to store single ZedToken per API request, which is stored in AsyncLocalStorage. + * To make this work we make the "assumption" that ZedTokens string (meant to be opaque) represent a timestamp which we can order.
This is at least true for the MySQL datastore we are using. + */ +@injectable() +export class RequestLocalZedTokenCache implements ZedTokenCache { + constructor() {} + + async get(objectRef: v1.ObjectReference): Promise<string | undefined> { + return getZedTokenFromContext()?.token; + } + + async set(...kvs: ZedTokenCacheKV[]) { + const mustClearCache = kvs.some(([k, v]) => !!k && !v); // did we write a relationship without getting a writtenAt token? + if (mustClearCache) { + clearZedTokenOnContext(); + return; + } + + try { + const allTokens = [ + ...kvs.map(([_, v]) => (!!v ? StoredZedToken.fromToken(v) : undefined)), + getZedTokenFromContext(), + ].filter((v) => !!v) as StoredZedToken[]; + const freshest = this.freshest(...allTokens); + if (freshest) { + setZedTokenToContext(freshest); + } + } catch (err) { + log.warn("[spicedb] Failed to set ZedToken on context", err); + clearZedTokenOnContext(); + } + } + + async consistency(resourceRef: v1.ObjectReference | undefined): Promise<v1.Consistency> { + function fullyConsistent() { + return v1.Consistency.create({ + requirement: { + oneofKind: "fullyConsistent", + fullyConsistent: true, + }, + }); + } + if (!resourceRef) { + return fullyConsistent(); + } + + const zedToken = await this.get(resourceRef); + if (!zedToken) { + return fullyConsistent(); + } + return v1.Consistency.create({ + requirement: { + oneofKind: "atLeastAsFresh", + atLeastAsFresh: v1.ZedToken.create({ + token: zedToken, + }), + }, + }); + } + + protected freshest(...zedTokens: StoredZedToken[]): StoredZedToken | undefined { + return zedTokens.reduce<StoredZedToken | undefined>((prev, curr) => { + if (!prev || prev.timestamp < curr.timestamp) { + return curr; + } + return prev; + }, undefined); + } +} + +export interface StoredZedToken { + token: string; + timestamp: number; +} +namespace StoredZedToken { + export function create(token: string, timestamp: number): StoredZedToken { + return { token, timestamp }; + } + + export function fromToken(token: string): StoredZedToken | undefined { + // following
https://github.com/authzed/spicedb/blob/786555c24af98abfe3f832c94dbae5ca518dcf50/pkg/zedtoken/zedtoken.go#L64-L100 + const decodedBytes = base64decode(token); + const decodedToken = DecodedZedToken.decode(Buffer.from(decodedBytes, "utf8")).v1; + if (!decodedToken) { + return undefined; + } + + // for MySQL: + // - https://github.com/authzed/spicedb/blob/main/internal/datastore/mysql/revisions.go#L182C1-L189C2 + // - https://github.com/authzed/spicedb/blob/786555c24af98abfe3f832c94dbae5ca518dcf50/pkg/datastore/revision/decimal.go#L53 + const timestamp = parseInt(decodedToken.revision, 10); + return { token, timestamp }; + } +} diff --git a/components/server/src/authorization/spicedb-authorizer.ts b/components/server/src/authorization/spicedb-authorizer.ts index 2dfbd81f2ca8f0..20b7b9f2af7c81 100644 --- a/components/server/src/authorization/spicedb-authorizer.ts +++ b/components/server/src/authorization/spicedb-authorizer.ts @@ -38,8 +38,32 @@ async function tryThree(errMessage: string, code: (attempt: number) => Promis throw new Error("unreachable"); } +export const SpiceDBAuthorizer = Symbol("SpiceDBAuthorizer"); +export interface SpiceDBAuthorizer { + check( + req: v1.CheckPermissionRequest, + experimentsFields: { + userId: string; + }, + forceEnablement?: boolean, + ): Promise<CheckResult>; + writeRelationships(...updates: v1.RelationshipUpdate[]): Promise<v1.WriteRelationshipsResponse | undefined>; + deleteRelationships(req: v1.DeleteRelationshipsRequest): Promise<DeletionResult>; + readRelationships(req: v1.ReadRelationshipsRequest): Promise<v1.ReadRelationshipsResponse[]>; +} + +export interface CheckResult { + permitted: boolean; + checkedAt?: string; +} + +export interface DeletionResult { + relationships: v1.ReadRelationshipsResponse[]; + deletedAt?: string; +} + @injectable() -export class SpiceDBAuthorizer { +export class SpiceDBAuthorizerImpl implements SpiceDBAuthorizer { constructor( @inject(SpiceDBClientProvider) private readonly clientProvider: SpiceDBClientProvider, @@ -55,9 +79,9 @@ export class SpiceDBAuthorizer { userId: string; },
forceEnablement?: boolean, - ): Promise { + ): Promise { if (!(await isFgaWritesEnabled(experimentsFields.userId))) { - return true; + return { permitted: true }; } const featureEnabled = !!forceEnablement || (await isFgaChecksEnabled(experimentsFields.userId)); const result = (async () => { @@ -73,23 +97,23 @@ export class SpiceDBAuthorizer { response: new TrustedValue(response), request: new TrustedValue(req), }); - return true; + return { permitted: true, checkedAt: response.checkedAt?.token }; } - return permitted; + return { permitted, checkedAt: response.checkedAt?.token }; } catch (err) { error = err; log.error("[spicedb] Failed to perform authorization check.", err, { request: new TrustedValue(req), }); - return !featureEnabled; + return { permitted: !featureEnabled }; } finally { observeSpicedbClientLatency("check", error, timer()); } })(); // if the feature is not enabld, we don't await if (!featureEnabled) { - return true; + return { permitted: true }; } return result; } @@ -114,10 +138,11 @@ export class SpiceDBAuthorizer { } } - async deleteRelationships(req: v1.DeleteRelationshipsRequest): Promise { + async deleteRelationships(req: v1.DeleteRelationshipsRequest): Promise { const timer = spicedbClientLatency.startTimer(); let error: Error | undefined; try { + let deletedAt: string | undefined = undefined; const existing = await tryThree("readRelationships before deleteRelationships failed.", () => this.client.readRelationships(v1.ReadRelationshipsRequest.create(req), this.callOptions), ); @@ -125,6 +150,7 @@ export class SpiceDBAuthorizer { const response = await tryThree("deleteRelationships failed.", () => this.client.deleteRelationships(req, this.callOptions), ); + deletedAt = response.deletedAt?.token; const after = await tryThree("readRelationships failed.", () => this.client.readRelationships(v1.ReadRelationshipsRequest.create(req), this.callOptions), ); @@ -137,13 +163,16 @@ export class SpiceDBAuthorizer { existing, }); } - return existing; + 
return { + relationships: existing, + deletedAt, + }; } catch (err) { error = err; // While in we're running two authorization systems in parallel, we do not hard fail on writes. //TODO throw new ApplicationError(ErrorCodes.INTERNAL_SERVER_ERROR, "Failed to delete relationships."); log.error("[spicedb] Failed to delete relationships.", err, { request: new TrustedValue(req) }); - return []; + return { relationships: [] }; } finally { observeSpicedbClientLatency("delete", error, timer()); } diff --git a/components/server/src/container-module.ts b/components/server/src/container-module.ts index fde10a6926bbb1..7b7f007fd49794 100644 --- a/components/server/src/container-module.ts +++ b/components/server/src/container-module.ts @@ -51,7 +51,7 @@ import { Authorizer, createInitializingAuthorizer } from "./authorization/author import { RelationshipUpdater } from "./authorization/relationship-updater"; import { RelationshipUpdateJob } from "./authorization/relationship-updater-job"; import { SpiceDBClientProvider, spiceDBConfigFromEnv } from "./authorization/spicedb"; -import { SpiceDBAuthorizer } from "./authorization/spicedb-authorizer"; +import { SpiceDBAuthorizer, SpiceDBAuthorizerImpl } from "./authorization/spicedb-authorizer"; import { BillingModes } from "./billing/billing-mode"; import { EntitlementService, EntitlementServiceImpl } from "./billing/entitlement-service"; import { EntitlementServiceUBP } from "./billing/entitlement-service-ubp"; @@ -129,6 +129,11 @@ import { WorkspaceFactory } from "./workspace/workspace-factory"; import { WorkspaceService } from "./workspace/workspace-service"; import { WorkspaceStartController } from "./workspace/workspace-start-controller"; import { WorkspaceStarter } from "./workspace/workspace-starter"; +import { + CachingSpiceDBAuthorizer, + RequestLocalZedTokenCache, + ZedTokenCache, +} from "./authorization/caching-spicedb-authorizer"; export const productionContainerModule = new ContainerModule( (bind, unbind, isBound, 
rebind, unbindAsync, onActivation, onDeactivation) => { @@ -317,7 +322,11 @@ export const productionContainerModule = new ContainerModule( ); }) .inSingletonScope(); - bind(SpiceDBAuthorizer).toSelf().inSingletonScope(); + bind(SpiceDBAuthorizerImpl).toSelf().inSingletonScope(); + bind(CachingSpiceDBAuthorizer).toSelf().inSingletonScope(); + bind(RequestLocalZedTokenCache).toSelf().inSingletonScope(); + bind(ZedTokenCache).to(RequestLocalZedTokenCache).inSingletonScope(); + bind(SpiceDBAuthorizer).to(CachingSpiceDBAuthorizer).inSingletonScope(); bind(Authorizer) .toDynamicValue((ctx) => { const authorizer = ctx.container.get(SpiceDBAuthorizer); diff --git a/components/server/src/util/log-context.ts b/components/server/src/util/log-context.ts index a88dad8b316368..d036dfb4aceafe 100644 --- a/components/server/src/util/log-context.ts +++ b/components/server/src/util/log-context.ts @@ -8,6 +8,7 @@ import { LogContext } from "@gitpod/gitpod-protocol/lib/util/logging"; import { AsyncLocalStorage } from "node:async_hooks"; import { performance } from "node:perf_hooks"; import { v4 } from "uuid"; +import { StoredZedToken } from "../authorization/caching-spicedb-authorizer"; export type LogContextOptions = LogContext & { contextId?: string; @@ -21,6 +22,8 @@ type EnhancedLogContext = LogContextOptions & { contextKind: string; contextId: string; contextTimeMs: number; +} & { + zedToken?: StoredZedToken; }; const asyncLocalStorage = new AsyncLocalStorage(); @@ -64,3 +67,19 @@ export function wrapAsyncGenerator( }, }; } + +export function getZedTokenFromContext(): StoredZedToken | undefined { + return asyncLocalStorage.getStore()?.zedToken; +} +export function setZedTokenToContext(zedToken: StoredZedToken) { + const ctx = asyncLocalStorage.getStore(); + if (ctx) { + ctx.zedToken = zedToken; + } +} +export function clearZedTokenOnContext() { + const ctx = asyncLocalStorage.getStore(); + if (ctx) { + ctx.zedToken = undefined; + } +} diff --git a/components/spicedb/buf.gen.yaml 
b/components/spicedb/buf.gen.yaml new file mode 100755 index 00000000000000..e3ef68ef8c9d30 --- /dev/null +++ b/components/spicedb/buf.gen.yaml @@ -0,0 +1,13 @@ +#!/usr/bin/env -S buf generate --template buf.gen.yaml https://github.com/authzed/spicedb.git#tag=v1.22.2 --path proto/internal/impl/v1/impl.proto +# The version refers to the version of the SpiceDB image/binary that we are running +version: v1 +plugins: + - name: ts_proto + out: typescript/src + path: typescript/node_modules/.bin/protoc-gen-ts_proto + opt: + - context=true + - lowerCaseServiceMethods=true + - stringEnums=true + - fileSuffix=.pb + - outputServices=nice-grpc,outputServices=generic-definitions,useExactTypes=false diff --git a/components/spicedb/buf.yaml b/components/spicedb/buf.yaml new file mode 100644 index 00000000000000..dc121499a167a7 --- /dev/null +++ b/components/spicedb/buf.yaml @@ -0,0 +1,11 @@ +version: v1 +breaking: + use: + - FILE +lint: + use: + - DEFAULT + except: + - ENUM_ZERO_VALUE_SUFFIX + ignore: + - google diff --git a/components/spicedb/generate.sh b/components/spicedb/generate.sh new file mode 100755 index 00000000000000..8a70abf0a6524c --- /dev/null +++ b/components/spicedb/generate.sh @@ -0,0 +1,19 @@ +#!/bin/bash + +if [ -n "$DEBUG" ]; then + set -x +fi + +set -o errexit +set -o nounset +set -o pipefail + +ROOT_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd -P)/../../ + +# include protoc bash functions +# shellcheck disable=SC1090,SC1091 +source "$ROOT_DIR"/scripts/protoc-generator.sh + +install_dependencies +./buf.gen.yaml +update_license diff --git a/components/spicedb/typescript/BUILD.yaml b/components/spicedb/typescript/BUILD.yaml new file mode 100644 index 00000000000000..2c12279f020f39 --- /dev/null +++ b/components/spicedb/typescript/BUILD.yaml @@ -0,0 +1,14 @@ +packages: + - name: lib + type: yarn + srcs: + - src/** + - package.json + - tsconfig.json + config: + packaging: library + dontTest: true + commands: + build: ["yarn", "build"] + yarnLock: 
${coreYarnLockBase}/../yarn.lock + tsconfig: tsconfig.json diff --git a/components/spicedb/typescript/package.json b/components/spicedb/typescript/package.json new file mode 100644 index 00000000000000..924d555c8768af --- /dev/null +++ b/components/spicedb/typescript/package.json @@ -0,0 +1,26 @@ +{ + "name": "@gitpod/spicedb-impl", + "version": "0.1.5", + "license": "UNLICENSED", + "files": [ + "lib" + ], + "scripts": { + "build": "mkdir -p lib; tsc", + "watch": "leeway exec --package .:lib --transitive-dependencies --filter-type yarn --components --parallel -- tsc -w --preserveWatchOutput" + }, + "overrides": { + "long": "4.0.0" + }, + "dependencies": { + "long": "4.0.0", + "nice-grpc": "^2.0.0", + "ts-proto": "^1.153.0" + }, + "devDependencies": { + "@types/long": "4.0.0", + "grpc-tools": "^1.12.4", + "typescript": "~4.4.2", + "typescript-formatter": "^7.2.2" + } +} diff --git a/components/spicedb/typescript/src/google/api/expr/v1alpha1/checked.pb.ts b/components/spicedb/typescript/src/google/api/expr/v1alpha1/checked.pb.ts new file mode 100644 index 00000000000000..efe7c339c5a419 --- /dev/null +++ b/components/spicedb/typescript/src/google/api/expr/v1alpha1/checked.pb.ts @@ -0,0 +1,1896 @@ +/** + * Copyright (c) 2023 Gitpod GmbH. All rights reserved. + * Licensed under the GNU Affero General Public License (AGPL). + * See License.AGPL.txt in the project root for license information. + */ + +/* eslint-disable */ +import * as _m0 from "protobufjs/minimal"; +import { Empty } from "../../../protobuf/empty.pb"; +import { NullValue, nullValueFromJSON, nullValueToJSON, nullValueToNumber } from "../../../protobuf/struct.pb"; +import { Constant, Expr, SourceInfo } from "./syntax.pb"; +import Long = require("long"); + +export const protobufPackage = "google.api.expr.v1alpha1"; + +/** A CEL expression which has been successfully type checked. */ +export interface CheckedExpr { + /** + * A map from expression ids to resolved references. 
+ * + * The following entries are in this table: + * + * - An Ident or Select expression is represented here if it resolves to a + * declaration. For instance, if `a.b.c` is represented by + * `select(select(id(a), b), c)`, and `a.b` resolves to a declaration, + * while `c` is a field selection, then the reference is attached to the + * nested select expression (but not to the id or or the outer select). + * In turn, if `a` resolves to a declaration and `b.c` are field selections, + * the reference is attached to the ident expression. + * - Every Call expression has an entry here, identifying the function being + * called. + * - Every CreateStruct expression for a message has an entry, identifying + * the message. + */ + referenceMap: { [key: number]: Reference }; + /** + * A map from expression ids to types. + * + * Every expression node which has a type different than DYN has a mapping + * here. If an expression has type DYN, it is omitted from this map to save + * space. + */ + typeMap: { [key: number]: Type }; + /** + * The source info derived from input that generated the parsed `expr` and + * any optimizations made during the type-checking pass. + */ + sourceInfo: + | SourceInfo + | undefined; + /** + * The expr version indicates the major / minor version number of the `expr` + * representation. + * + * The most common reason for a version change will be to indicate to the CEL + * runtimes that transformations have been performed on the expr during static + * analysis. In some cases, this will save the runtime the work of applying + * the same or similar transformations prior to evaluation. + */ + exprVersion: string; + /** + * The checked expression. Semantically equivalent to the parsed `expr`, but + * may have structural differences. 
+ */ + expr: Expr | undefined; +} + +export interface CheckedExpr_ReferenceMapEntry { + key: number; + value: Reference | undefined; +} + +export interface CheckedExpr_TypeMapEntry { + key: number; + value: Type | undefined; +} + +/** Represents a CEL type. */ +export interface Type { + /** Dynamic type. */ + dyn?: + | Empty + | undefined; + /** Null value. */ + null?: + | NullValue + | undefined; + /** Primitive types: `true`, `1u`, `-2.0`, `'string'`, `b'bytes'`. */ + primitive?: + | Type_PrimitiveType + | undefined; + /** Wrapper of a primitive type, e.g. `google.protobuf.Int64Value`. */ + wrapper?: + | Type_PrimitiveType + | undefined; + /** Well-known protobuf type such as `google.protobuf.Timestamp`. */ + wellKnown?: + | Type_WellKnownType + | undefined; + /** Parameterized list with elements of `list_type`, e.g. `list`. */ + listType?: + | Type_ListType + | undefined; + /** Parameterized map with typed keys and values. */ + mapType?: + | Type_MapType + | undefined; + /** Function type. */ + function?: + | Type_FunctionType + | undefined; + /** + * Protocol buffer message type. + * + * The `message_type` string specifies the qualified message type name. For + * example, `google.plus.Profile`. + */ + messageType?: + | string + | undefined; + /** + * Type param type. + * + * The `type_param` string specifies the type parameter name, e.g. `list` + * would be a `list_type` whose element type was a `type_param` type + * named `E`. + */ + typeParam?: + | string + | undefined; + /** + * Type type. + * + * The `type` value specifies the target type. e.g. int is type with a + * target type of `Primitive.INT`. + */ + type?: + | Type + | undefined; + /** + * Error type. + * + * During type-checking if an expression is an error, its type is propagated + * as the `ERROR` type. This permits the type-checker to discover other + * errors present in the expression. + */ + error?: + | Empty + | undefined; + /** Abstract, application defined type. 
*/ + abstractType?: Type_AbstractType | undefined; +} + +/** CEL primitive types. */ +export enum Type_PrimitiveType { + /** PRIMITIVE_TYPE_UNSPECIFIED - Unspecified type. */ + PRIMITIVE_TYPE_UNSPECIFIED = "PRIMITIVE_TYPE_UNSPECIFIED", + /** BOOL - Boolean type. */ + BOOL = "BOOL", + /** + * INT64 - Int64 type. + * + * Proto-based integer values are widened to int64. + */ + INT64 = "INT64", + /** + * UINT64 - Uint64 type. + * + * Proto-based unsigned integer values are widened to uint64. + */ + UINT64 = "UINT64", + /** + * DOUBLE - Double type. + * + * Proto-based float values are widened to double values. + */ + DOUBLE = "DOUBLE", + /** STRING - String type. */ + STRING = "STRING", + /** BYTES - Bytes type. */ + BYTES = "BYTES", + UNRECOGNIZED = "UNRECOGNIZED", +} + +export function type_PrimitiveTypeFromJSON(object: any): Type_PrimitiveType { + switch (object) { + case 0: + case "PRIMITIVE_TYPE_UNSPECIFIED": + return Type_PrimitiveType.PRIMITIVE_TYPE_UNSPECIFIED; + case 1: + case "BOOL": + return Type_PrimitiveType.BOOL; + case 2: + case "INT64": + return Type_PrimitiveType.INT64; + case 3: + case "UINT64": + return Type_PrimitiveType.UINT64; + case 4: + case "DOUBLE": + return Type_PrimitiveType.DOUBLE; + case 5: + case "STRING": + return Type_PrimitiveType.STRING; + case 6: + case "BYTES": + return Type_PrimitiveType.BYTES; + case -1: + case "UNRECOGNIZED": + default: + return Type_PrimitiveType.UNRECOGNIZED; + } +} + +export function type_PrimitiveTypeToJSON(object: Type_PrimitiveType): string { + switch (object) { + case Type_PrimitiveType.PRIMITIVE_TYPE_UNSPECIFIED: + return "PRIMITIVE_TYPE_UNSPECIFIED"; + case Type_PrimitiveType.BOOL: + return "BOOL"; + case Type_PrimitiveType.INT64: + return "INT64"; + case Type_PrimitiveType.UINT64: + return "UINT64"; + case Type_PrimitiveType.DOUBLE: + return "DOUBLE"; + case Type_PrimitiveType.STRING: + return "STRING"; + case Type_PrimitiveType.BYTES: + return "BYTES"; + case Type_PrimitiveType.UNRECOGNIZED: + default: 
+ return "UNRECOGNIZED"; + } +} + +export function type_PrimitiveTypeToNumber(object: Type_PrimitiveType): number { + switch (object) { + case Type_PrimitiveType.PRIMITIVE_TYPE_UNSPECIFIED: + return 0; + case Type_PrimitiveType.BOOL: + return 1; + case Type_PrimitiveType.INT64: + return 2; + case Type_PrimitiveType.UINT64: + return 3; + case Type_PrimitiveType.DOUBLE: + return 4; + case Type_PrimitiveType.STRING: + return 5; + case Type_PrimitiveType.BYTES: + return 6; + case Type_PrimitiveType.UNRECOGNIZED: + default: + return -1; + } +} + +/** Well-known protobuf types treated with first-class support in CEL. */ +export enum Type_WellKnownType { + /** WELL_KNOWN_TYPE_UNSPECIFIED - Unspecified type. */ + WELL_KNOWN_TYPE_UNSPECIFIED = "WELL_KNOWN_TYPE_UNSPECIFIED", + /** + * ANY - Well-known protobuf.Any type. + * + * Any types are a polymorphic message type. During type-checking they are + * treated like `DYN` types, but at runtime they are resolved to a specific + * message type specified at evaluation time. + */ + ANY = "ANY", + /** TIMESTAMP - Well-known protobuf.Timestamp type, internally referenced as `timestamp`. */ + TIMESTAMP = "TIMESTAMP", + /** DURATION - Well-known protobuf.Duration type, internally referenced as `duration`. 
*/ + DURATION = "DURATION", + UNRECOGNIZED = "UNRECOGNIZED", +} + +export function type_WellKnownTypeFromJSON(object: any): Type_WellKnownType { + switch (object) { + case 0: + case "WELL_KNOWN_TYPE_UNSPECIFIED": + return Type_WellKnownType.WELL_KNOWN_TYPE_UNSPECIFIED; + case 1: + case "ANY": + return Type_WellKnownType.ANY; + case 2: + case "TIMESTAMP": + return Type_WellKnownType.TIMESTAMP; + case 3: + case "DURATION": + return Type_WellKnownType.DURATION; + case -1: + case "UNRECOGNIZED": + default: + return Type_WellKnownType.UNRECOGNIZED; + } +} + +export function type_WellKnownTypeToJSON(object: Type_WellKnownType): string { + switch (object) { + case Type_WellKnownType.WELL_KNOWN_TYPE_UNSPECIFIED: + return "WELL_KNOWN_TYPE_UNSPECIFIED"; + case Type_WellKnownType.ANY: + return "ANY"; + case Type_WellKnownType.TIMESTAMP: + return "TIMESTAMP"; + case Type_WellKnownType.DURATION: + return "DURATION"; + case Type_WellKnownType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export function type_WellKnownTypeToNumber(object: Type_WellKnownType): number { + switch (object) { + case Type_WellKnownType.WELL_KNOWN_TYPE_UNSPECIFIED: + return 0; + case Type_WellKnownType.ANY: + return 1; + case Type_WellKnownType.TIMESTAMP: + return 2; + case Type_WellKnownType.DURATION: + return 3; + case Type_WellKnownType.UNRECOGNIZED: + default: + return -1; + } +} + +/** List type with typed elements, e.g. `list`. */ +export interface Type_ListType { + /** The element type. */ + elemType: Type | undefined; +} + +/** Map type with parameterized key and value types, e.g. `map`. */ +export interface Type_MapType { + /** The type of the key. */ + keyType: + | Type + | undefined; + /** The type of the value. */ + valueType: Type | undefined; +} + +/** Function type with result and arg types. */ +export interface Type_FunctionType { + /** Result type of the function. */ + resultType: + | Type + | undefined; + /** Argument types of the function. 
   */
  argTypes: Type[];
}

/** Application defined abstract type. */
export interface Type_AbstractType {
  /** The fully qualified name of this abstract type. */
  name: string;
  /** Parameter types for this abstract type. */
  parameterTypes: Type[];
}

/**
 * Represents a declaration of a named value or function.
 *
 * A declaration is part of the contract between the expression, the agent
 * evaluating that expression, and the caller requesting evaluation.
 */
export interface Decl {
  /**
   * The fully qualified name of the declaration.
   *
   * Declarations are organized in containers and this represents the full path
   * to the declaration in its container, as in `google.api.expr.Decl`.
   *
   * Declarations used as [FunctionDecl.Overload][google.api.expr.v1alpha1.Decl.FunctionDecl.Overload] parameters may or may not
   * have a name depending on whether the overload is function declaration or a
   * function definition containing a result [Expr][google.api.expr.v1alpha1.Expr].
   */
  name: string;
  /** Identifier declaration. */
  ident?:
    | Decl_IdentDecl
    | undefined;
  /** Function declaration. */
  function?: Decl_FunctionDecl | undefined;
}

/**
 * Identifier declaration which specifies its type and optional `Expr` value.
 *
 * An identifier without a value is a declaration that must be provided at
 * evaluation time. An identifier with a value should resolve to a constant,
 * but may be used in conjunction with other identifiers bound at evaluation
 * time.
 */
export interface Decl_IdentDecl {
  /** Required. The type of the identifier. */
  type:
    | Type
    | undefined;
  /**
   * The constant value of the identifier. If not specified, the identifier
   * must be supplied at evaluation time.
   */
  value:
    | Constant
    | undefined;
  /** Documentation string for the identifier. */
  doc: string;
}

/**
 * Function declaration specifies one or more overloads which indicate the
 * function's parameter types and return type.
 *
 * Functions have no observable side-effects (there may be side-effects like
 * logging which are not observable from CEL).
 */
export interface Decl_FunctionDecl {
  /** Required. List of function overloads, must contain at least one overload. */
  overloads: Decl_FunctionDecl_Overload[];
}

/**
 * An overload indicates a function's parameter types and return type, and
 * may optionally include a function body described in terms of [Expr][google.api.expr.v1alpha1.Expr]
 * values.
 *
 * Function overloads are declared in either a function or method
 * call-style. For methods, the `params[0]` is the expected type of the
 * target receiver.
 *
 * Overloads must have non-overlapping argument types after erasure of all
 * parameterized type variables (similar as type erasure in Java).
 */
export interface Decl_FunctionDecl_Overload {
  /**
   * Required. Globally unique overload name of the function which reflects
   * the function name and argument types.
   *
   * This will be used by a [Reference][google.api.expr.v1alpha1.Reference] to indicate the `overload_id` that
   * was resolved for the function `name`.
   */
  overloadId: string;
  /**
   * List of function parameter [Type][google.api.expr.v1alpha1.Type] values.
   *
   * Param types are disjoint after generic type parameters have been
   * replaced with the type `DYN`. Since the `DYN` type is compatible with
   * any other type, this means that if `A` is a type parameter, the
   * function types `int<A>` and `int<int>` are not disjoint. Likewise,
   * `map<string, string>` is not disjoint from `map<K, V>`.
   * NOTE(review): the generic arguments above were stripped by extraction and
   * restored from the upstream proto comments — confirm against a regenerated
   * file.
   *
   * When the `result_type` of a function is a generic type param, the
   * type param name also appears as the `type` of on at least one params.
   */
  params: Type[];
  /**
   * The type param names associated with the function declaration.
   *
   * For example, a function declared as `ex(K key, map<K, V> map) : V` would
   * yield the type params of `K, V`.
   */
  typeParams: string[];
  /**
   * Required. The result type of the function. For example, the operator
   * `string.isEmpty()` would have `result_type` of `kind: BOOL`.
   */
  resultType:
    | Type
    | undefined;
  /**
   * Whether the function is to be used in a method call-style `x.f(...)`
   * or a function call-style `f(x, ...)`.
   *
   * For methods, the first parameter declaration, `params[0]` is the
   * expected type of the target receiver.
   */
  isInstanceFunction: boolean;
  /** Documentation string for the overload. */
  doc: string;
}

/** Describes a resolved reference to a declaration. */
export interface Reference {
  /** The fully qualified name of the declaration. */
  name: string;
  /**
   * For references to functions, this is a list of `Overload.overload_id`
   * values which match according to typing rules.
   *
   * If the list has more than one element, overload resolution among the
   * presented candidates must happen at runtime because of dynamic types. The
   * type checker attempts to narrow down this list as much as possible.
   *
   * Empty if this is not a reference to a [Decl.FunctionDecl][google.api.expr.v1alpha1.Decl.FunctionDecl].
   */
  overloadId: string[];
  /**
   * For references to constants, this may contain the value of the
   * constant if known at compile time.
   */
  value: Constant | undefined;
}

/** Returns a CheckedExpr with every field at its proto3 default value. */
function createBaseCheckedExpr(): CheckedExpr {
  return { referenceMap: {}, typeMap: {}, sourceInfo: undefined, exprVersion: "", expr: undefined };
}

/**
 * Binary and JSON codec for google.api.expr.v1alpha1.CheckedExpr.
 * NOTE(review): ts-proto generated code — regenerate rather than hand-edit.
 */
export const CheckedExpr = {
  encode(message: CheckedExpr, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer {
    // Field 2 (tag 18): reference_map — one length-delimited map entry per key.
    Object.entries(message.referenceMap).forEach(([key, value]) => {
      CheckedExpr_ReferenceMapEntry.encode({ key: key as any, value }, writer.uint32(18).fork()).ldelim();
    });
    // Field 3 (tag 26): type_map.
    Object.entries(message.typeMap).forEach(([key, value]) => {
      CheckedExpr_TypeMapEntry.encode({ key: key as any, value }, writer.uint32(26).fork()).ldelim();
    });
    // Field 5 (tag 42): source_info.
    if (message.sourceInfo !== undefined) {
      SourceInfo.encode(message.sourceInfo, writer.uint32(42).fork()).ldelim();
    }
    // Field 6 (tag 50): expr_version.
    if (message.exprVersion !== "") {
      writer.uint32(50).string(message.exprVersion);
    }
    // Field 4 (tag 34): expr — written out of numeric order, which is valid protobuf.
    if (message.expr !== undefined) {
      Expr.encode(message.expr, writer.uint32(34).fork()).ldelim();
    }
    return writer;
  },

  decode(input: _m0.Reader | Uint8Array, length?: number): CheckedExpr {
    const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input);
    // When `length` is given we decode an embedded message ending at pos+length.
    let end = length === undefined ? reader.len : reader.pos + length;
    const message = createBaseCheckedExpr();
    while (reader.pos < end) {
      const tag = reader.uint32();
      switch (tag >>> 3) {
        case 2:
          // Wire-type check: fall through to skipType on a mismatched tag.
          if (tag !== 18) {
            break;
          }

          const entry2 = CheckedExpr_ReferenceMapEntry.decode(reader, reader.uint32());
          if (entry2.value !== undefined) {
            message.referenceMap[entry2.key] = entry2.value;
          }
          continue;
        case 3:
          if (tag !== 26) {
            break;
          }

          const entry3 = CheckedExpr_TypeMapEntry.decode(reader, reader.uint32());
          if (entry3.value !== undefined) {
            message.typeMap[entry3.key] = entry3.value;
          }
          continue;
        case 5:
          if (tag !== 42) {
            break;
          }

          message.sourceInfo = SourceInfo.decode(reader, reader.uint32());
          continue;
        case 6:
          if (tag !== 50) {
            break;
          }

          message.exprVersion = reader.string();
          continue;
        case 4:
          if (tag !== 34) {
            break;
          }

          message.expr = Expr.decode(reader, reader.uint32());
          continue;
      }
      // Stop on end-group (wire type 4) or an invalid zero tag; otherwise skip unknown fields.
      if ((tag & 7) === 4 || tag === 0) {
        break;
      }
      reader.skipType(tag & 7);
    }
    return message;
  },

  fromJSON(object: any): CheckedExpr {
    return {
      referenceMap: isObject(object.referenceMap)
        ? Object.entries(object.referenceMap).reduce<{ [key: number]: Reference }>((acc, [key, value]) => {
          acc[Number(key)] = Reference.fromJSON(value);
          return acc;
        }, {})
        : {},
      typeMap: isObject(object.typeMap)
        ? Object.entries(object.typeMap).reduce<{ [key: number]: Type }>((acc, [key, value]) => {
          acc[Number(key)] = Type.fromJSON(value);
          return acc;
        }, {})
        : {},
      sourceInfo: isSet(object.sourceInfo) ? SourceInfo.fromJSON(object.sourceInfo) : undefined,
      exprVersion: isSet(object.exprVersion) ? String(object.exprVersion) : "",
      expr: isSet(object.expr) ? Expr.fromJSON(object.expr) : undefined,
    };
  },

  toJSON(message: CheckedExpr): unknown {
    const obj: any = {};
    // Defaults are omitted from JSON output, matching proto3 JSON conventions.
    if (message.referenceMap) {
      const entries = Object.entries(message.referenceMap);
      if (entries.length > 0) {
        obj.referenceMap = {};
        entries.forEach(([k, v]) => {
          obj.referenceMap[k] = Reference.toJSON(v);
        });
      }
    }
    if (message.typeMap) {
      const entries = Object.entries(message.typeMap);
      if (entries.length > 0) {
        obj.typeMap = {};
        entries.forEach(([k, v]) => {
          obj.typeMap[k] = Type.toJSON(v);
        });
      }
    }
    if (message.sourceInfo !== undefined) {
      obj.sourceInfo = SourceInfo.toJSON(message.sourceInfo);
    }
    if (message.exprVersion !== "") {
      obj.exprVersion = message.exprVersion;
    }
    if (message.expr !== undefined) {
      obj.expr = Expr.toJSON(message.expr);
    }
    return obj;
  },

  create(base?: DeepPartial<CheckedExpr>): CheckedExpr {
    return CheckedExpr.fromPartial(base ?? {});
  },
  fromPartial(object: DeepPartial<CheckedExpr>): CheckedExpr {
    const message = createBaseCheckedExpr();
    message.referenceMap = Object.entries(object.referenceMap ?? {}).reduce<{ [key: number]: Reference }>(
      (acc, [key, value]) => {
        if (value !== undefined) {
          acc[Number(key)] = Reference.fromPartial(value);
        }
        return acc;
      },
      {},
    );
    message.typeMap = Object.entries(object.typeMap ?? {}).reduce<{ [key: number]: Type }>((acc, [key, value]) => {
      if (value !== undefined) {
        acc[Number(key)] = Type.fromPartial(value);
      }
      return acc;
    }, {});
    message.sourceInfo = (object.sourceInfo !== undefined && object.sourceInfo !== null)
      ? SourceInfo.fromPartial(object.sourceInfo)
      : undefined;
    message.exprVersion = object.exprVersion ?? "";
    message.expr = (object.expr !== undefined && object.expr !== null) ?
Expr.fromPartial(object.expr) : undefined; + return message; + }, +}; + +function createBaseCheckedExpr_ReferenceMapEntry(): CheckedExpr_ReferenceMapEntry { + return { key: 0, value: undefined }; +} + +export const CheckedExpr_ReferenceMapEntry = { + encode(message: CheckedExpr_ReferenceMapEntry, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.key !== 0) { + writer.uint32(8).int64(message.key); + } + if (message.value !== undefined) { + Reference.encode(message.value, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): CheckedExpr_ReferenceMapEntry { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCheckedExpr_ReferenceMapEntry(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 8) { + break; + } + + message.key = longToNumber(reader.int64() as Long); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.value = Reference.decode(reader, reader.uint32()); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): CheckedExpr_ReferenceMapEntry { + return { + key: isSet(object.key) ? Number(object.key) : 0, + value: isSet(object.value) ? Reference.fromJSON(object.value) : undefined, + }; + }, + + toJSON(message: CheckedExpr_ReferenceMapEntry): unknown { + const obj: any = {}; + if (message.key !== 0) { + obj.key = Math.round(message.key); + } + if (message.value !== undefined) { + obj.value = Reference.toJSON(message.value); + } + return obj; + }, + + create(base?: DeepPartial): CheckedExpr_ReferenceMapEntry { + return CheckedExpr_ReferenceMapEntry.fromPartial(base ?? 
{}); + }, + fromPartial(object: DeepPartial): CheckedExpr_ReferenceMapEntry { + const message = createBaseCheckedExpr_ReferenceMapEntry(); + message.key = object.key ?? 0; + message.value = (object.value !== undefined && object.value !== null) + ? Reference.fromPartial(object.value) + : undefined; + return message; + }, +}; + +function createBaseCheckedExpr_TypeMapEntry(): CheckedExpr_TypeMapEntry { + return { key: 0, value: undefined }; +} + +export const CheckedExpr_TypeMapEntry = { + encode(message: CheckedExpr_TypeMapEntry, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.key !== 0) { + writer.uint32(8).int64(message.key); + } + if (message.value !== undefined) { + Type.encode(message.value, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): CheckedExpr_TypeMapEntry { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCheckedExpr_TypeMapEntry(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 8) { + break; + } + + message.key = longToNumber(reader.int64() as Long); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.value = Type.decode(reader, reader.uint32()); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): CheckedExpr_TypeMapEntry { + return { + key: isSet(object.key) ? Number(object.key) : 0, + value: isSet(object.value) ? 
Type.fromJSON(object.value) : undefined, + }; + }, + + toJSON(message: CheckedExpr_TypeMapEntry): unknown { + const obj: any = {}; + if (message.key !== 0) { + obj.key = Math.round(message.key); + } + if (message.value !== undefined) { + obj.value = Type.toJSON(message.value); + } + return obj; + }, + + create(base?: DeepPartial): CheckedExpr_TypeMapEntry { + return CheckedExpr_TypeMapEntry.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): CheckedExpr_TypeMapEntry { + const message = createBaseCheckedExpr_TypeMapEntry(); + message.key = object.key ?? 0; + message.value = (object.value !== undefined && object.value !== null) ? Type.fromPartial(object.value) : undefined; + return message; + }, +}; + +function createBaseType(): Type { + return { + dyn: undefined, + null: undefined, + primitive: undefined, + wrapper: undefined, + wellKnown: undefined, + listType: undefined, + mapType: undefined, + function: undefined, + messageType: undefined, + typeParam: undefined, + type: undefined, + error: undefined, + abstractType: undefined, + }; +} + +export const Type = { + encode(message: Type, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.dyn !== undefined) { + Empty.encode(message.dyn, writer.uint32(10).fork()).ldelim(); + } + if (message.null !== undefined) { + writer.uint32(16).int32(nullValueToNumber(message.null)); + } + if (message.primitive !== undefined) { + writer.uint32(24).int32(type_PrimitiveTypeToNumber(message.primitive)); + } + if (message.wrapper !== undefined) { + writer.uint32(32).int32(type_PrimitiveTypeToNumber(message.wrapper)); + } + if (message.wellKnown !== undefined) { + writer.uint32(40).int32(type_WellKnownTypeToNumber(message.wellKnown)); + } + if (message.listType !== undefined) { + Type_ListType.encode(message.listType, writer.uint32(50).fork()).ldelim(); + } + if (message.mapType !== undefined) { + Type_MapType.encode(message.mapType, writer.uint32(58).fork()).ldelim(); + } + if (message.function !== 
undefined) { + Type_FunctionType.encode(message.function, writer.uint32(66).fork()).ldelim(); + } + if (message.messageType !== undefined) { + writer.uint32(74).string(message.messageType); + } + if (message.typeParam !== undefined) { + writer.uint32(82).string(message.typeParam); + } + if (message.type !== undefined) { + Type.encode(message.type, writer.uint32(90).fork()).ldelim(); + } + if (message.error !== undefined) { + Empty.encode(message.error, writer.uint32(98).fork()).ldelim(); + } + if (message.abstractType !== undefined) { + Type_AbstractType.encode(message.abstractType, writer.uint32(114).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Type { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseType(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.dyn = Empty.decode(reader, reader.uint32()); + continue; + case 2: + if (tag !== 16) { + break; + } + + message.null = nullValueFromJSON(reader.int32()); + continue; + case 3: + if (tag !== 24) { + break; + } + + message.primitive = type_PrimitiveTypeFromJSON(reader.int32()); + continue; + case 4: + if (tag !== 32) { + break; + } + + message.wrapper = type_PrimitiveTypeFromJSON(reader.int32()); + continue; + case 5: + if (tag !== 40) { + break; + } + + message.wellKnown = type_WellKnownTypeFromJSON(reader.int32()); + continue; + case 6: + if (tag !== 50) { + break; + } + + message.listType = Type_ListType.decode(reader, reader.uint32()); + continue; + case 7: + if (tag !== 58) { + break; + } + + message.mapType = Type_MapType.decode(reader, reader.uint32()); + continue; + case 8: + if (tag !== 66) { + break; + } + + message.function = Type_FunctionType.decode(reader, reader.uint32()); + continue; + case 9: + if (tag !== 74) { + break; + } + 
+ message.messageType = reader.string(); + continue; + case 10: + if (tag !== 82) { + break; + } + + message.typeParam = reader.string(); + continue; + case 11: + if (tag !== 90) { + break; + } + + message.type = Type.decode(reader, reader.uint32()); + continue; + case 12: + if (tag !== 98) { + break; + } + + message.error = Empty.decode(reader, reader.uint32()); + continue; + case 14: + if (tag !== 114) { + break; + } + + message.abstractType = Type_AbstractType.decode(reader, reader.uint32()); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): Type { + return { + dyn: isSet(object.dyn) ? Empty.fromJSON(object.dyn) : undefined, + null: isSet(object.null) ? nullValueFromJSON(object.null) : undefined, + primitive: isSet(object.primitive) ? type_PrimitiveTypeFromJSON(object.primitive) : undefined, + wrapper: isSet(object.wrapper) ? type_PrimitiveTypeFromJSON(object.wrapper) : undefined, + wellKnown: isSet(object.wellKnown) ? type_WellKnownTypeFromJSON(object.wellKnown) : undefined, + listType: isSet(object.listType) ? Type_ListType.fromJSON(object.listType) : undefined, + mapType: isSet(object.mapType) ? Type_MapType.fromJSON(object.mapType) : undefined, + function: isSet(object.function) ? Type_FunctionType.fromJSON(object.function) : undefined, + messageType: isSet(object.messageType) ? String(object.messageType) : undefined, + typeParam: isSet(object.typeParam) ? String(object.typeParam) : undefined, + type: isSet(object.type) ? Type.fromJSON(object.type) : undefined, + error: isSet(object.error) ? Empty.fromJSON(object.error) : undefined, + abstractType: isSet(object.abstractType) ? 
Type_AbstractType.fromJSON(object.abstractType) : undefined, + }; + }, + + toJSON(message: Type): unknown { + const obj: any = {}; + if (message.dyn !== undefined) { + obj.dyn = Empty.toJSON(message.dyn); + } + if (message.null !== undefined) { + obj.null = nullValueToJSON(message.null); + } + if (message.primitive !== undefined) { + obj.primitive = type_PrimitiveTypeToJSON(message.primitive); + } + if (message.wrapper !== undefined) { + obj.wrapper = type_PrimitiveTypeToJSON(message.wrapper); + } + if (message.wellKnown !== undefined) { + obj.wellKnown = type_WellKnownTypeToJSON(message.wellKnown); + } + if (message.listType !== undefined) { + obj.listType = Type_ListType.toJSON(message.listType); + } + if (message.mapType !== undefined) { + obj.mapType = Type_MapType.toJSON(message.mapType); + } + if (message.function !== undefined) { + obj.function = Type_FunctionType.toJSON(message.function); + } + if (message.messageType !== undefined) { + obj.messageType = message.messageType; + } + if (message.typeParam !== undefined) { + obj.typeParam = message.typeParam; + } + if (message.type !== undefined) { + obj.type = Type.toJSON(message.type); + } + if (message.error !== undefined) { + obj.error = Empty.toJSON(message.error); + } + if (message.abstractType !== undefined) { + obj.abstractType = Type_AbstractType.toJSON(message.abstractType); + } + return obj; + }, + + create(base?: DeepPartial): Type { + return Type.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): Type { + const message = createBaseType(); + message.dyn = (object.dyn !== undefined && object.dyn !== null) ? Empty.fromPartial(object.dyn) : undefined; + message.null = object.null ?? undefined; + message.primitive = object.primitive ?? undefined; + message.wrapper = object.wrapper ?? undefined; + message.wellKnown = object.wellKnown ?? undefined; + message.listType = (object.listType !== undefined && object.listType !== null) + ? 
Type_ListType.fromPartial(object.listType) + : undefined; + message.mapType = (object.mapType !== undefined && object.mapType !== null) + ? Type_MapType.fromPartial(object.mapType) + : undefined; + message.function = (object.function !== undefined && object.function !== null) + ? Type_FunctionType.fromPartial(object.function) + : undefined; + message.messageType = object.messageType ?? undefined; + message.typeParam = object.typeParam ?? undefined; + message.type = (object.type !== undefined && object.type !== null) ? Type.fromPartial(object.type) : undefined; + message.error = (object.error !== undefined && object.error !== null) ? Empty.fromPartial(object.error) : undefined; + message.abstractType = (object.abstractType !== undefined && object.abstractType !== null) + ? Type_AbstractType.fromPartial(object.abstractType) + : undefined; + return message; + }, +}; + +function createBaseType_ListType(): Type_ListType { + return { elemType: undefined }; +} + +export const Type_ListType = { + encode(message: Type_ListType, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.elemType !== undefined) { + Type.encode(message.elemType, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Type_ListType { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseType_ListType(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.elemType = Type.decode(reader, reader.uint32()); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): Type_ListType { + return { elemType: isSet(object.elemType) ? 
Type.fromJSON(object.elemType) : undefined }; + }, + + toJSON(message: Type_ListType): unknown { + const obj: any = {}; + if (message.elemType !== undefined) { + obj.elemType = Type.toJSON(message.elemType); + } + return obj; + }, + + create(base?: DeepPartial): Type_ListType { + return Type_ListType.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): Type_ListType { + const message = createBaseType_ListType(); + message.elemType = (object.elemType !== undefined && object.elemType !== null) + ? Type.fromPartial(object.elemType) + : undefined; + return message; + }, +}; + +function createBaseType_MapType(): Type_MapType { + return { keyType: undefined, valueType: undefined }; +} + +export const Type_MapType = { + encode(message: Type_MapType, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.keyType !== undefined) { + Type.encode(message.keyType, writer.uint32(10).fork()).ldelim(); + } + if (message.valueType !== undefined) { + Type.encode(message.valueType, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Type_MapType { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseType_MapType(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.keyType = Type.decode(reader, reader.uint32()); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.valueType = Type.decode(reader, reader.uint32()); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): Type_MapType { + return { + keyType: isSet(object.keyType) ? Type.fromJSON(object.keyType) : undefined, + valueType: isSet(object.valueType) ? 
Type.fromJSON(object.valueType) : undefined, + }; + }, + + toJSON(message: Type_MapType): unknown { + const obj: any = {}; + if (message.keyType !== undefined) { + obj.keyType = Type.toJSON(message.keyType); + } + if (message.valueType !== undefined) { + obj.valueType = Type.toJSON(message.valueType); + } + return obj; + }, + + create(base?: DeepPartial): Type_MapType { + return Type_MapType.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): Type_MapType { + const message = createBaseType_MapType(); + message.keyType = (object.keyType !== undefined && object.keyType !== null) + ? Type.fromPartial(object.keyType) + : undefined; + message.valueType = (object.valueType !== undefined && object.valueType !== null) + ? Type.fromPartial(object.valueType) + : undefined; + return message; + }, +}; + +function createBaseType_FunctionType(): Type_FunctionType { + return { resultType: undefined, argTypes: [] }; +} + +export const Type_FunctionType = { + encode(message: Type_FunctionType, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.resultType !== undefined) { + Type.encode(message.resultType, writer.uint32(10).fork()).ldelim(); + } + for (const v of message.argTypes) { + Type.encode(v!, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Type_FunctionType { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseType_FunctionType(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.resultType = Type.decode(reader, reader.uint32()); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.argTypes.push(Type.decode(reader, reader.uint32())); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): Type_FunctionType { + return { + resultType: isSet(object.resultType) ? Type.fromJSON(object.resultType) : undefined, + argTypes: Array.isArray(object?.argTypes) ? object.argTypes.map((e: any) => Type.fromJSON(e)) : [], + }; + }, + + toJSON(message: Type_FunctionType): unknown { + const obj: any = {}; + if (message.resultType !== undefined) { + obj.resultType = Type.toJSON(message.resultType); + } + if (message.argTypes?.length) { + obj.argTypes = message.argTypes.map((e) => Type.toJSON(e)); + } + return obj; + }, + + create(base?: DeepPartial): Type_FunctionType { + return Type_FunctionType.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): Type_FunctionType { + const message = createBaseType_FunctionType(); + message.resultType = (object.resultType !== undefined && object.resultType !== null) + ? 
Type.fromPartial(object.resultType) + : undefined; + message.argTypes = object.argTypes?.map((e) => Type.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseType_AbstractType(): Type_AbstractType { + return { name: "", parameterTypes: [] }; +} + +export const Type_AbstractType = { + encode(message: Type_AbstractType, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.parameterTypes) { + Type.encode(v!, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Type_AbstractType { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseType_AbstractType(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.parameterTypes.push(Type.decode(reader, reader.uint32())); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): Type_AbstractType { + return { + name: isSet(object.name) ? String(object.name) : "", + parameterTypes: Array.isArray(object?.parameterTypes) + ? object.parameterTypes.map((e: any) => Type.fromJSON(e)) + : [], + }; + }, + + toJSON(message: Type_AbstractType): unknown { + const obj: any = {}; + if (message.name !== "") { + obj.name = message.name; + } + if (message.parameterTypes?.length) { + obj.parameterTypes = message.parameterTypes.map((e) => Type.toJSON(e)); + } + return obj; + }, + + create(base?: DeepPartial): Type_AbstractType { + return Type_AbstractType.fromPartial(base ?? 
{}); + }, + fromPartial(object: DeepPartial): Type_AbstractType { + const message = createBaseType_AbstractType(); + message.name = object.name ?? ""; + message.parameterTypes = object.parameterTypes?.map((e) => Type.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseDecl(): Decl { + return { name: "", ident: undefined, function: undefined }; +} + +export const Decl = { + encode(message: Decl, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.ident !== undefined) { + Decl_IdentDecl.encode(message.ident, writer.uint32(18).fork()).ldelim(); + } + if (message.function !== undefined) { + Decl_FunctionDecl.encode(message.function, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Decl { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDecl(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.ident = Decl_IdentDecl.decode(reader, reader.uint32()); + continue; + case 3: + if (tag !== 26) { + break; + } + + message.function = Decl_FunctionDecl.decode(reader, reader.uint32()); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): Decl { + return { + name: isSet(object.name) ? String(object.name) : "", + ident: isSet(object.ident) ? Decl_IdentDecl.fromJSON(object.ident) : undefined, + function: isSet(object.function) ? 
Decl_FunctionDecl.fromJSON(object.function) : undefined, + }; + }, + + toJSON(message: Decl): unknown { + const obj: any = {}; + if (message.name !== "") { + obj.name = message.name; + } + if (message.ident !== undefined) { + obj.ident = Decl_IdentDecl.toJSON(message.ident); + } + if (message.function !== undefined) { + obj.function = Decl_FunctionDecl.toJSON(message.function); + } + return obj; + }, + + create(base?: DeepPartial): Decl { + return Decl.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): Decl { + const message = createBaseDecl(); + message.name = object.name ?? ""; + message.ident = (object.ident !== undefined && object.ident !== null) + ? Decl_IdentDecl.fromPartial(object.ident) + : undefined; + message.function = (object.function !== undefined && object.function !== null) + ? Decl_FunctionDecl.fromPartial(object.function) + : undefined; + return message; + }, +}; + +function createBaseDecl_IdentDecl(): Decl_IdentDecl { + return { type: undefined, value: undefined, doc: "" }; +} + +export const Decl_IdentDecl = { + encode(message: Decl_IdentDecl, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.type !== undefined) { + Type.encode(message.type, writer.uint32(10).fork()).ldelim(); + } + if (message.value !== undefined) { + Constant.encode(message.value, writer.uint32(18).fork()).ldelim(); + } + if (message.doc !== "") { + writer.uint32(26).string(message.doc); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Decl_IdentDecl { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseDecl_IdentDecl(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.type = Type.decode(reader, reader.uint32()); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.value = Constant.decode(reader, reader.uint32()); + continue; + case 3: + if (tag !== 26) { + break; + } + + message.doc = reader.string(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): Decl_IdentDecl { + return { + type: isSet(object.type) ? Type.fromJSON(object.type) : undefined, + value: isSet(object.value) ? Constant.fromJSON(object.value) : undefined, + doc: isSet(object.doc) ? String(object.doc) : "", + }; + }, + + toJSON(message: Decl_IdentDecl): unknown { + const obj: any = {}; + if (message.type !== undefined) { + obj.type = Type.toJSON(message.type); + } + if (message.value !== undefined) { + obj.value = Constant.toJSON(message.value); + } + if (message.doc !== "") { + obj.doc = message.doc; + } + return obj; + }, + + create(base?: DeepPartial): Decl_IdentDecl { + return Decl_IdentDecl.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): Decl_IdentDecl { + const message = createBaseDecl_IdentDecl(); + message.type = (object.type !== undefined && object.type !== null) ? Type.fromPartial(object.type) : undefined; + message.value = (object.value !== undefined && object.value !== null) + ? Constant.fromPartial(object.value) + : undefined; + message.doc = object.doc ?? 
""; + return message; + }, +}; + +function createBaseDecl_FunctionDecl(): Decl_FunctionDecl { + return { overloads: [] }; +} + +export const Decl_FunctionDecl = { + encode(message: Decl_FunctionDecl, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.overloads) { + Decl_FunctionDecl_Overload.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Decl_FunctionDecl { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDecl_FunctionDecl(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.overloads.push(Decl_FunctionDecl_Overload.decode(reader, reader.uint32())); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): Decl_FunctionDecl { + return { + overloads: Array.isArray(object?.overloads) + ? object.overloads.map((e: any) => Decl_FunctionDecl_Overload.fromJSON(e)) + : [], + }; + }, + + toJSON(message: Decl_FunctionDecl): unknown { + const obj: any = {}; + if (message.overloads?.length) { + obj.overloads = message.overloads.map((e) => Decl_FunctionDecl_Overload.toJSON(e)); + } + return obj; + }, + + create(base?: DeepPartial): Decl_FunctionDecl { + return Decl_FunctionDecl.fromPartial(base ?? 
{}); + }, + fromPartial(object: DeepPartial): Decl_FunctionDecl { + const message = createBaseDecl_FunctionDecl(); + message.overloads = object.overloads?.map((e) => Decl_FunctionDecl_Overload.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseDecl_FunctionDecl_Overload(): Decl_FunctionDecl_Overload { + return { overloadId: "", params: [], typeParams: [], resultType: undefined, isInstanceFunction: false, doc: "" }; +} + +export const Decl_FunctionDecl_Overload = { + encode(message: Decl_FunctionDecl_Overload, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.overloadId !== "") { + writer.uint32(10).string(message.overloadId); + } + for (const v of message.params) { + Type.encode(v!, writer.uint32(18).fork()).ldelim(); + } + for (const v of message.typeParams) { + writer.uint32(26).string(v!); + } + if (message.resultType !== undefined) { + Type.encode(message.resultType, writer.uint32(34).fork()).ldelim(); + } + if (message.isInstanceFunction === true) { + writer.uint32(40).bool(message.isInstanceFunction); + } + if (message.doc !== "") { + writer.uint32(50).string(message.doc); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Decl_FunctionDecl_Overload { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseDecl_FunctionDecl_Overload(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.overloadId = reader.string(); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.params.push(Type.decode(reader, reader.uint32())); + continue; + case 3: + if (tag !== 26) { + break; + } + + message.typeParams.push(reader.string()); + continue; + case 4: + if (tag !== 34) { + break; + } + + message.resultType = Type.decode(reader, reader.uint32()); + continue; + case 5: + if (tag !== 40) { + break; + } + + message.isInstanceFunction = reader.bool(); + continue; + case 6: + if (tag !== 50) { + break; + } + + message.doc = reader.string(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): Decl_FunctionDecl_Overload { + return { + overloadId: isSet(object.overloadId) ? String(object.overloadId) : "", + params: Array.isArray(object?.params) ? object.params.map((e: any) => Type.fromJSON(e)) : [], + typeParams: Array.isArray(object?.typeParams) ? object.typeParams.map((e: any) => String(e)) : [], + resultType: isSet(object.resultType) ? Type.fromJSON(object.resultType) : undefined, + isInstanceFunction: isSet(object.isInstanceFunction) ? Boolean(object.isInstanceFunction) : false, + doc: isSet(object.doc) ? 
String(object.doc) : "", + }; + }, + + toJSON(message: Decl_FunctionDecl_Overload): unknown { + const obj: any = {}; + if (message.overloadId !== "") { + obj.overloadId = message.overloadId; + } + if (message.params?.length) { + obj.params = message.params.map((e) => Type.toJSON(e)); + } + if (message.typeParams?.length) { + obj.typeParams = message.typeParams; + } + if (message.resultType !== undefined) { + obj.resultType = Type.toJSON(message.resultType); + } + if (message.isInstanceFunction === true) { + obj.isInstanceFunction = message.isInstanceFunction; + } + if (message.doc !== "") { + obj.doc = message.doc; + } + return obj; + }, + + create(base?: DeepPartial): Decl_FunctionDecl_Overload { + return Decl_FunctionDecl_Overload.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): Decl_FunctionDecl_Overload { + const message = createBaseDecl_FunctionDecl_Overload(); + message.overloadId = object.overloadId ?? ""; + message.params = object.params?.map((e) => Type.fromPartial(e)) || []; + message.typeParams = object.typeParams?.map((e) => e) || []; + message.resultType = (object.resultType !== undefined && object.resultType !== null) + ? Type.fromPartial(object.resultType) + : undefined; + message.isInstanceFunction = object.isInstanceFunction ?? false; + message.doc = object.doc ?? ""; + return message; + }, +}; + +function createBaseReference(): Reference { + return { name: "", overloadId: [], value: undefined }; +} + +export const Reference = { + encode(message: Reference, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.overloadId) { + writer.uint32(26).string(v!); + } + if (message.value !== undefined) { + Constant.encode(message.value, writer.uint32(34).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Reference { + const reader = input instanceof _m0.Reader ? 
input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseReference(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + case 3: + if (tag !== 26) { + break; + } + + message.overloadId.push(reader.string()); + continue; + case 4: + if (tag !== 34) { + break; + } + + message.value = Constant.decode(reader, reader.uint32()); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): Reference { + return { + name: isSet(object.name) ? String(object.name) : "", + overloadId: Array.isArray(object?.overloadId) ? object.overloadId.map((e: any) => String(e)) : [], + value: isSet(object.value) ? Constant.fromJSON(object.value) : undefined, + }; + }, + + toJSON(message: Reference): unknown { + const obj: any = {}; + if (message.name !== "") { + obj.name = message.name; + } + if (message.overloadId?.length) { + obj.overloadId = message.overloadId; + } + if (message.value !== undefined) { + obj.value = Constant.toJSON(message.value); + } + return obj; + }, + + create(base?: DeepPartial): Reference { + return Reference.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): Reference { + const message = createBaseReference(); + message.name = object.name ?? ""; + message.overloadId = object.overloadId?.map((e) => e) || []; + message.value = (object.value !== undefined && object.value !== null) + ? 
Constant.fromPartial(object.value) + : undefined; + return message; + }, +}; + +export interface DataLoaderOptions { + cache?: boolean; +} + +export interface DataLoaders { + rpcDataLoaderOptions?: DataLoaderOptions; + getDataLoader(identifier: string, constructorFn: () => T): T; +} + +declare const self: any | undefined; +declare const window: any | undefined; +declare const global: any | undefined; +const tsProtoGlobalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new tsProtoGlobalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isObject(value: any): boolean { + return typeof value === "object" && value !== null; +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/components/spicedb/typescript/src/google/api/expr/v1alpha1/syntax.pb.ts b/components/spicedb/typescript/src/google/api/expr/v1alpha1/syntax.pb.ts new file mode 100644 index 00000000000000..1ca5960dcb1f16 --- /dev/null +++ b/components/spicedb/typescript/src/google/api/expr/v1alpha1/syntax.pb.ts @@ -0,0 +1,2004 @@ +/** + * Copyright (c) 2023 Gitpod GmbH. All rights reserved. + * Licensed under the GNU Affero General Public License (AGPL). 
+ * See License.AGPL.txt in the project root for license information. + */ + +/* eslint-disable */ +import * as _m0 from "protobufjs/minimal"; +import { Duration } from "../../../protobuf/duration.pb"; +import { NullValue, nullValueFromJSON, nullValueToJSON, nullValueToNumber } from "../../../protobuf/struct.pb"; +import { Timestamp } from "../../../protobuf/timestamp.pb"; +import Long = require("long"); + +export const protobufPackage = "google.api.expr.v1alpha1"; + +/** An expression together with source information as returned by the parser. */ +export interface ParsedExpr { + /** The parsed expression. */ + expr: + | Expr + | undefined; + /** The source info derived from input that generated the parsed `expr`. */ + sourceInfo: SourceInfo | undefined; +} + +/** + * An abstract representation of a common expression. + * + * Expressions are abstractly represented as a collection of identifiers, + * select statements, function calls, literals, and comprehensions. All + * operators with the exception of the '.' operator are modelled as function + * calls. This makes it easy to represent new operators into the existing AST. + * + * All references within expressions must resolve to a [Decl][google.api.expr.v1alpha1.Decl] provided at + * type-check for an expression to be valid. A reference may either be a bare + * identifier `name` or a qualified identifier `google.api.name`. References + * may either refer to a value or a function declaration. + * + * For example, the expression `google.api.name.startsWith('expr')` references + * the declaration `google.api.name` within a [Expr.Select][google.api.expr.v1alpha1.Expr.Select] expression, and + * the function declaration `startsWith`. + */ +export interface Expr { + /** + * Required. An id assigned to this node by the parser which is unique in a + * given expression tree. This is used to associate type information and other + * attributes to a node in the parse tree. + */ + id: number; + /** A literal expression. 
*/ + constExpr?: + | Constant + | undefined; + /** An identifier expression. */ + identExpr?: + | Expr_Ident + | undefined; + /** A field selection expression, e.g. `request.auth`. */ + selectExpr?: + | Expr_Select + | undefined; + /** A call expression, including calls to predefined functions and operators. */ + callExpr?: + | Expr_Call + | undefined; + /** A list creation expression. */ + listExpr?: + | Expr_CreateList + | undefined; + /** A map or message creation expression. */ + structExpr?: + | Expr_CreateStruct + | undefined; + /** A comprehension expression. */ + comprehensionExpr?: Expr_Comprehension | undefined; +} + +/** An identifier expression. e.g. `request`. */ +export interface Expr_Ident { + /** + * Required. Holds a single, unqualified identifier, possibly preceded by a + * '.'. + * + * Qualified names are represented by the [Expr.Select][google.api.expr.v1alpha1.Expr.Select] expression. + */ + name: string; +} + +/** A field selection expression. e.g. `request.auth`. */ +export interface Expr_Select { + /** + * Required. The target of the selection expression. + * + * For example, in the select expression `request.auth`, the `request` + * portion of the expression is the `operand`. + */ + operand: + | Expr + | undefined; + /** + * Required. The name of the field to select. + * + * For example, in the select expression `request.auth`, the `auth` portion + * of the expression would be the `field`. + */ + field: string; + /** + * Whether the select is to be interpreted as a field presence test. + * + * This results from the macro `has(request.auth)`. + */ + testOnly: boolean; +} + +/** + * A call expression, including calls to predefined functions and operators. + * + * For example, `value == 10`, `size(map_value)`. + */ +export interface Expr_Call { + /** + * The target of an method call-style expression. For example, `x` in + * `x.f()`. + */ + target: + | Expr + | undefined; + /** Required. The name of the function or method being called. 
*/ + function: string; + /** The arguments. */ + args: Expr[]; +} + +/** + * A list creation expression. + * + * Lists may either be homogenous, e.g. `[1, 2, 3]`, or heterogeneous, e.g. + * `dyn([1, 'hello', 2.0])` + */ +export interface Expr_CreateList { + /** The elements part of the list. */ + elements: Expr[]; +} + +/** + * A map or message creation expression. + * + * Maps are constructed as `{'key_name': 'value'}`. Message construction is + * similar, but prefixed with a type name and composed of field ids: + * `types.MyType{field_id: 'value'}`. + */ +export interface Expr_CreateStruct { + /** + * The type name of the message to be created, empty when creating map + * literals. + */ + messageName: string; + /** The entries in the creation expression. */ + entries: Expr_CreateStruct_Entry[]; +} + +/** Represents an entry. */ +export interface Expr_CreateStruct_Entry { + /** + * Required. An id assigned to this node by the parser which is unique + * in a given expression tree. This is used to associate type + * information and other attributes to the node. + */ + id: number; + /** The field key for a message creator statement. */ + fieldKey?: + | string + | undefined; + /** The key expression for a map creation statement. */ + mapKey?: + | Expr + | undefined; + /** Required. The value assigned to the key. */ + value: Expr | undefined; +} + +/** + * A comprehension expression applied to a list or map. + * + * Comprehensions are not part of the core syntax, but enabled with macros. + * A macro matches a specific call signature within a parsed AST and replaces + * the call with an alternate AST block. Macro expansion happens at parse + * time. 
+ * + * The following macros are supported within CEL: + * + * Aggregate type macros may be applied to all elements in a list or all keys + * in a map: + * + * * `all`, `exists`, `exists_one` - test a predicate expression against + * the inputs and return `true` if the predicate is satisfied for all, + * any, or only one value `list.all(x, x < 10)`. + * * `filter` - test a predicate expression against the inputs and return + * the subset of elements which satisfy the predicate: + * `payments.filter(p, p > 1000)`. + * * `map` - apply an expression to all elements in the input and return the + * output aggregate type: `[1, 2, 3].map(i, i * i)`. + * + * The `has(m.x)` macro tests whether the property `x` is present in struct + * `m`. The semantics of this macro depend on the type of `m`. For proto2 + * messages `has(m.x)` is defined as 'defined, but not set`. For proto3, the + * macro tests whether the property is set to its default. For map and struct + * types, the macro tests whether the property `x` is defined on `m`. + */ +export interface Expr_Comprehension { + /** The name of the iteration variable. */ + iterVar: string; + /** The range over which var iterates. */ + iterRange: + | Expr + | undefined; + /** The name of the variable used for accumulation of the result. */ + accuVar: string; + /** The initial value of the accumulator. */ + accuInit: + | Expr + | undefined; + /** + * An expression which can contain iter_var and accu_var. + * + * Returns false when the result has been computed and may be used as + * a hint to short-circuit the remainder of the comprehension. + */ + loopCondition: + | Expr + | undefined; + /** + * An expression which can contain iter_var and accu_var. + * + * Computes the next value of accu_var. + */ + loopStep: + | Expr + | undefined; + /** + * An expression which can contain accu_var. + * + * Computes the result. + */ + result: Expr | undefined; +} + +/** + * Represents a primitive literal. 
+ * + * Named 'Constant' here for backwards compatibility. + * + * This is similar as the primitives supported in the well-known type + * `google.protobuf.Value`, but richer so it can represent CEL's full range of + * primitives. + * + * Lists and structs are not included as constants as these aggregate types may + * contain [Expr][google.api.expr.v1alpha1.Expr] elements which require evaluation and are thus not constant. + * + * Examples of literals include: `"hello"`, `b'bytes'`, `1u`, `4.2`, `-2`, + * `true`, `null`. + */ +export interface Constant { + /** null value. */ + nullValue?: + | NullValue + | undefined; + /** boolean value. */ + boolValue?: + | boolean + | undefined; + /** int64 value. */ + int64Value?: + | number + | undefined; + /** uint64 value. */ + uint64Value?: + | number + | undefined; + /** double value. */ + doubleValue?: + | number + | undefined; + /** string value. */ + stringValue?: + | string + | undefined; + /** bytes value. */ + bytesValue?: + | Uint8Array + | undefined; + /** + * protobuf.Duration value. + * + * Deprecated: duration is no longer considered a builtin cel type. + * + * @deprecated + */ + durationValue?: + | Duration + | undefined; + /** + * protobuf.Timestamp value. + * + * Deprecated: timestamp is no longer considered a builtin cel type. + * + * @deprecated + */ + timestampValue?: Date | undefined; +} + +/** Source information collected at parse time. */ +export interface SourceInfo { + /** The syntax version of the source, e.g. `cel1`. */ + syntaxVersion: string; + /** + * The location name. All position information attached to an expression is + * relative to this location. + * + * The location could be a file, UI element, or similar. For example, + * `acme/app/AnvilPolicy.cel`. + */ + location: string; + /** + * Monotonically increasing list of code point offsets where newlines + * `\n` appear. 
+ * + * The line number of a given position is the index `i` where for a given + * `id` the `line_offsets[i] < id_positions[id] < line_offsets[i+1]`. The + * column may be derivd from `id_positions[id] - line_offsets[i]`. + */ + lineOffsets: number[]; + /** + * A map from the parse node id (e.g. `Expr.id`) to the code point offset + * within the source. + */ + positions: { [key: number]: number }; + /** + * A map from the parse node id where a macro replacement was made to the + * call `Expr` that resulted in a macro expansion. + * + * For example, `has(value.field)` is a function call that is replaced by a + * `test_only` field selection in the AST. Likewise, the call + * `list.exists(e, e > 10)` translates to a comprehension expression. The key + * in the map corresponds to the expression id of the expanded macro, and the + * value is the call `Expr` that was replaced. + */ + macroCalls: { [key: number]: Expr }; +} + +export interface SourceInfo_PositionsEntry { + key: number; + value: number; +} + +export interface SourceInfo_MacroCallsEntry { + key: number; + value: Expr | undefined; +} + +/** A specific position in source. */ +export interface SourcePosition { + /** The soucre location name (e.g. file name). */ + location: string; + /** The UTF-8 code unit offset. */ + offset: number; + /** + * The 1-based index of the starting line in the source text + * where the issue occurs, or 0 if unknown. + */ + line: number; + /** + * The 0-based index of the starting position within the line of source text + * where the issue occurs. Only meaningful if line is nonzero. 
+ */ + column: number; +} + +function createBaseParsedExpr(): ParsedExpr { + return { expr: undefined, sourceInfo: undefined }; +} + +export const ParsedExpr = { + encode(message: ParsedExpr, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.expr !== undefined) { + Expr.encode(message.expr, writer.uint32(18).fork()).ldelim(); + } + if (message.sourceInfo !== undefined) { + SourceInfo.encode(message.sourceInfo, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ParsedExpr { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseParsedExpr(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 2: + if (tag !== 18) { + break; + } + + message.expr = Expr.decode(reader, reader.uint32()); + continue; + case 3: + if (tag !== 26) { + break; + } + + message.sourceInfo = SourceInfo.decode(reader, reader.uint32()); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): ParsedExpr { + return { + expr: isSet(object.expr) ? Expr.fromJSON(object.expr) : undefined, + sourceInfo: isSet(object.sourceInfo) ? SourceInfo.fromJSON(object.sourceInfo) : undefined, + }; + }, + + toJSON(message: ParsedExpr): unknown { + const obj: any = {}; + if (message.expr !== undefined) { + obj.expr = Expr.toJSON(message.expr); + } + if (message.sourceInfo !== undefined) { + obj.sourceInfo = SourceInfo.toJSON(message.sourceInfo); + } + return obj; + }, + + create(base?: DeepPartial): ParsedExpr { + return ParsedExpr.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): ParsedExpr { + const message = createBaseParsedExpr(); + message.expr = (object.expr !== undefined && object.expr !== null) ? 
Expr.fromPartial(object.expr) : undefined; + message.sourceInfo = (object.sourceInfo !== undefined && object.sourceInfo !== null) + ? SourceInfo.fromPartial(object.sourceInfo) + : undefined; + return message; + }, +}; + +function createBaseExpr(): Expr { + return { + id: 0, + constExpr: undefined, + identExpr: undefined, + selectExpr: undefined, + callExpr: undefined, + listExpr: undefined, + structExpr: undefined, + comprehensionExpr: undefined, + }; +} + +export const Expr = { + encode(message: Expr, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.id !== 0) { + writer.uint32(16).int64(message.id); + } + if (message.constExpr !== undefined) { + Constant.encode(message.constExpr, writer.uint32(26).fork()).ldelim(); + } + if (message.identExpr !== undefined) { + Expr_Ident.encode(message.identExpr, writer.uint32(34).fork()).ldelim(); + } + if (message.selectExpr !== undefined) { + Expr_Select.encode(message.selectExpr, writer.uint32(42).fork()).ldelim(); + } + if (message.callExpr !== undefined) { + Expr_Call.encode(message.callExpr, writer.uint32(50).fork()).ldelim(); + } + if (message.listExpr !== undefined) { + Expr_CreateList.encode(message.listExpr, writer.uint32(58).fork()).ldelim(); + } + if (message.structExpr !== undefined) { + Expr_CreateStruct.encode(message.structExpr, writer.uint32(66).fork()).ldelim(); + } + if (message.comprehensionExpr !== undefined) { + Expr_Comprehension.encode(message.comprehensionExpr, writer.uint32(74).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Expr { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseExpr(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 2: + if (tag !== 16) { + break; + } + + message.id = longToNumber(reader.int64() as Long); + continue; + case 3: + if (tag !== 26) { + break; + } + + message.constExpr = Constant.decode(reader, reader.uint32()); + continue; + case 4: + if (tag !== 34) { + break; + } + + message.identExpr = Expr_Ident.decode(reader, reader.uint32()); + continue; + case 5: + if (tag !== 42) { + break; + } + + message.selectExpr = Expr_Select.decode(reader, reader.uint32()); + continue; + case 6: + if (tag !== 50) { + break; + } + + message.callExpr = Expr_Call.decode(reader, reader.uint32()); + continue; + case 7: + if (tag !== 58) { + break; + } + + message.listExpr = Expr_CreateList.decode(reader, reader.uint32()); + continue; + case 8: + if (tag !== 66) { + break; + } + + message.structExpr = Expr_CreateStruct.decode(reader, reader.uint32()); + continue; + case 9: + if (tag !== 74) { + break; + } + + message.comprehensionExpr = Expr_Comprehension.decode(reader, reader.uint32()); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): Expr { + return { + id: isSet(object.id) ? Number(object.id) : 0, + constExpr: isSet(object.constExpr) ? Constant.fromJSON(object.constExpr) : undefined, + identExpr: isSet(object.identExpr) ? Expr_Ident.fromJSON(object.identExpr) : undefined, + selectExpr: isSet(object.selectExpr) ? Expr_Select.fromJSON(object.selectExpr) : undefined, + callExpr: isSet(object.callExpr) ? Expr_Call.fromJSON(object.callExpr) : undefined, + listExpr: isSet(object.listExpr) ? Expr_CreateList.fromJSON(object.listExpr) : undefined, + structExpr: isSet(object.structExpr) ? Expr_CreateStruct.fromJSON(object.structExpr) : undefined, + comprehensionExpr: isSet(object.comprehensionExpr) + ? 
Expr_Comprehension.fromJSON(object.comprehensionExpr) + : undefined, + }; + }, + + toJSON(message: Expr): unknown { + const obj: any = {}; + if (message.id !== 0) { + obj.id = Math.round(message.id); + } + if (message.constExpr !== undefined) { + obj.constExpr = Constant.toJSON(message.constExpr); + } + if (message.identExpr !== undefined) { + obj.identExpr = Expr_Ident.toJSON(message.identExpr); + } + if (message.selectExpr !== undefined) { + obj.selectExpr = Expr_Select.toJSON(message.selectExpr); + } + if (message.callExpr !== undefined) { + obj.callExpr = Expr_Call.toJSON(message.callExpr); + } + if (message.listExpr !== undefined) { + obj.listExpr = Expr_CreateList.toJSON(message.listExpr); + } + if (message.structExpr !== undefined) { + obj.structExpr = Expr_CreateStruct.toJSON(message.structExpr); + } + if (message.comprehensionExpr !== undefined) { + obj.comprehensionExpr = Expr_Comprehension.toJSON(message.comprehensionExpr); + } + return obj; + }, + + create(base?: DeepPartial): Expr { + return Expr.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): Expr { + const message = createBaseExpr(); + message.id = object.id ?? 0; + message.constExpr = (object.constExpr !== undefined && object.constExpr !== null) + ? Constant.fromPartial(object.constExpr) + : undefined; + message.identExpr = (object.identExpr !== undefined && object.identExpr !== null) + ? Expr_Ident.fromPartial(object.identExpr) + : undefined; + message.selectExpr = (object.selectExpr !== undefined && object.selectExpr !== null) + ? Expr_Select.fromPartial(object.selectExpr) + : undefined; + message.callExpr = (object.callExpr !== undefined && object.callExpr !== null) + ? Expr_Call.fromPartial(object.callExpr) + : undefined; + message.listExpr = (object.listExpr !== undefined && object.listExpr !== null) + ? Expr_CreateList.fromPartial(object.listExpr) + : undefined; + message.structExpr = (object.structExpr !== undefined && object.structExpr !== null) + ? 
Expr_CreateStruct.fromPartial(object.structExpr) + : undefined; + message.comprehensionExpr = (object.comprehensionExpr !== undefined && object.comprehensionExpr !== null) + ? Expr_Comprehension.fromPartial(object.comprehensionExpr) + : undefined; + return message; + }, +}; + +function createBaseExpr_Ident(): Expr_Ident { + return { name: "" }; +} + +export const Expr_Ident = { + encode(message: Expr_Ident, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Expr_Ident { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseExpr_Ident(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): Expr_Ident { + return { name: isSet(object.name) ? String(object.name) : "" }; + }, + + toJSON(message: Expr_Ident): unknown { + const obj: any = {}; + if (message.name !== "") { + obj.name = message.name; + } + return obj; + }, + + create(base?: DeepPartial): Expr_Ident { + return Expr_Ident.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): Expr_Ident { + const message = createBaseExpr_Ident(); + message.name = object.name ?? 
""; + return message; + }, +}; + +function createBaseExpr_Select(): Expr_Select { + return { operand: undefined, field: "", testOnly: false }; +} + +export const Expr_Select = { + encode(message: Expr_Select, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.operand !== undefined) { + Expr.encode(message.operand, writer.uint32(10).fork()).ldelim(); + } + if (message.field !== "") { + writer.uint32(18).string(message.field); + } + if (message.testOnly === true) { + writer.uint32(24).bool(message.testOnly); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Expr_Select { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseExpr_Select(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.operand = Expr.decode(reader, reader.uint32()); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.field = reader.string(); + continue; + case 3: + if (tag !== 24) { + break; + } + + message.testOnly = reader.bool(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): Expr_Select { + return { + operand: isSet(object.operand) ? Expr.fromJSON(object.operand) : undefined, + field: isSet(object.field) ? String(object.field) : "", + testOnly: isSet(object.testOnly) ? Boolean(object.testOnly) : false, + }; + }, + + toJSON(message: Expr_Select): unknown { + const obj: any = {}; + if (message.operand !== undefined) { + obj.operand = Expr.toJSON(message.operand); + } + if (message.field !== "") { + obj.field = message.field; + } + if (message.testOnly === true) { + obj.testOnly = message.testOnly; + } + return obj; + }, + + create(base?: DeepPartial): Expr_Select { + return Expr_Select.fromPartial(base ?? 
{}); + }, + fromPartial(object: DeepPartial): Expr_Select { + const message = createBaseExpr_Select(); + message.operand = (object.operand !== undefined && object.operand !== null) + ? Expr.fromPartial(object.operand) + : undefined; + message.field = object.field ?? ""; + message.testOnly = object.testOnly ?? false; + return message; + }, +}; + +function createBaseExpr_Call(): Expr_Call { + return { target: undefined, function: "", args: [] }; +} + +export const Expr_Call = { + encode(message: Expr_Call, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.target !== undefined) { + Expr.encode(message.target, writer.uint32(10).fork()).ldelim(); + } + if (message.function !== "") { + writer.uint32(18).string(message.function); + } + for (const v of message.args) { + Expr.encode(v!, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Expr_Call { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseExpr_Call(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.target = Expr.decode(reader, reader.uint32()); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.function = reader.string(); + continue; + case 3: + if (tag !== 26) { + break; + } + + message.args.push(Expr.decode(reader, reader.uint32())); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): Expr_Call { + return { + target: isSet(object.target) ? Expr.fromJSON(object.target) : undefined, + function: isSet(object.function) ? String(object.function) : "", + args: Array.isArray(object?.args) ? 
object.args.map((e: any) => Expr.fromJSON(e)) : [], + }; + }, + + toJSON(message: Expr_Call): unknown { + const obj: any = {}; + if (message.target !== undefined) { + obj.target = Expr.toJSON(message.target); + } + if (message.function !== "") { + obj.function = message.function; + } + if (message.args?.length) { + obj.args = message.args.map((e) => Expr.toJSON(e)); + } + return obj; + }, + + create(base?: DeepPartial): Expr_Call { + return Expr_Call.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): Expr_Call { + const message = createBaseExpr_Call(); + message.target = (object.target !== undefined && object.target !== null) + ? Expr.fromPartial(object.target) + : undefined; + message.function = object.function ?? ""; + message.args = object.args?.map((e) => Expr.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseExpr_CreateList(): Expr_CreateList { + return { elements: [] }; +} + +export const Expr_CreateList = { + encode(message: Expr_CreateList, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.elements) { + Expr.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Expr_CreateList { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseExpr_CreateList(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.elements.push(Expr.decode(reader, reader.uint32())); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): Expr_CreateList { + return { elements: Array.isArray(object?.elements) ? 
object.elements.map((e: any) => Expr.fromJSON(e)) : [] }; + }, + + toJSON(message: Expr_CreateList): unknown { + const obj: any = {}; + if (message.elements?.length) { + obj.elements = message.elements.map((e) => Expr.toJSON(e)); + } + return obj; + }, + + create(base?: DeepPartial): Expr_CreateList { + return Expr_CreateList.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): Expr_CreateList { + const message = createBaseExpr_CreateList(); + message.elements = object.elements?.map((e) => Expr.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseExpr_CreateStruct(): Expr_CreateStruct { + return { messageName: "", entries: [] }; +} + +export const Expr_CreateStruct = { + encode(message: Expr_CreateStruct, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.messageName !== "") { + writer.uint32(10).string(message.messageName); + } + for (const v of message.entries) { + Expr_CreateStruct_Entry.encode(v!, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Expr_CreateStruct { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseExpr_CreateStruct(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.messageName = reader.string(); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.entries.push(Expr_CreateStruct_Entry.decode(reader, reader.uint32())); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): Expr_CreateStruct { + return { + messageName: isSet(object.messageName) ? String(object.messageName) : "", + entries: Array.isArray(object?.entries) + ? 
object.entries.map((e: any) => Expr_CreateStruct_Entry.fromJSON(e)) + : [], + }; + }, + + toJSON(message: Expr_CreateStruct): unknown { + const obj: any = {}; + if (message.messageName !== "") { + obj.messageName = message.messageName; + } + if (message.entries?.length) { + obj.entries = message.entries.map((e) => Expr_CreateStruct_Entry.toJSON(e)); + } + return obj; + }, + + create(base?: DeepPartial): Expr_CreateStruct { + return Expr_CreateStruct.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): Expr_CreateStruct { + const message = createBaseExpr_CreateStruct(); + message.messageName = object.messageName ?? ""; + message.entries = object.entries?.map((e) => Expr_CreateStruct_Entry.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseExpr_CreateStruct_Entry(): Expr_CreateStruct_Entry { + return { id: 0, fieldKey: undefined, mapKey: undefined, value: undefined }; +} + +export const Expr_CreateStruct_Entry = { + encode(message: Expr_CreateStruct_Entry, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.id !== 0) { + writer.uint32(8).int64(message.id); + } + if (message.fieldKey !== undefined) { + writer.uint32(18).string(message.fieldKey); + } + if (message.mapKey !== undefined) { + Expr.encode(message.mapKey, writer.uint32(26).fork()).ldelim(); + } + if (message.value !== undefined) { + Expr.encode(message.value, writer.uint32(34).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Expr_CreateStruct_Entry { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseExpr_CreateStruct_Entry(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 8) { + break; + } + + message.id = longToNumber(reader.int64() as Long); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.fieldKey = reader.string(); + continue; + case 3: + if (tag !== 26) { + break; + } + + message.mapKey = Expr.decode(reader, reader.uint32()); + continue; + case 4: + if (tag !== 34) { + break; + } + + message.value = Expr.decode(reader, reader.uint32()); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): Expr_CreateStruct_Entry { + return { + id: isSet(object.id) ? Number(object.id) : 0, + fieldKey: isSet(object.fieldKey) ? String(object.fieldKey) : undefined, + mapKey: isSet(object.mapKey) ? Expr.fromJSON(object.mapKey) : undefined, + value: isSet(object.value) ? Expr.fromJSON(object.value) : undefined, + }; + }, + + toJSON(message: Expr_CreateStruct_Entry): unknown { + const obj: any = {}; + if (message.id !== 0) { + obj.id = Math.round(message.id); + } + if (message.fieldKey !== undefined) { + obj.fieldKey = message.fieldKey; + } + if (message.mapKey !== undefined) { + obj.mapKey = Expr.toJSON(message.mapKey); + } + if (message.value !== undefined) { + obj.value = Expr.toJSON(message.value); + } + return obj; + }, + + create(base?: DeepPartial): Expr_CreateStruct_Entry { + return Expr_CreateStruct_Entry.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): Expr_CreateStruct_Entry { + const message = createBaseExpr_CreateStruct_Entry(); + message.id = object.id ?? 0; + message.fieldKey = object.fieldKey ?? undefined; + message.mapKey = (object.mapKey !== undefined && object.mapKey !== null) + ? Expr.fromPartial(object.mapKey) + : undefined; + message.value = (object.value !== undefined && object.value !== null) ? 
Expr.fromPartial(object.value) : undefined; + return message; + }, +}; + +function createBaseExpr_Comprehension(): Expr_Comprehension { + return { + iterVar: "", + iterRange: undefined, + accuVar: "", + accuInit: undefined, + loopCondition: undefined, + loopStep: undefined, + result: undefined, + }; +} + +export const Expr_Comprehension = { + encode(message: Expr_Comprehension, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.iterVar !== "") { + writer.uint32(10).string(message.iterVar); + } + if (message.iterRange !== undefined) { + Expr.encode(message.iterRange, writer.uint32(18).fork()).ldelim(); + } + if (message.accuVar !== "") { + writer.uint32(26).string(message.accuVar); + } + if (message.accuInit !== undefined) { + Expr.encode(message.accuInit, writer.uint32(34).fork()).ldelim(); + } + if (message.loopCondition !== undefined) { + Expr.encode(message.loopCondition, writer.uint32(42).fork()).ldelim(); + } + if (message.loopStep !== undefined) { + Expr.encode(message.loopStep, writer.uint32(50).fork()).ldelim(); + } + if (message.result !== undefined) { + Expr.encode(message.result, writer.uint32(58).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Expr_Comprehension { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseExpr_Comprehension(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.iterVar = reader.string(); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.iterRange = Expr.decode(reader, reader.uint32()); + continue; + case 3: + if (tag !== 26) { + break; + } + + message.accuVar = reader.string(); + continue; + case 4: + if (tag !== 34) { + break; + } + + message.accuInit = Expr.decode(reader, reader.uint32()); + continue; + case 5: + if (tag !== 42) { + break; + } + + message.loopCondition = Expr.decode(reader, reader.uint32()); + continue; + case 6: + if (tag !== 50) { + break; + } + + message.loopStep = Expr.decode(reader, reader.uint32()); + continue; + case 7: + if (tag !== 58) { + break; + } + + message.result = Expr.decode(reader, reader.uint32()); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): Expr_Comprehension { + return { + iterVar: isSet(object.iterVar) ? String(object.iterVar) : "", + iterRange: isSet(object.iterRange) ? Expr.fromJSON(object.iterRange) : undefined, + accuVar: isSet(object.accuVar) ? String(object.accuVar) : "", + accuInit: isSet(object.accuInit) ? Expr.fromJSON(object.accuInit) : undefined, + loopCondition: isSet(object.loopCondition) ? Expr.fromJSON(object.loopCondition) : undefined, + loopStep: isSet(object.loopStep) ? Expr.fromJSON(object.loopStep) : undefined, + result: isSet(object.result) ? 
Expr.fromJSON(object.result) : undefined, + }; + }, + + toJSON(message: Expr_Comprehension): unknown { + const obj: any = {}; + if (message.iterVar !== "") { + obj.iterVar = message.iterVar; + } + if (message.iterRange !== undefined) { + obj.iterRange = Expr.toJSON(message.iterRange); + } + if (message.accuVar !== "") { + obj.accuVar = message.accuVar; + } + if (message.accuInit !== undefined) { + obj.accuInit = Expr.toJSON(message.accuInit); + } + if (message.loopCondition !== undefined) { + obj.loopCondition = Expr.toJSON(message.loopCondition); + } + if (message.loopStep !== undefined) { + obj.loopStep = Expr.toJSON(message.loopStep); + } + if (message.result !== undefined) { + obj.result = Expr.toJSON(message.result); + } + return obj; + }, + + create(base?: DeepPartial): Expr_Comprehension { + return Expr_Comprehension.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): Expr_Comprehension { + const message = createBaseExpr_Comprehension(); + message.iterVar = object.iterVar ?? ""; + message.iterRange = (object.iterRange !== undefined && object.iterRange !== null) + ? Expr.fromPartial(object.iterRange) + : undefined; + message.accuVar = object.accuVar ?? ""; + message.accuInit = (object.accuInit !== undefined && object.accuInit !== null) + ? Expr.fromPartial(object.accuInit) + : undefined; + message.loopCondition = (object.loopCondition !== undefined && object.loopCondition !== null) + ? Expr.fromPartial(object.loopCondition) + : undefined; + message.loopStep = (object.loopStep !== undefined && object.loopStep !== null) + ? Expr.fromPartial(object.loopStep) + : undefined; + message.result = (object.result !== undefined && object.result !== null) + ? 
Expr.fromPartial(object.result) + : undefined; + return message; + }, +}; + +function createBaseConstant(): Constant { + return { + nullValue: undefined, + boolValue: undefined, + int64Value: undefined, + uint64Value: undefined, + doubleValue: undefined, + stringValue: undefined, + bytesValue: undefined, + durationValue: undefined, + timestampValue: undefined, + }; +} + +export const Constant = { + encode(message: Constant, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.nullValue !== undefined) { + writer.uint32(8).int32(nullValueToNumber(message.nullValue)); + } + if (message.boolValue !== undefined) { + writer.uint32(16).bool(message.boolValue); + } + if (message.int64Value !== undefined) { + writer.uint32(24).int64(message.int64Value); + } + if (message.uint64Value !== undefined) { + writer.uint32(32).uint64(message.uint64Value); + } + if (message.doubleValue !== undefined) { + writer.uint32(41).double(message.doubleValue); + } + if (message.stringValue !== undefined) { + writer.uint32(50).string(message.stringValue); + } + if (message.bytesValue !== undefined) { + writer.uint32(58).bytes(message.bytesValue); + } + if (message.durationValue !== undefined) { + Duration.encode(message.durationValue, writer.uint32(66).fork()).ldelim(); + } + if (message.timestampValue !== undefined) { + Timestamp.encode(toTimestamp(message.timestampValue), writer.uint32(74).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Constant { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseConstant(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 8) { + break; + } + + message.nullValue = nullValueFromJSON(reader.int32()); + continue; + case 2: + if (tag !== 16) { + break; + } + + message.boolValue = reader.bool(); + continue; + case 3: + if (tag !== 24) { + break; + } + + message.int64Value = longToNumber(reader.int64() as Long); + continue; + case 4: + if (tag !== 32) { + break; + } + + message.uint64Value = longToNumber(reader.uint64() as Long); + continue; + case 5: + if (tag !== 41) { + break; + } + + message.doubleValue = reader.double(); + continue; + case 6: + if (tag !== 50) { + break; + } + + message.stringValue = reader.string(); + continue; + case 7: + if (tag !== 58) { + break; + } + + message.bytesValue = reader.bytes(); + continue; + case 8: + if (tag !== 66) { + break; + } + + message.durationValue = Duration.decode(reader, reader.uint32()); + continue; + case 9: + if (tag !== 74) { + break; + } + + message.timestampValue = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): Constant { + return { + nullValue: isSet(object.nullValue) ? nullValueFromJSON(object.nullValue) : undefined, + boolValue: isSet(object.boolValue) ? Boolean(object.boolValue) : undefined, + int64Value: isSet(object.int64Value) ? Number(object.int64Value) : undefined, + uint64Value: isSet(object.uint64Value) ? Number(object.uint64Value) : undefined, + doubleValue: isSet(object.doubleValue) ? Number(object.doubleValue) : undefined, + stringValue: isSet(object.stringValue) ? String(object.stringValue) : undefined, + bytesValue: isSet(object.bytesValue) ? bytesFromBase64(object.bytesValue) : undefined, + durationValue: isSet(object.durationValue) ? 
Duration.fromJSON(object.durationValue) : undefined, + timestampValue: isSet(object.timestampValue) ? fromJsonTimestamp(object.timestampValue) : undefined, + }; + }, + + toJSON(message: Constant): unknown { + const obj: any = {}; + if (message.nullValue !== undefined) { + obj.nullValue = nullValueToJSON(message.nullValue); + } + if (message.boolValue !== undefined) { + obj.boolValue = message.boolValue; + } + if (message.int64Value !== undefined) { + obj.int64Value = Math.round(message.int64Value); + } + if (message.uint64Value !== undefined) { + obj.uint64Value = Math.round(message.uint64Value); + } + if (message.doubleValue !== undefined) { + obj.doubleValue = message.doubleValue; + } + if (message.stringValue !== undefined) { + obj.stringValue = message.stringValue; + } + if (message.bytesValue !== undefined) { + obj.bytesValue = base64FromBytes(message.bytesValue); + } + if (message.durationValue !== undefined) { + obj.durationValue = Duration.toJSON(message.durationValue); + } + if (message.timestampValue !== undefined) { + obj.timestampValue = message.timestampValue.toISOString(); + } + return obj; + }, + + create(base?: DeepPartial): Constant { + return Constant.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): Constant { + const message = createBaseConstant(); + message.nullValue = object.nullValue ?? undefined; + message.boolValue = object.boolValue ?? undefined; + message.int64Value = object.int64Value ?? undefined; + message.uint64Value = object.uint64Value ?? undefined; + message.doubleValue = object.doubleValue ?? undefined; + message.stringValue = object.stringValue ?? undefined; + message.bytesValue = object.bytesValue ?? undefined; + message.durationValue = (object.durationValue !== undefined && object.durationValue !== null) + ? Duration.fromPartial(object.durationValue) + : undefined; + message.timestampValue = object.timestampValue ?? 
undefined; + return message; + }, +}; + +function createBaseSourceInfo(): SourceInfo { + return { syntaxVersion: "", location: "", lineOffsets: [], positions: {}, macroCalls: {} }; +} + +export const SourceInfo = { + encode(message: SourceInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.syntaxVersion !== "") { + writer.uint32(10).string(message.syntaxVersion); + } + if (message.location !== "") { + writer.uint32(18).string(message.location); + } + writer.uint32(26).fork(); + for (const v of message.lineOffsets) { + writer.int32(v); + } + writer.ldelim(); + Object.entries(message.positions).forEach(([key, value]) => { + SourceInfo_PositionsEntry.encode({ key: key as any, value }, writer.uint32(34).fork()).ldelim(); + }); + Object.entries(message.macroCalls).forEach(([key, value]) => { + SourceInfo_MacroCallsEntry.encode({ key: key as any, value }, writer.uint32(42).fork()).ldelim(); + }); + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SourceInfo { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseSourceInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.syntaxVersion = reader.string(); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.location = reader.string(); + continue; + case 3: + if (tag === 24) { + message.lineOffsets.push(reader.int32()); + + continue; + } + + if (tag === 26) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.lineOffsets.push(reader.int32()); + } + + continue; + } + + break; + case 4: + if (tag !== 34) { + break; + } + + const entry4 = SourceInfo_PositionsEntry.decode(reader, reader.uint32()); + if (entry4.value !== undefined) { + message.positions[entry4.key] = entry4.value; + } + continue; + case 5: + if (tag !== 42) { + break; + } + + const entry5 = SourceInfo_MacroCallsEntry.decode(reader, reader.uint32()); + if (entry5.value !== undefined) { + message.macroCalls[entry5.key] = entry5.value; + } + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): SourceInfo { + return { + syntaxVersion: isSet(object.syntaxVersion) ? String(object.syntaxVersion) : "", + location: isSet(object.location) ? String(object.location) : "", + lineOffsets: Array.isArray(object?.lineOffsets) ? object.lineOffsets.map((e: any) => Number(e)) : [], + positions: isObject(object.positions) + ? Object.entries(object.positions).reduce<{ [key: number]: number }>((acc, [key, value]) => { + acc[Number(key)] = Number(value); + return acc; + }, {}) + : {}, + macroCalls: isObject(object.macroCalls) + ? 
Object.entries(object.macroCalls).reduce<{ [key: number]: Expr }>((acc, [key, value]) => { + acc[Number(key)] = Expr.fromJSON(value); + return acc; + }, {}) + : {}, + }; + }, + + toJSON(message: SourceInfo): unknown { + const obj: any = {}; + if (message.syntaxVersion !== "") { + obj.syntaxVersion = message.syntaxVersion; + } + if (message.location !== "") { + obj.location = message.location; + } + if (message.lineOffsets?.length) { + obj.lineOffsets = message.lineOffsets.map((e) => Math.round(e)); + } + if (message.positions) { + const entries = Object.entries(message.positions); + if (entries.length > 0) { + obj.positions = {}; + entries.forEach(([k, v]) => { + obj.positions[k] = Math.round(v); + }); + } + } + if (message.macroCalls) { + const entries = Object.entries(message.macroCalls); + if (entries.length > 0) { + obj.macroCalls = {}; + entries.forEach(([k, v]) => { + obj.macroCalls[k] = Expr.toJSON(v); + }); + } + } + return obj; + }, + + create(base?: DeepPartial): SourceInfo { + return SourceInfo.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): SourceInfo { + const message = createBaseSourceInfo(); + message.syntaxVersion = object.syntaxVersion ?? ""; + message.location = object.location ?? ""; + message.lineOffsets = object.lineOffsets?.map((e) => e) || []; + message.positions = Object.entries(object.positions ?? {}).reduce<{ [key: number]: number }>( + (acc, [key, value]) => { + if (value !== undefined) { + acc[Number(key)] = Number(value); + } + return acc; + }, + {}, + ); + message.macroCalls = Object.entries(object.macroCalls ?? 
{}).reduce<{ [key: number]: Expr }>( + (acc, [key, value]) => { + if (value !== undefined) { + acc[Number(key)] = Expr.fromPartial(value); + } + return acc; + }, + {}, + ); + return message; + }, +}; + +function createBaseSourceInfo_PositionsEntry(): SourceInfo_PositionsEntry { + return { key: 0, value: 0 }; +} + +export const SourceInfo_PositionsEntry = { + encode(message: SourceInfo_PositionsEntry, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.key !== 0) { + writer.uint32(8).int64(message.key); + } + if (message.value !== 0) { + writer.uint32(16).int32(message.value); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SourceInfo_PositionsEntry { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSourceInfo_PositionsEntry(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 8) { + break; + } + + message.key = longToNumber(reader.int64() as Long); + continue; + case 2: + if (tag !== 16) { + break; + } + + message.value = reader.int32(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): SourceInfo_PositionsEntry { + return { key: isSet(object.key) ? Number(object.key) : 0, value: isSet(object.value) ? Number(object.value) : 0 }; + }, + + toJSON(message: SourceInfo_PositionsEntry): unknown { + const obj: any = {}; + if (message.key !== 0) { + obj.key = Math.round(message.key); + } + if (message.value !== 0) { + obj.value = Math.round(message.value); + } + return obj; + }, + + create(base?: DeepPartial): SourceInfo_PositionsEntry { + return SourceInfo_PositionsEntry.fromPartial(base ?? 
{}); + }, + fromPartial(object: DeepPartial): SourceInfo_PositionsEntry { + const message = createBaseSourceInfo_PositionsEntry(); + message.key = object.key ?? 0; + message.value = object.value ?? 0; + return message; + }, +}; + +function createBaseSourceInfo_MacroCallsEntry(): SourceInfo_MacroCallsEntry { + return { key: 0, value: undefined }; +} + +export const SourceInfo_MacroCallsEntry = { + encode(message: SourceInfo_MacroCallsEntry, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.key !== 0) { + writer.uint32(8).int64(message.key); + } + if (message.value !== undefined) { + Expr.encode(message.value, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SourceInfo_MacroCallsEntry { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSourceInfo_MacroCallsEntry(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 8) { + break; + } + + message.key = longToNumber(reader.int64() as Long); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.value = Expr.decode(reader, reader.uint32()); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): SourceInfo_MacroCallsEntry { + return { + key: isSet(object.key) ? Number(object.key) : 0, + value: isSet(object.value) ? Expr.fromJSON(object.value) : undefined, + }; + }, + + toJSON(message: SourceInfo_MacroCallsEntry): unknown { + const obj: any = {}; + if (message.key !== 0) { + obj.key = Math.round(message.key); + } + if (message.value !== undefined) { + obj.value = Expr.toJSON(message.value); + } + return obj; + }, + + create(base?: DeepPartial): SourceInfo_MacroCallsEntry { + return SourceInfo_MacroCallsEntry.fromPartial(base ?? 
{}); + }, + fromPartial(object: DeepPartial): SourceInfo_MacroCallsEntry { + const message = createBaseSourceInfo_MacroCallsEntry(); + message.key = object.key ?? 0; + message.value = (object.value !== undefined && object.value !== null) ? Expr.fromPartial(object.value) : undefined; + return message; + }, +}; + +function createBaseSourcePosition(): SourcePosition { + return { location: "", offset: 0, line: 0, column: 0 }; +} + +export const SourcePosition = { + encode(message: SourcePosition, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.location !== "") { + writer.uint32(10).string(message.location); + } + if (message.offset !== 0) { + writer.uint32(16).int32(message.offset); + } + if (message.line !== 0) { + writer.uint32(24).int32(message.line); + } + if (message.column !== 0) { + writer.uint32(32).int32(message.column); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SourcePosition { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSourcePosition(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.location = reader.string(); + continue; + case 2: + if (tag !== 16) { + break; + } + + message.offset = reader.int32(); + continue; + case 3: + if (tag !== 24) { + break; + } + + message.line = reader.int32(); + continue; + case 4: + if (tag !== 32) { + break; + } + + message.column = reader.int32(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): SourcePosition { + return { + location: isSet(object.location) ? String(object.location) : "", + offset: isSet(object.offset) ? Number(object.offset) : 0, + line: isSet(object.line) ? Number(object.line) : 0, + column: isSet(object.column) ? 
Number(object.column) : 0, + }; + }, + + toJSON(message: SourcePosition): unknown { + const obj: any = {}; + if (message.location !== "") { + obj.location = message.location; + } + if (message.offset !== 0) { + obj.offset = Math.round(message.offset); + } + if (message.line !== 0) { + obj.line = Math.round(message.line); + } + if (message.column !== 0) { + obj.column = Math.round(message.column); + } + return obj; + }, + + create(base?: DeepPartial): SourcePosition { + return SourcePosition.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): SourcePosition { + const message = createBaseSourcePosition(); + message.location = object.location ?? ""; + message.offset = object.offset ?? 0; + message.line = object.line ?? 0; + message.column = object.column ?? 0; + return message; + }, +}; + +export interface DataLoaderOptions { + cache?: boolean; +} + +export interface DataLoaders { + rpcDataLoaderOptions?: DataLoaderOptions; + getDataLoader(identifier: string, constructorFn: () => T): T; +} + +declare const self: any | undefined; +declare const window: any | undefined; +declare const global: any | undefined; +const tsProtoGlobalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (tsProtoGlobalThis.Buffer) { + return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64")); + } else { + const bin = tsProtoGlobalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (tsProtoGlobalThis.Buffer) { + return tsProtoGlobalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: 
string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return tsProtoGlobalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +function toTimestamp(date: Date): Timestamp { + const seconds = date.getTime() / 1_000; + const nanos = (date.getTime() % 1_000) * 1_000_000; + return { seconds, nanos }; +} + +function fromTimestamp(t: Timestamp): Date { + let millis = (t.seconds || 0) * 1_000; + millis += (t.nanos || 0) / 1_000_000; + return new Date(millis); +} + +function fromJsonTimestamp(o: any): Date { + if (o instanceof Date) { + return o; + } else if (typeof o === "string") { + return new Date(o); + } else { + return fromTimestamp(Timestamp.fromJSON(o)); + } +} + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new tsProtoGlobalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isObject(value: any): boolean { + return typeof value === "object" && value !== null; +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/components/spicedb/typescript/src/google/protobuf/duration.pb.ts b/components/spicedb/typescript/src/google/protobuf/duration.pb.ts new file mode 100644 index 00000000000000..47da9c5e7ff62d --- /dev/null +++ b/components/spicedb/typescript/src/google/protobuf/duration.pb.ts @@ -0,0 +1,214 @@ +/** + * Copyright (c) 2023 Gitpod GmbH. All rights reserved. + * Licensed under the GNU Affero General Public License (AGPL). + * See License.AGPL.txt in the project root for license information. 
+ */ + +/* eslint-disable */ +import * as _m0 from "protobufjs/minimal"; +import Long = require("long"); + +export const protobufPackage = "google.protobuf"; + +/** + * A Duration represents a signed, fixed-length span of time represented + * as a count of seconds and fractions of seconds at nanosecond + * resolution. It is independent of any calendar and concepts like "day" + * or "month". It is related to Timestamp in that the difference between + * two Timestamp values is a Duration and it can be added or subtracted + * from a Timestamp. Range is approximately +-10,000 years. + * + * # Examples + * + * Example 1: Compute Duration from two Timestamps in pseudo code. + * + * Timestamp start = ...; + * Timestamp end = ...; + * Duration duration = ...; + * + * duration.seconds = end.seconds - start.seconds; + * duration.nanos = end.nanos - start.nanos; + * + * if (duration.seconds < 0 && duration.nanos > 0) { + * duration.seconds += 1; + * duration.nanos -= 1000000000; + * } else if (duration.seconds > 0 && duration.nanos < 0) { + * duration.seconds -= 1; + * duration.nanos += 1000000000; + * } + * + * Example 2: Compute Timestamp from Timestamp + Duration in pseudo code. + * + * Timestamp start = ...; + * Duration duration = ...; + * Timestamp end = ...; + * + * end.seconds = start.seconds + duration.seconds; + * end.nanos = start.nanos + duration.nanos; + * + * if (end.nanos < 0) { + * end.seconds -= 1; + * end.nanos += 1000000000; + * } else if (end.nanos >= 1000000000) { + * end.seconds += 1; + * end.nanos -= 1000000000; + * } + * + * Example 3: Compute Duration from datetime.timedelta in Python. 
+ * + * td = datetime.timedelta(days=3, minutes=10) + * duration = Duration() + * duration.FromTimedelta(td) + * + * # JSON Mapping + * + * In JSON format, the Duration type is encoded as a string rather than an + * object, where the string ends in the suffix "s" (indicating seconds) and + * is preceded by the number of seconds, with nanoseconds expressed as + * fractional seconds. For example, 3 seconds with 0 nanoseconds should be + * encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should + * be expressed in JSON format as "3.000000001s", and 3 seconds and 1 + * microsecond should be expressed in JSON format as "3.000001s". + */ +export interface Duration { + /** + * Signed seconds of the span of time. Must be from -315,576,000,000 + * to +315,576,000,000 inclusive. Note: these bounds are computed from: + * 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years + */ + seconds: number; + /** + * Signed fractions of a second at nanosecond resolution of the span + * of time. Durations less than one second are represented with a 0 + * `seconds` field and a positive or negative `nanos` field. For durations + * of one second or more, a non-zero value for the `nanos` field must be + * of the same sign as the `seconds` field. Must be from -999,999,999 + * to +999,999,999 inclusive. + */ + nanos: number; +} + +function createBaseDuration(): Duration { + return { seconds: 0, nanos: 0 }; +} + +export const Duration = { + encode(message: Duration, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.seconds !== 0) { + writer.uint32(8).int64(message.seconds); + } + if (message.nanos !== 0) { + writer.uint32(16).int32(message.nanos); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Duration { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseDuration(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 8) { + break; + } + + message.seconds = longToNumber(reader.int64() as Long); + continue; + case 2: + if (tag !== 16) { + break; + } + + message.nanos = reader.int32(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): Duration { + return { + seconds: isSet(object.seconds) ? Number(object.seconds) : 0, + nanos: isSet(object.nanos) ? Number(object.nanos) : 0, + }; + }, + + toJSON(message: Duration): unknown { + const obj: any = {}; + if (message.seconds !== 0) { + obj.seconds = Math.round(message.seconds); + } + if (message.nanos !== 0) { + obj.nanos = Math.round(message.nanos); + } + return obj; + }, + + create(base?: DeepPartial): Duration { + return Duration.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): Duration { + const message = createBaseDuration(); + message.seconds = object.seconds ?? 0; + message.nanos = object.nanos ?? 0; + return message; + }, +}; + +export interface DataLoaderOptions { + cache?: boolean; +} + +export interface DataLoaders { + rpcDataLoaderOptions?: DataLoaderOptions; + getDataLoader(identifier: string, constructorFn: () => T): T; +} + +declare const self: any | undefined; +declare const window: any | undefined; +declare const global: any | undefined; +const tsProtoGlobalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? 
Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new tsProtoGlobalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/components/spicedb/typescript/src/google/protobuf/empty.pb.ts b/components/spicedb/typescript/src/google/protobuf/empty.pb.ts new file mode 100644 index 00000000000000..3db639eb4dc3fe --- /dev/null +++ b/components/spicedb/typescript/src/google/protobuf/empty.pb.ts @@ -0,0 +1,81 @@ +/** + * Copyright (c) 2023 Gitpod GmbH. All rights reserved. + * Licensed under the GNU Affero General Public License (AGPL). + * See License.AGPL.txt in the project root for license information. + */ + +/* eslint-disable */ +import * as _m0 from "protobufjs/minimal"; + +export const protobufPackage = "google.protobuf"; + +/** + * A generic empty message that you can re-use to avoid defining duplicated + * empty messages in your APIs. A typical example is to use it as the request + * or the response type of an API method. For instance: + * + * service Foo { + * rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); + * } + */ +export interface Empty { +} + +function createBaseEmpty(): Empty { + return {}; +} + +export const Empty = { + encode(_: Empty, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Empty { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseEmpty(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(_: any): Empty { + return {}; + }, + + toJSON(_: Empty): unknown { + const obj: any = {}; + return obj; + }, + + create(base?: DeepPartial): Empty { + return Empty.fromPartial(base ?? {}); + }, + fromPartial(_: DeepPartial): Empty { + const message = createBaseEmpty(); + return message; + }, +}; + +export interface DataLoaderOptions { + cache?: boolean; +} + +export interface DataLoaders { + rpcDataLoaderOptions?: DataLoaderOptions; + getDataLoader(identifier: string, constructorFn: () => T): T; +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; diff --git a/components/spicedb/typescript/src/google/protobuf/struct.pb.ts b/components/spicedb/typescript/src/google/protobuf/struct.pb.ts new file mode 100644 index 00000000000000..ea1d9cdb1fcc6f --- /dev/null +++ b/components/spicedb/typescript/src/google/protobuf/struct.pb.ts @@ -0,0 +1,564 @@ +/** + * Copyright (c) 2023 Gitpod GmbH. All rights reserved. + * Licensed under the GNU Affero General Public License (AGPL). + * See License.AGPL.txt in the project root for license information. + */ + +/* eslint-disable */ +import * as _m0 from "protobufjs/minimal"; + +export const protobufPackage = "google.protobuf"; + +/** + * `NullValue` is a singleton enumeration to represent the null value for the + * `Value` type union. + * + * The JSON representation for `NullValue` is JSON `null`. + */ +export enum NullValue { + /** NULL_VALUE - Null value. 
*/ + NULL_VALUE = "NULL_VALUE", + UNRECOGNIZED = "UNRECOGNIZED", +} + +export function nullValueFromJSON(object: any): NullValue { + switch (object) { + case 0: + case "NULL_VALUE": + return NullValue.NULL_VALUE; + case -1: + case "UNRECOGNIZED": + default: + return NullValue.UNRECOGNIZED; + } +} + +export function nullValueToJSON(object: NullValue): string { + switch (object) { + case NullValue.NULL_VALUE: + return "NULL_VALUE"; + case NullValue.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export function nullValueToNumber(object: NullValue): number { + switch (object) { + case NullValue.NULL_VALUE: + return 0; + case NullValue.UNRECOGNIZED: + default: + return -1; + } +} + +/** + * `Struct` represents a structured data value, consisting of fields + * which map to dynamically typed values. In some languages, `Struct` + * might be supported by a native representation. For example, in + * scripting languages like JS a struct is represented as an + * object. The details of that representation are described together + * with the proto support for the language. + * + * The JSON representation for `Struct` is JSON object. + */ +export interface Struct { + /** Unordered map of dynamically typed values. */ + fields: { [key: string]: any | undefined }; +} + +export interface Struct_FieldsEntry { + key: string; + value: any | undefined; +} + +/** + * `Value` represents a dynamically typed value which can be either + * null, a number, a string, a boolean, a recursive struct value, or a + * list of values. A producer of value is expected to set one of these + * variants. Absence of any variant indicates an error. + * + * The JSON representation for `Value` is JSON value. + */ +export interface Value { + /** Represents a null value. */ + nullValue?: + | NullValue + | undefined; + /** Represents a double value. */ + numberValue?: + | number + | undefined; + /** Represents a string value. 
*/ + stringValue?: + | string + | undefined; + /** Represents a boolean value. */ + boolValue?: + | boolean + | undefined; + /** Represents a structured value. */ + structValue?: + | { [key: string]: any } + | undefined; + /** Represents a repeated `Value`. */ + listValue?: Array | undefined; +} + +/** + * `ListValue` is a wrapper around a repeated field of values. + * + * The JSON representation for `ListValue` is JSON array. + */ +export interface ListValue { + /** Repeated field of dynamically typed values. */ + values: any[]; +} + +function createBaseStruct(): Struct { + return { fields: {} }; +} + +export const Struct = { + encode(message: Struct, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + Object.entries(message.fields).forEach(([key, value]) => { + if (value !== undefined) { + Struct_FieldsEntry.encode({ key: key as any, value }, writer.uint32(10).fork()).ldelim(); + } + }); + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Struct { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseStruct(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + const entry1 = Struct_FieldsEntry.decode(reader, reader.uint32()); + if (entry1.value !== undefined) { + message.fields[entry1.key] = entry1.value; + } + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): Struct { + return { + fields: isObject(object.fields) + ? 
Object.entries(object.fields).reduce<{ [key: string]: any | undefined }>((acc, [key, value]) => { + acc[key] = value as any | undefined; + return acc; + }, {}) + : {}, + }; + }, + + toJSON(message: Struct): unknown { + const obj: any = {}; + if (message.fields) { + const entries = Object.entries(message.fields); + if (entries.length > 0) { + obj.fields = {}; + entries.forEach(([k, v]) => { + obj.fields[k] = v; + }); + } + } + return obj; + }, + + create(base?: DeepPartial): Struct { + return Struct.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): Struct { + const message = createBaseStruct(); + message.fields = Object.entries(object.fields ?? {}).reduce<{ [key: string]: any | undefined }>( + (acc, [key, value]) => { + if (value !== undefined) { + acc[key] = value; + } + return acc; + }, + {}, + ); + return message; + }, + + wrap(object: { [key: string]: any } | undefined): Struct { + const struct = createBaseStruct(); + if (object !== undefined) { + Object.keys(object).forEach((key) => { + struct.fields[key] = object[key]; + }); + } + return struct; + }, + + unwrap(message: Struct): { [key: string]: any } { + const object: { [key: string]: any } = {}; + if (message.fields) { + Object.keys(message.fields).forEach((key) => { + object[key] = message.fields[key]; + }); + } + return object; + }, +}; + +function createBaseStruct_FieldsEntry(): Struct_FieldsEntry { + return { key: "", value: undefined }; +} + +export const Struct_FieldsEntry = { + encode(message: Struct_FieldsEntry, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.key !== "") { + writer.uint32(10).string(message.key); + } + if (message.value !== undefined) { + Value.encode(Value.wrap(message.value), writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Struct_FieldsEntry { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseStruct_FieldsEntry(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.key = reader.string(); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.value = Value.unwrap(Value.decode(reader, reader.uint32())); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): Struct_FieldsEntry { + return { key: isSet(object.key) ? String(object.key) : "", value: isSet(object?.value) ? object.value : undefined }; + }, + + toJSON(message: Struct_FieldsEntry): unknown { + const obj: any = {}; + if (message.key !== "") { + obj.key = message.key; + } + if (message.value !== undefined) { + obj.value = message.value; + } + return obj; + }, + + create(base?: DeepPartial): Struct_FieldsEntry { + return Struct_FieldsEntry.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): Struct_FieldsEntry { + const message = createBaseStruct_FieldsEntry(); + message.key = object.key ?? ""; + message.value = object.value ?? 
undefined; + return message; + }, +}; + +function createBaseValue(): Value { + return { + nullValue: undefined, + numberValue: undefined, + stringValue: undefined, + boolValue: undefined, + structValue: undefined, + listValue: undefined, + }; +} + +export const Value = { + encode(message: Value, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.nullValue !== undefined) { + writer.uint32(8).int32(nullValueToNumber(message.nullValue)); + } + if (message.numberValue !== undefined) { + writer.uint32(17).double(message.numberValue); + } + if (message.stringValue !== undefined) { + writer.uint32(26).string(message.stringValue); + } + if (message.boolValue !== undefined) { + writer.uint32(32).bool(message.boolValue); + } + if (message.structValue !== undefined) { + Struct.encode(Struct.wrap(message.structValue), writer.uint32(42).fork()).ldelim(); + } + if (message.listValue !== undefined) { + ListValue.encode(ListValue.wrap(message.listValue), writer.uint32(50).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Value { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseValue(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 8) { + break; + } + + message.nullValue = nullValueFromJSON(reader.int32()); + continue; + case 2: + if (tag !== 17) { + break; + } + + message.numberValue = reader.double(); + continue; + case 3: + if (tag !== 26) { + break; + } + + message.stringValue = reader.string(); + continue; + case 4: + if (tag !== 32) { + break; + } + + message.boolValue = reader.bool(); + continue; + case 5: + if (tag !== 42) { + break; + } + + message.structValue = Struct.unwrap(Struct.decode(reader, reader.uint32())); + continue; + case 6: + if (tag !== 50) { + break; + } + + message.listValue = ListValue.unwrap(ListValue.decode(reader, reader.uint32())); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): Value { + return { + nullValue: isSet(object.nullValue) ? nullValueFromJSON(object.nullValue) : undefined, + numberValue: isSet(object.numberValue) ? Number(object.numberValue) : undefined, + stringValue: isSet(object.stringValue) ? String(object.stringValue) : undefined, + boolValue: isSet(object.boolValue) ? Boolean(object.boolValue) : undefined, + structValue: isObject(object.structValue) ? object.structValue : undefined, + listValue: Array.isArray(object.listValue) ? 
[...object.listValue] : undefined, + }; + }, + + toJSON(message: Value): unknown { + const obj: any = {}; + if (message.nullValue !== undefined) { + obj.nullValue = nullValueToJSON(message.nullValue); + } + if (message.numberValue !== undefined) { + obj.numberValue = message.numberValue; + } + if (message.stringValue !== undefined) { + obj.stringValue = message.stringValue; + } + if (message.boolValue !== undefined) { + obj.boolValue = message.boolValue; + } + if (message.structValue !== undefined) { + obj.structValue = message.structValue; + } + if (message.listValue !== undefined) { + obj.listValue = message.listValue; + } + return obj; + }, + + create(base?: DeepPartial): Value { + return Value.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): Value { + const message = createBaseValue(); + message.nullValue = object.nullValue ?? undefined; + message.numberValue = object.numberValue ?? undefined; + message.stringValue = object.stringValue ?? undefined; + message.boolValue = object.boolValue ?? undefined; + message.structValue = object.structValue ?? undefined; + message.listValue = object.listValue ?? 
undefined; + return message; + }, + + wrap(value: any): Value { + const result = createBaseValue(); + if (value === null) { + result.nullValue = NullValue.NULL_VALUE; + } else if (typeof value === "boolean") { + result.boolValue = value; + } else if (typeof value === "number") { + result.numberValue = value; + } else if (typeof value === "string") { + result.stringValue = value; + } else if (Array.isArray(value)) { + result.listValue = value; + } else if (typeof value === "object") { + result.structValue = value; + } else if (typeof value !== "undefined") { + throw new Error("Unsupported any value type: " + typeof value); + } + return result; + }, + + unwrap(message: any): string | number | boolean | Object | null | Array | undefined { + if (message.stringValue !== undefined) { + return message.stringValue; + } else if (message?.numberValue !== undefined) { + return message.numberValue; + } else if (message?.boolValue !== undefined) { + return message.boolValue; + } else if (message?.structValue !== undefined) { + return message.structValue as any; + } else if (message?.listValue !== undefined) { + return message.listValue; + } else if (message?.nullValue !== undefined) { + return null; + } + return undefined; + }, +}; + +function createBaseListValue(): ListValue { + return { values: [] }; +} + +export const ListValue = { + encode(message: ListValue, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.values) { + Value.encode(Value.wrap(v!), writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ListValue { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseListValue(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.values.push(Value.unwrap(Value.decode(reader, reader.uint32()))); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): ListValue { + return { values: Array.isArray(object?.values) ? [...object.values] : [] }; + }, + + toJSON(message: ListValue): unknown { + const obj: any = {}; + if (message.values?.length) { + obj.values = message.values; + } + return obj; + }, + + create(base?: DeepPartial): ListValue { + return ListValue.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): ListValue { + const message = createBaseListValue(); + message.values = object.values?.map((e) => e) || []; + return message; + }, + + wrap(array: Array | undefined): ListValue { + const result = createBaseListValue(); + result.values = array ?? []; + return result; + }, + + unwrap(message: ListValue): Array { + if (message?.hasOwnProperty("values") && Array.isArray(message.values)) { + return message.values; + } else { + return message as any; + } + }, +}; + +export interface DataLoaderOptions { + cache?: boolean; +} + +export interface DataLoaders { + rpcDataLoaderOptions?: DataLoaderOptions; + getDataLoader(identifier: string, constructorFn: () => T): T; +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? 
{ [K in keyof T]?: DeepPartial } + : Partial; + +function isObject(value: any): boolean { + return typeof value === "object" && value !== null; +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/components/spicedb/typescript/src/google/protobuf/timestamp.pb.ts b/components/spicedb/typescript/src/google/protobuf/timestamp.pb.ts new file mode 100644 index 00000000000000..fb8680145b4e9c --- /dev/null +++ b/components/spicedb/typescript/src/google/protobuf/timestamp.pb.ts @@ -0,0 +1,243 @@ +/** + * Copyright (c) 2023 Gitpod GmbH. All rights reserved. + * Licensed under the GNU Affero General Public License (AGPL). + * See License.AGPL.txt in the project root for license information. + */ + +/* eslint-disable */ +import * as _m0 from "protobufjs/minimal"; +import Long = require("long"); + +export const protobufPackage = "google.protobuf"; + +/** + * A Timestamp represents a point in time independent of any time zone or local + * calendar, encoded as a count of seconds and fractions of seconds at + * nanosecond resolution. The count is relative to an epoch at UTC midnight on + * January 1, 1970, in the proleptic Gregorian calendar which extends the + * Gregorian calendar backwards to year one. + * + * All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap + * second table is needed for interpretation, using a [24-hour linear + * smear](https://developers.google.com/time/smear). + * + * The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By + * restricting to that range, we ensure that we can convert to and from [RFC + * 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings. + * + * # Examples + * + * Example 1: Compute Timestamp from POSIX `time()`. + * + * Timestamp timestamp; + * timestamp.set_seconds(time(NULL)); + * timestamp.set_nanos(0); + * + * Example 2: Compute Timestamp from POSIX `gettimeofday()`. 
+ * + * struct timeval tv; + * gettimeofday(&tv, NULL); + * + * Timestamp timestamp; + * timestamp.set_seconds(tv.tv_sec); + * timestamp.set_nanos(tv.tv_usec * 1000); + * + * Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`. + * + * FILETIME ft; + * GetSystemTimeAsFileTime(&ft); + * UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime; + * + * // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z + * // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z. + * Timestamp timestamp; + * timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL)); + * timestamp.set_nanos((INT32) ((ticks % 10000000) * 100)); + * + * Example 4: Compute Timestamp from Java `System.currentTimeMillis()`. + * + * long millis = System.currentTimeMillis(); + * + * Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000) + * .setNanos((int) ((millis % 1000) * 1000000)).build(); + * + * Example 5: Compute Timestamp from Java `Instant.now()`. + * + * Instant now = Instant.now(); + * + * Timestamp timestamp = + * Timestamp.newBuilder().setSeconds(now.getEpochSecond()) + * .setNanos(now.getNano()).build(); + * + * Example 6: Compute Timestamp from current time in Python. + * + * timestamp = Timestamp() + * timestamp.GetCurrentTime() + * + * # JSON Mapping + * + * In JSON format, the Timestamp type is encoded as a string in the + * [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the + * format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z" + * where {year} is always expressed using four digits while {month}, {day}, + * {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional + * seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution), + * are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone + * is required. 
A proto3 JSON serializer should always use UTC (as indicated by + * "Z") when printing the Timestamp type and a proto3 JSON parser should be + * able to accept both UTC and other timezones (as indicated by an offset). + * + * For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past + * 01:30 UTC on January 15, 2017. + * + * In JavaScript, one can convert a Date object to this format using the + * standard + * [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString) + * method. In Python, a standard `datetime.datetime` object can be converted + * to this format using + * [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with + * the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use + * the Joda Time's [`ISODateTimeFormat.dateTime()`]( + * http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D + * ) to obtain a formatter capable of generating timestamps in this format. + */ +export interface Timestamp { + /** + * Represents seconds of UTC time since Unix epoch + * 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to + * 9999-12-31T23:59:59Z inclusive. + */ + seconds: number; + /** + * Non-negative fractions of a second at nanosecond resolution. Negative + * second values with fractions must still have non-negative nanos values + * that count forward in time. Must be from 0 to 999,999,999 + * inclusive. 
+ */ + nanos: number; +} + +function createBaseTimestamp(): Timestamp { + return { seconds: 0, nanos: 0 }; +} + +export const Timestamp = { + encode(message: Timestamp, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.seconds !== 0) { + writer.uint32(8).int64(message.seconds); + } + if (message.nanos !== 0) { + writer.uint32(16).int32(message.nanos); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Timestamp { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseTimestamp(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 8) { + break; + } + + message.seconds = longToNumber(reader.int64() as Long); + continue; + case 2: + if (tag !== 16) { + break; + } + + message.nanos = reader.int32(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): Timestamp { + return { + seconds: isSet(object.seconds) ? Number(object.seconds) : 0, + nanos: isSet(object.nanos) ? Number(object.nanos) : 0, + }; + }, + + toJSON(message: Timestamp): unknown { + const obj: any = {}; + if (message.seconds !== 0) { + obj.seconds = Math.round(message.seconds); + } + if (message.nanos !== 0) { + obj.nanos = Math.round(message.nanos); + } + return obj; + }, + + create(base?: DeepPartial): Timestamp { + return Timestamp.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): Timestamp { + const message = createBaseTimestamp(); + message.seconds = object.seconds ?? 0; + message.nanos = object.nanos ?? 
0; + return message; + }, +}; + +export interface DataLoaderOptions { + cache?: boolean; +} + +export interface DataLoaders { + rpcDataLoaderOptions?: DataLoaderOptions; + getDataLoader(identifier: string, constructorFn: () => T): T; +} + +declare const self: any | undefined; +declare const window: any | undefined; +declare const global: any | undefined; +const tsProtoGlobalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new tsProtoGlobalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/components/spicedb/typescript/src/impl/v1/impl.pb.ts b/components/spicedb/typescript/src/impl/v1/impl.pb.ts new file mode 100644 index 00000000000000..ca94531e5286f7 --- /dev/null +++ b/components/spicedb/typescript/src/impl/v1/impl.pb.ts @@ -0,0 +1,1055 @@ +/** + * Copyright (c) 2023 Gitpod GmbH. All rights reserved. + * Licensed under the GNU Affero General Public License (AGPL). + * See License.AGPL.txt in the project root for license information. 
+ */ + +/* eslint-disable */ +import * as _m0 from "protobufjs/minimal"; +import { CheckedExpr } from "../../google/api/expr/v1alpha1/checked.pb"; +import Long = require("long"); + +export const protobufPackage = "impl.v1"; + +export interface DecodedCaveat { + cel?: CheckedExpr | undefined; + name: string; +} + +export interface DecodedZookie { + version: number; + v1?: DecodedZookie_V1Zookie | undefined; + v2?: DecodedZookie_V2Zookie | undefined; +} + +export interface DecodedZookie_V1Zookie { + revision: number; +} + +export interface DecodedZookie_V2Zookie { + revision: string; +} + +export interface DecodedZedToken { + deprecatedV1Zookie?: DecodedZedToken_V1Zookie | undefined; + v1?: DecodedZedToken_V1ZedToken | undefined; +} + +export interface DecodedZedToken_V1Zookie { + revision: number; +} + +export interface DecodedZedToken_V1ZedToken { + revision: string; +} + +export interface DecodedCursor { + v1?: V1Cursor | undefined; +} + +export interface V1Cursor { + /** revision is the string form of the revision for the cursor. */ + revision: string; + /** sections are the sections of the dispatching cursor. */ + sections: string[]; + /** + * call_and_parameters_hash is a hash of the call that manufactured this cursor and all its + * parameters, including limits and zedtoken, to ensure no inputs changed when using this cursor. 
+ */ + callAndParametersHash: string; +} + +export interface DocComment { + comment: string; +} + +export interface RelationMetadata { + kind: RelationMetadata_RelationKind; +} + +export enum RelationMetadata_RelationKind { + UNKNOWN_KIND = "UNKNOWN_KIND", + RELATION = "RELATION", + PERMISSION = "PERMISSION", + UNRECOGNIZED = "UNRECOGNIZED", +} + +export function relationMetadata_RelationKindFromJSON(object: any): RelationMetadata_RelationKind { + switch (object) { + case 0: + case "UNKNOWN_KIND": + return RelationMetadata_RelationKind.UNKNOWN_KIND; + case 1: + case "RELATION": + return RelationMetadata_RelationKind.RELATION; + case 2: + case "PERMISSION": + return RelationMetadata_RelationKind.PERMISSION; + case -1: + case "UNRECOGNIZED": + default: + return RelationMetadata_RelationKind.UNRECOGNIZED; + } +} + +export function relationMetadata_RelationKindToJSON(object: RelationMetadata_RelationKind): string { + switch (object) { + case RelationMetadata_RelationKind.UNKNOWN_KIND: + return "UNKNOWN_KIND"; + case RelationMetadata_RelationKind.RELATION: + return "RELATION"; + case RelationMetadata_RelationKind.PERMISSION: + return "PERMISSION"; + case RelationMetadata_RelationKind.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export function relationMetadata_RelationKindToNumber(object: RelationMetadata_RelationKind): number { + switch (object) { + case RelationMetadata_RelationKind.UNKNOWN_KIND: + return 0; + case RelationMetadata_RelationKind.RELATION: + return 1; + case RelationMetadata_RelationKind.PERMISSION: + return 2; + case RelationMetadata_RelationKind.UNRECOGNIZED: + default: + return -1; + } +} + +export interface NamespaceAndRevision { + namespaceName: string; + revision: string; +} + +export interface V1Alpha1Revision { + nsRevisions: NamespaceAndRevision[]; +} + +function createBaseDecodedCaveat(): DecodedCaveat { + return { cel: undefined, name: "" }; +} + +export const DecodedCaveat = { + encode(message: DecodedCaveat, writer: 
_m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.cel !== undefined) { + CheckedExpr.encode(message.cel, writer.uint32(10).fork()).ldelim(); + } + if (message.name !== "") { + writer.uint32(18).string(message.name); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DecodedCaveat { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDecodedCaveat(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.cel = CheckedExpr.decode(reader, reader.uint32()); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.name = reader.string(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): DecodedCaveat { + return { + cel: isSet(object.cel) ? CheckedExpr.fromJSON(object.cel) : undefined, + name: isSet(object.name) ? String(object.name) : "", + }; + }, + + toJSON(message: DecodedCaveat): unknown { + const obj: any = {}; + if (message.cel !== undefined) { + obj.cel = CheckedExpr.toJSON(message.cel); + } + if (message.name !== "") { + obj.name = message.name; + } + return obj; + }, + + create(base?: DeepPartial): DecodedCaveat { + return DecodedCaveat.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): DecodedCaveat { + const message = createBaseDecodedCaveat(); + message.cel = (object.cel !== undefined && object.cel !== null) ? CheckedExpr.fromPartial(object.cel) : undefined; + message.name = object.name ?? 
""; + return message; + }, +}; + +function createBaseDecodedZookie(): DecodedZookie { + return { version: 0, v1: undefined, v2: undefined }; +} + +export const DecodedZookie = { + encode(message: DecodedZookie, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.version !== 0) { + writer.uint32(8).uint32(message.version); + } + if (message.v1 !== undefined) { + DecodedZookie_V1Zookie.encode(message.v1, writer.uint32(18).fork()).ldelim(); + } + if (message.v2 !== undefined) { + DecodedZookie_V2Zookie.encode(message.v2, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DecodedZookie { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDecodedZookie(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 8) { + break; + } + + message.version = reader.uint32(); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.v1 = DecodedZookie_V1Zookie.decode(reader, reader.uint32()); + continue; + case 3: + if (tag !== 26) { + break; + } + + message.v2 = DecodedZookie_V2Zookie.decode(reader, reader.uint32()); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): DecodedZookie { + return { + version: isSet(object.version) ? Number(object.version) : 0, + v1: isSet(object.v1) ? DecodedZookie_V1Zookie.fromJSON(object.v1) : undefined, + v2: isSet(object.v2) ? 
DecodedZookie_V2Zookie.fromJSON(object.v2) : undefined, + }; + }, + + toJSON(message: DecodedZookie): unknown { + const obj: any = {}; + if (message.version !== 0) { + obj.version = Math.round(message.version); + } + if (message.v1 !== undefined) { + obj.v1 = DecodedZookie_V1Zookie.toJSON(message.v1); + } + if (message.v2 !== undefined) { + obj.v2 = DecodedZookie_V2Zookie.toJSON(message.v2); + } + return obj; + }, + + create(base?: DeepPartial): DecodedZookie { + return DecodedZookie.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): DecodedZookie { + const message = createBaseDecodedZookie(); + message.version = object.version ?? 0; + message.v1 = (object.v1 !== undefined && object.v1 !== null) + ? DecodedZookie_V1Zookie.fromPartial(object.v1) + : undefined; + message.v2 = (object.v2 !== undefined && object.v2 !== null) + ? DecodedZookie_V2Zookie.fromPartial(object.v2) + : undefined; + return message; + }, +}; + +function createBaseDecodedZookie_V1Zookie(): DecodedZookie_V1Zookie { + return { revision: 0 }; +} + +export const DecodedZookie_V1Zookie = { + encode(message: DecodedZookie_V1Zookie, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.revision !== 0) { + writer.uint32(8).uint64(message.revision); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DecodedZookie_V1Zookie { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDecodedZookie_V1Zookie(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 8) { + break; + } + + message.revision = longToNumber(reader.uint64() as Long); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): DecodedZookie_V1Zookie { + return { revision: isSet(object.revision) ? 
Number(object.revision) : 0 }; + }, + + toJSON(message: DecodedZookie_V1Zookie): unknown { + const obj: any = {}; + if (message.revision !== 0) { + obj.revision = Math.round(message.revision); + } + return obj; + }, + + create(base?: DeepPartial): DecodedZookie_V1Zookie { + return DecodedZookie_V1Zookie.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): DecodedZookie_V1Zookie { + const message = createBaseDecodedZookie_V1Zookie(); + message.revision = object.revision ?? 0; + return message; + }, +}; + +function createBaseDecodedZookie_V2Zookie(): DecodedZookie_V2Zookie { + return { revision: "" }; +} + +export const DecodedZookie_V2Zookie = { + encode(message: DecodedZookie_V2Zookie, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.revision !== "") { + writer.uint32(10).string(message.revision); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DecodedZookie_V2Zookie { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDecodedZookie_V2Zookie(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.revision = reader.string(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): DecodedZookie_V2Zookie { + return { revision: isSet(object.revision) ? String(object.revision) : "" }; + }, + + toJSON(message: DecodedZookie_V2Zookie): unknown { + const obj: any = {}; + if (message.revision !== "") { + obj.revision = message.revision; + } + return obj; + }, + + create(base?: DeepPartial): DecodedZookie_V2Zookie { + return DecodedZookie_V2Zookie.fromPartial(base ?? 
{}); + }, + fromPartial(object: DeepPartial): DecodedZookie_V2Zookie { + const message = createBaseDecodedZookie_V2Zookie(); + message.revision = object.revision ?? ""; + return message; + }, +}; + +function createBaseDecodedZedToken(): DecodedZedToken { + return { deprecatedV1Zookie: undefined, v1: undefined }; +} + +export const DecodedZedToken = { + encode(message: DecodedZedToken, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.deprecatedV1Zookie !== undefined) { + DecodedZedToken_V1Zookie.encode(message.deprecatedV1Zookie, writer.uint32(18).fork()).ldelim(); + } + if (message.v1 !== undefined) { + DecodedZedToken_V1ZedToken.encode(message.v1, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DecodedZedToken { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDecodedZedToken(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 2: + if (tag !== 18) { + break; + } + + message.deprecatedV1Zookie = DecodedZedToken_V1Zookie.decode(reader, reader.uint32()); + continue; + case 3: + if (tag !== 26) { + break; + } + + message.v1 = DecodedZedToken_V1ZedToken.decode(reader, reader.uint32()); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): DecodedZedToken { + return { + deprecatedV1Zookie: isSet(object.deprecatedV1Zookie) + ? DecodedZedToken_V1Zookie.fromJSON(object.deprecatedV1Zookie) + : undefined, + v1: isSet(object.v1) ? 
DecodedZedToken_V1ZedToken.fromJSON(object.v1) : undefined, + }; + }, + + toJSON(message: DecodedZedToken): unknown { + const obj: any = {}; + if (message.deprecatedV1Zookie !== undefined) { + obj.deprecatedV1Zookie = DecodedZedToken_V1Zookie.toJSON(message.deprecatedV1Zookie); + } + if (message.v1 !== undefined) { + obj.v1 = DecodedZedToken_V1ZedToken.toJSON(message.v1); + } + return obj; + }, + + create(base?: DeepPartial): DecodedZedToken { + return DecodedZedToken.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): DecodedZedToken { + const message = createBaseDecodedZedToken(); + message.deprecatedV1Zookie = (object.deprecatedV1Zookie !== undefined && object.deprecatedV1Zookie !== null) + ? DecodedZedToken_V1Zookie.fromPartial(object.deprecatedV1Zookie) + : undefined; + message.v1 = (object.v1 !== undefined && object.v1 !== null) + ? DecodedZedToken_V1ZedToken.fromPartial(object.v1) + : undefined; + return message; + }, +}; + +function createBaseDecodedZedToken_V1Zookie(): DecodedZedToken_V1Zookie { + return { revision: 0 }; +} + +export const DecodedZedToken_V1Zookie = { + encode(message: DecodedZedToken_V1Zookie, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.revision !== 0) { + writer.uint32(8).uint64(message.revision); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DecodedZedToken_V1Zookie { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseDecodedZedToken_V1Zookie(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 8) { + break; + } + + message.revision = longToNumber(reader.uint64() as Long); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): DecodedZedToken_V1Zookie { + return { revision: isSet(object.revision) ? Number(object.revision) : 0 }; + }, + + toJSON(message: DecodedZedToken_V1Zookie): unknown { + const obj: any = {}; + if (message.revision !== 0) { + obj.revision = Math.round(message.revision); + } + return obj; + }, + + create(base?: DeepPartial): DecodedZedToken_V1Zookie { + return DecodedZedToken_V1Zookie.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): DecodedZedToken_V1Zookie { + const message = createBaseDecodedZedToken_V1Zookie(); + message.revision = object.revision ?? 0; + return message; + }, +}; + +function createBaseDecodedZedToken_V1ZedToken(): DecodedZedToken_V1ZedToken { + return { revision: "" }; +} + +export const DecodedZedToken_V1ZedToken = { + encode(message: DecodedZedToken_V1ZedToken, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.revision !== "") { + writer.uint32(10).string(message.revision); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DecodedZedToken_V1ZedToken { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseDecodedZedToken_V1ZedToken(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.revision = reader.string(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): DecodedZedToken_V1ZedToken { + return { revision: isSet(object.revision) ? String(object.revision) : "" }; + }, + + toJSON(message: DecodedZedToken_V1ZedToken): unknown { + const obj: any = {}; + if (message.revision !== "") { + obj.revision = message.revision; + } + return obj; + }, + + create(base?: DeepPartial): DecodedZedToken_V1ZedToken { + return DecodedZedToken_V1ZedToken.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): DecodedZedToken_V1ZedToken { + const message = createBaseDecodedZedToken_V1ZedToken(); + message.revision = object.revision ?? ""; + return message; + }, +}; + +function createBaseDecodedCursor(): DecodedCursor { + return { v1: undefined }; +} + +export const DecodedCursor = { + encode(message: DecodedCursor, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.v1 !== undefined) { + V1Cursor.encode(message.v1, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DecodedCursor { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseDecodedCursor(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.v1 = V1Cursor.decode(reader, reader.uint32()); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): DecodedCursor { + return { v1: isSet(object.v1) ? V1Cursor.fromJSON(object.v1) : undefined }; + }, + + toJSON(message: DecodedCursor): unknown { + const obj: any = {}; + if (message.v1 !== undefined) { + obj.v1 = V1Cursor.toJSON(message.v1); + } + return obj; + }, + + create(base?: DeepPartial): DecodedCursor { + return DecodedCursor.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): DecodedCursor { + const message = createBaseDecodedCursor(); + message.v1 = (object.v1 !== undefined && object.v1 !== null) ? V1Cursor.fromPartial(object.v1) : undefined; + return message; + }, +}; + +function createBaseV1Cursor(): V1Cursor { + return { revision: "", sections: [], callAndParametersHash: "" }; +} + +export const V1Cursor = { + encode(message: V1Cursor, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.revision !== "") { + writer.uint32(10).string(message.revision); + } + for (const v of message.sections) { + writer.uint32(18).string(v!); + } + if (message.callAndParametersHash !== "") { + writer.uint32(26).string(message.callAndParametersHash); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): V1Cursor { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseV1Cursor(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.revision = reader.string(); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.sections.push(reader.string()); + continue; + case 3: + if (tag !== 26) { + break; + } + + message.callAndParametersHash = reader.string(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): V1Cursor { + return { + revision: isSet(object.revision) ? String(object.revision) : "", + sections: Array.isArray(object?.sections) ? object.sections.map((e: any) => String(e)) : [], + callAndParametersHash: isSet(object.callAndParametersHash) ? String(object.callAndParametersHash) : "", + }; + }, + + toJSON(message: V1Cursor): unknown { + const obj: any = {}; + if (message.revision !== "") { + obj.revision = message.revision; + } + if (message.sections?.length) { + obj.sections = message.sections; + } + if (message.callAndParametersHash !== "") { + obj.callAndParametersHash = message.callAndParametersHash; + } + return obj; + }, + + create(base?: DeepPartial): V1Cursor { + return V1Cursor.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): V1Cursor { + const message = createBaseV1Cursor(); + message.revision = object.revision ?? ""; + message.sections = object.sections?.map((e) => e) || []; + message.callAndParametersHash = object.callAndParametersHash ?? 
""; + return message; + }, +}; + +function createBaseDocComment(): DocComment { + return { comment: "" }; +} + +export const DocComment = { + encode(message: DocComment, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.comment !== "") { + writer.uint32(10).string(message.comment); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DocComment { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDocComment(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.comment = reader.string(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): DocComment { + return { comment: isSet(object.comment) ? String(object.comment) : "" }; + }, + + toJSON(message: DocComment): unknown { + const obj: any = {}; + if (message.comment !== "") { + obj.comment = message.comment; + } + return obj; + }, + + create(base?: DeepPartial): DocComment { + return DocComment.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): DocComment { + const message = createBaseDocComment(); + message.comment = object.comment ?? ""; + return message; + }, +}; + +function createBaseRelationMetadata(): RelationMetadata { + return { kind: RelationMetadata_RelationKind.UNKNOWN_KIND }; +} + +export const RelationMetadata = { + encode(message: RelationMetadata, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.kind !== RelationMetadata_RelationKind.UNKNOWN_KIND) { + writer.uint32(8).int32(relationMetadata_RelationKindToNumber(message.kind)); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): RelationMetadata { + const reader = input instanceof _m0.Reader ? 
input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseRelationMetadata(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 8) { + break; + } + + message.kind = relationMetadata_RelationKindFromJSON(reader.int32()); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): RelationMetadata { + return { + kind: isSet(object.kind) + ? relationMetadata_RelationKindFromJSON(object.kind) + : RelationMetadata_RelationKind.UNKNOWN_KIND, + }; + }, + + toJSON(message: RelationMetadata): unknown { + const obj: any = {}; + if (message.kind !== RelationMetadata_RelationKind.UNKNOWN_KIND) { + obj.kind = relationMetadata_RelationKindToJSON(message.kind); + } + return obj; + }, + + create(base?: DeepPartial): RelationMetadata { + return RelationMetadata.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): RelationMetadata { + const message = createBaseRelationMetadata(); + message.kind = object.kind ?? RelationMetadata_RelationKind.UNKNOWN_KIND; + return message; + }, +}; + +function createBaseNamespaceAndRevision(): NamespaceAndRevision { + return { namespaceName: "", revision: "" }; +} + +export const NamespaceAndRevision = { + encode(message: NamespaceAndRevision, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.namespaceName !== "") { + writer.uint32(10).string(message.namespaceName); + } + if (message.revision !== "") { + writer.uint32(18).string(message.revision); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): NamespaceAndRevision { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseNamespaceAndRevision(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.namespaceName = reader.string(); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.revision = reader.string(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): NamespaceAndRevision { + return { + namespaceName: isSet(object.namespaceName) ? String(object.namespaceName) : "", + revision: isSet(object.revision) ? String(object.revision) : "", + }; + }, + + toJSON(message: NamespaceAndRevision): unknown { + const obj: any = {}; + if (message.namespaceName !== "") { + obj.namespaceName = message.namespaceName; + } + if (message.revision !== "") { + obj.revision = message.revision; + } + return obj; + }, + + create(base?: DeepPartial): NamespaceAndRevision { + return NamespaceAndRevision.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): NamespaceAndRevision { + const message = createBaseNamespaceAndRevision(); + message.namespaceName = object.namespaceName ?? ""; + message.revision = object.revision ?? ""; + return message; + }, +}; + +function createBaseV1Alpha1Revision(): V1Alpha1Revision { + return { nsRevisions: [] }; +} + +export const V1Alpha1Revision = { + encode(message: V1Alpha1Revision, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.nsRevisions) { + NamespaceAndRevision.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): V1Alpha1Revision { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseV1Alpha1Revision(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.nsRevisions.push(NamespaceAndRevision.decode(reader, reader.uint32())); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): V1Alpha1Revision { + return { + nsRevisions: Array.isArray(object?.nsRevisions) + ? object.nsRevisions.map((e: any) => NamespaceAndRevision.fromJSON(e)) + : [], + }; + }, + + toJSON(message: V1Alpha1Revision): unknown { + const obj: any = {}; + if (message.nsRevisions?.length) { + obj.nsRevisions = message.nsRevisions.map((e) => NamespaceAndRevision.toJSON(e)); + } + return obj; + }, + + create(base?: DeepPartial): V1Alpha1Revision { + return V1Alpha1Revision.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): V1Alpha1Revision { + const message = createBaseV1Alpha1Revision(); + message.nsRevisions = object.nsRevisions?.map((e) => NamespaceAndRevision.fromPartial(e)) || []; + return message; + }, +}; + +export interface DataLoaderOptions { + cache?: boolean; +} + +export interface DataLoaders { + rpcDataLoaderOptions?: DataLoaderOptions; + getDataLoader(identifier: string, constructorFn: () => T): T; +} + +declare const self: any | undefined; +declare const window: any | undefined; +declare const global: any | undefined; +const tsProtoGlobalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? 
Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new tsProtoGlobalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/components/spicedb/typescript/tsconfig.json b/components/spicedb/typescript/tsconfig.json new file mode 100644 index 00000000000000..86746019cdbc88 --- /dev/null +++ b/components/spicedb/typescript/tsconfig.json @@ -0,0 +1,31 @@ +{ + "compilerOptions": { + "outDir": "lib", + "experimentalDecorators": true, + "lib": [ + "ES2021" + ], + "strict": true, + "noEmitOnError": false, + "noUnusedLocals": true, + "emitDecoratorMetadata": true, + "strictPropertyInitialization": false, + "downlevelIteration": true, + "module": "commonjs", + "moduleResolution": "node", + "target": "ES2021", + "jsx": "react", + "sourceMap": true, + "declaration": true, + "declarationMap": true, + "skipLibCheck": true, + "useUnknownInCatchVariables": false, + "esModuleInterop": true, + "typeRoots": [ + "./src/typings" + ] + }, + "include": [ + "src" + ] +}