From 09fe37ee109502fc588d2fb1b169e31d54b65da8 Mon Sep 17 00:00:00 2001 From: Steven Ontong Date: Tue, 11 Jun 2024 18:06:08 +0200 Subject: [PATCH 01/36] update typescript tsconfig --- package.json | 6 +- packages/rsocket-router/package.json | 2 +- pnpm-lock.yaml | 111 ++++++++++++++++----------- service/package.json | 4 +- tsconfig.json | 26 +++++++ 5 files changed, 101 insertions(+), 48 deletions(-) create mode 100644 tsconfig.json diff --git a/package.json b/package.json index a58731a11..866f21dfe 100644 --- a/package.json +++ b/package.json @@ -14,6 +14,9 @@ "build:packages": "pnpm run build", "build:production": "NODE_ENV=production pnpm run -r build", "build:ts": "tsc -b", + "watch:ts": "pnpm build:ts -w", + "watch:server": "concurrently --kill-others --passthrough-arguments \"pnpm watch:ts\" \" pnpm start:server {@}\" -- ", + "start:server": "pnpm --filter powersync-open-service watch", "clean": "pnpm run -r clean", "release": "pnpm build:production && pnpm changeset publish", "test": "pnpm run -r test" @@ -30,6 +33,7 @@ "@types/node": "18.11.11", "async": "^3.2.4", "bson": "^6.6.0", + "concurrently": "^8.2.2", "inquirer": "^9.2.7", "npm-check-updates": "^16.10.15", "prettier": "^2.8.8", @@ -37,7 +41,7 @@ "rsocket-websocket-client": "1.0.0-alpha.3", "semver": "^7.5.4", "ts-node-dev": "^2.0.0", - "typescript": "^4.9.5", + "typescript": "~5.2.2", "ws": "^8.2.3" } } diff --git a/packages/rsocket-router/package.json b/packages/rsocket-router/package.json index 74d3f294d..1c740e3ec 100644 --- a/packages/rsocket-router/package.json +++ b/packages/rsocket-router/package.json @@ -29,7 +29,7 @@ "@types/ws": "~8.2.0", "bson": "^6.6.0", "rsocket-websocket-client": "1.0.0-alpha.3", - "typescript": "^5.2.2", + "typescript": "~5.2.2", "vitest": "^0.34.6" } } diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 6868b4ab6..b0ce732a8 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -13,13 +13,13 @@ importers: version: 2.27.3 '@journeyapps-platform/deploy-cli': specifier: ^4.4.6 - version: 4.4.7(@octokit/core@4.2.4(encoding@0.1.13))(@types/node@18.11.11)(encoding@0.1.13)(typescript@4.9.5) + version: 4.4.7(@octokit/core@4.2.4(encoding@0.1.13))(@types/node@18.11.11)(encoding@0.1.13)(typescript@5.2.2) '@journeyapps-platform/deploy-config': specifier: ^3.1.0 version: 3.1.0 '@journeyapps-platform/deploy-transformers': specifier: ^1.2.32 - version: 1.2.33(@octokit/core@4.2.4(encoding@0.1.13))(@types/node@18.11.11)(encoding@0.1.13)(typescript@4.9.5) + version: 1.2.33(@octokit/core@4.2.4(encoding@0.1.13))(@types/node@18.11.11)(encoding@0.1.13)(typescript@5.2.2) '@journeyapps-platform/formatter-cli': specifier: ^3.1.9 version: 3.1.9 @@ -28,7 +28,7 @@ importers: version: 1.0.3 '@journeyapps-platform/micro-cli': specifier: ^7.2.6 - version: 7.2.9(@types/node@18.11.11)(typescript@4.9.5) + version: 7.2.9(@types/node@18.11.11)(typescript@5.2.2) '@journeyapps-platform/micro-dev': specifier: ^1.6.7 version: 1.6.7 @@ -41,6 +41,9 @@ importers: bson: specifier: ^6.6.0 version: 6.7.0 + concurrently: + specifier: ^8.2.2 + version: 8.2.2 inquirer: specifier: ^9.2.7 version: 9.2.22 @@ -61,10 +64,10 @@ importers: version: 7.6.2 ts-node-dev: specifier: ^2.0.0 - version: 2.0.0(@types/node@18.11.11)(typescript@4.9.5) + version: 2.0.0(@types/node@18.11.11)(typescript@5.2.2) typescript: - specifier: ^4.9.5 - version: 4.9.5 + specifier: ~5.2.2 + version: 5.2.2 ws: specifier: ^8.2.3 version: 8.2.3 @@ -118,8 +121,8 @@ importers: specifier: 1.0.0-alpha.3 version: 1.0.0-alpha.3 typescript: - specifier: ^5.2.2 - version: 5.4.5 + 
specifier: ~5.2.2 + version: 5.2.2 vitest: specifier: ^0.34.6 version: 0.34.6 @@ -359,10 +362,10 @@ importers: version: 16.14.20 ts-node: specifier: ^10.9.1 - version: 10.9.2(@types/node@18.11.11)(typescript@5.4.5) + version: 10.9.2(@types/node@18.11.11)(typescript@5.2.2) typescript: - specifier: ^5.2.2 - version: 5.4.5 + specifier: ~5.2.2 + version: 5.2.2 vitest: specifier: ^0.34.6 version: 0.34.6 @@ -1940,6 +1943,11 @@ packages: concat-map@0.0.1: resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==} + concurrently@8.2.2: + resolution: {integrity: sha512-1dP4gpXFhei8IOtlXRE/T/4H88ElHgTiUzh71YUmtjTEHMSRS2Z/fgOxHSxxusGHogsRfxNq1vyAwxSC+EVyDg==} + engines: {node: ^14.13.0 || >=16.0.0} + hasBin: true + confbox@0.1.7: resolution: {integrity: sha512-uJcB/FKZtBMCJpK8MQji6bJHgu1tixKPxRLeGkNzBoOZzpnZUJm0jm2/sBDWcuBx1dYgxV4JU+g5hmNxCyAmdA==} @@ -2026,6 +2034,10 @@ packages: resolution: {integrity: sha512-t/Ygsytq+R995EJ5PZlD4Cu56sWa8InXySaViRzw9apusqsOO2bQP+SbYzAhR0pFKoB+43lYy8rWban9JSuXnA==} engines: {node: '>= 0.4'} + date-fns@2.30.0: + resolution: {integrity: sha512-fnULvOpxnC5/Vg3NCiWelDsLiUc9bRwAPs/+LfTLNvetFCtCTN+yQz15C/fs4AwX1R9K5GLtLfn8QW+dWisaAw==} + engines: {node: '>=0.11'} + debug@2.6.9: resolution: {integrity: sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==} peerDependencies: @@ -4000,6 +4012,9 @@ packages: sparse-bitfield@3.0.3: resolution: {integrity: sha512-kvzhi7vqKTfkh0PZU+2D2PIllw2ymqJKujUcyPMd9Y75Nv4nPbGJZXNhxsgdQab2BmlDct1YnfQCguEvHr7VsQ==} + spawn-command@0.0.2: + resolution: {integrity: sha512-zC8zGoGkmc8J9ndvml8Xksr1Amk9qBujgbF0JAIWO7kXr43w0h/0GJNM/Vustixu+YE8N/MTrQ7N31FvHUACxQ==} + spawn-please@2.0.2: resolution: {integrity: sha512-KM8coezO6ISQ89c1BzyWNtcn2V2kAVtwIXd3cN/V5a0xPYc1F/vydrRc01wsKFEQ/p+V1a4sw4z2yMITIXrgGw==} engines: {node: '>=14'} @@ -4127,6 +4142,10 @@ packages: resolution: {integrity: sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==} engines: {node: '>=8'} + supports-color@8.1.1: + resolution: {integrity: sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==} + engines: {node: '>=10'} + supports-preserve-symlinks-flag@1.0.0: resolution: {integrity: sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==} engines: {node: '>= 0.4'} @@ -4321,9 +4340,9 @@ packages: engines: {node: '>=4.2.0'} hasBin: true - typescript@4.9.5: - resolution: {integrity: sha512-1FXk9E2Hm+QzZQ7z+McJiHL4NW1F2EzMu9Nq9i3zAaGqibafqYwCVU6WyWAuyQRRzOlxou8xZSyXLEN8oKj24g==} - engines: {node: '>=4.2.0'} + typescript@5.2.2: + resolution: {integrity: sha512-mI4WrpHsbCIcwT9cF4FZvr80QUeKvsUsUvKDoR+X/7XHQH98xYD8YHZg7ANtz2GtZt/CBq2QJ0thkGJMHfqc1w==} + engines: {node: '>=14.17'} hasBin: true typescript@5.4.5: @@ -5010,7 +5029,7 @@ snapshots: dependencies: chalk: 4.1.2 - '@journeyapps-platform/deploy-cli@4.4.7(@octokit/core@4.2.4(encoding@0.1.13))(@types/node@18.11.11)(encoding@0.1.13)(typescript@4.9.5)': + '@journeyapps-platform/deploy-cli@4.4.7(@octokit/core@4.2.4(encoding@0.1.13))(@types/node@18.11.11)(encoding@0.1.13)(typescript@5.2.2)': dependencies: '@journeyapps-platform/ci-tools': 3.0.7(@octokit/core@4.2.4(encoding@0.1.13))(encoding@0.1.13) '@journeyapps-platform/cli-logger': 2.14.8 @@ -5020,7 +5039,7 @@ snapshots: '@journeyapps-platform/shell-tools': 3.0.1 inquirer: 8.2.6 lodash: 4.17.21 - ts-node: 10.9.2(@types/node@18.11.11)(typescript@4.9.5) + 
ts-node: 10.9.2(@types/node@18.11.11)(typescript@5.2.2) transitivePeerDependencies: - '@octokit/core' - '@swc/core' @@ -5036,9 +5055,9 @@ snapshots: js-yaml: 4.1.0 lodash: 4.17.21 - '@journeyapps-platform/deploy-transformers@1.2.33(@octokit/core@4.2.4(encoding@0.1.13))(@types/node@18.11.11)(encoding@0.1.13)(typescript@4.9.5)': + '@journeyapps-platform/deploy-transformers@1.2.33(@octokit/core@4.2.4(encoding@0.1.13))(@types/node@18.11.11)(encoding@0.1.13)(typescript@5.2.2)': dependencies: - '@journeyapps-platform/deploy-cli': 4.4.7(@octokit/core@4.2.4(encoding@0.1.13))(@types/node@18.11.11)(encoding@0.1.13)(typescript@4.9.5) + '@journeyapps-platform/deploy-cli': 4.4.7(@octokit/core@4.2.4(encoding@0.1.13))(@types/node@18.11.11)(encoding@0.1.13)(typescript@5.2.2) inquirer: 8.2.6 transitivePeerDependencies: - '@octokit/core' @@ -5106,7 +5125,7 @@ snapshots: transitivePeerDependencies: - encoding - '@journeyapps-platform/micro-cli@7.2.9(@types/node@18.11.11)(typescript@4.9.5)': + '@journeyapps-platform/micro-cli@7.2.9(@types/node@18.11.11)(typescript@5.2.2)': dependencies: '@journeyapps-platform/cli-logger': 2.14.8 '@journeyapps-platform/deploy-config': 3.1.0 @@ -5117,7 +5136,7 @@ snapshots: inquirer: 8.2.6 js-yaml: 4.1.0 lodash: 4.17.21 - ts-node: 10.9.2(@types/node@18.11.11)(typescript@4.9.5) + ts-node: 10.9.2(@types/node@18.11.11)(typescript@5.2.2) uuid: 9.0.1 yargs: 17.7.2 transitivePeerDependencies: @@ -6771,6 +6790,18 @@ snapshots: concat-map@0.0.1: {} + concurrently@8.2.2: + dependencies: + chalk: 4.1.2 + date-fns: 2.30.0 + lodash: 4.17.21 + rxjs: 7.8.1 + shell-quote: 1.8.1 + spawn-command: 0.0.2 + supports-color: 8.1.1 + tree-kill: 1.2.2 + yargs: 17.7.2 + confbox@0.1.7: {} config-chain@1.1.13: @@ -6868,6 +6899,10 @@ snapshots: es-errors: 1.3.0 is-data-view: 1.0.1 + date-fns@2.30.0: + dependencies: + '@babel/runtime': 7.24.6 + debug@2.6.9: dependencies: ms: 2.0.0 @@ -9102,6 +9137,8 @@ snapshots: dependencies: memory-pager: 1.5.0 + spawn-command@0.0.2: {} + spawn-please@2.0.2: dependencies: cross-spawn: 7.0.3 @@ -9232,6 +9269,10 @@ snapshots: dependencies: has-flag: 4.0.0 + supports-color@8.1.1: + dependencies: + has-flag: 4.0.0 + supports-preserve-symlinks-flag@1.0.0: {} tar@6.2.1: @@ -9294,7 +9335,7 @@ snapshots: ts-codec@1.2.2: {} - ts-node-dev@2.0.0(@types/node@18.11.11)(typescript@4.9.5): + ts-node-dev@2.0.0(@types/node@18.11.11)(typescript@5.2.2): dependencies: chokidar: 3.6.0 dynamic-dedupe: 0.3.0 @@ -9304,33 +9345,15 @@ snapshots: rimraf: 2.7.1 source-map-support: 0.5.21 tree-kill: 1.2.2 - ts-node: 10.9.2(@types/node@18.11.11)(typescript@4.9.5) + ts-node: 10.9.2(@types/node@18.11.11)(typescript@5.2.2) tsconfig: 7.0.0 - typescript: 4.9.5 + typescript: 5.2.2 transitivePeerDependencies: - '@swc/core' - '@swc/wasm' - '@types/node' - ts-node@10.9.2(@types/node@18.11.11)(typescript@4.9.5): - dependencies: - '@cspotcode/source-map-support': 0.8.1 - '@tsconfig/node10': 1.0.11 - '@tsconfig/node12': 1.0.11 - '@tsconfig/node14': 1.0.3 - '@tsconfig/node16': 1.0.4 - '@types/node': 18.11.11 - acorn: 8.11.3 - acorn-walk: 8.3.2 - arg: 4.1.3 - create-require: 1.1.1 - diff: 4.0.2 - make-error: 1.3.6 - typescript: 4.9.5 - v8-compile-cache-lib: 3.0.1 - yn: 3.1.1 - - ts-node@10.9.2(@types/node@18.11.11)(typescript@5.4.5): + ts-node@10.9.2(@types/node@18.11.11)(typescript@5.2.2): dependencies: '@cspotcode/source-map-support': 0.8.1 '@tsconfig/node10': 1.0.11 @@ -9344,7 +9367,7 @@ snapshots: create-require: 1.1.1 diff: 4.0.2 make-error: 1.3.6 - typescript: 5.4.5 + typescript: 5.2.2 v8-compile-cache-lib: 
3.0.1 yn: 3.1.1 @@ -9458,7 +9481,7 @@ snapshots: typescript@3.8.3: {} - typescript@4.9.5: {} + typescript@5.2.2: {} typescript@5.4.5: {} diff --git a/service/package.json b/service/package.json index f692cc309..880404b48 100644 --- a/service/package.json +++ b/service/package.json @@ -6,7 +6,7 @@ "type": "module", "scripts": { "build": "tsc -b", - "watch": "nodemon -e ts -x node --loader ts-node/esm src/entry.ts start", + "watch": "nodemon -w lib -x node lib/entry.js start", "clean": "rm -rf ./lib && tsc -b --clean" }, "dependencies": { @@ -45,7 +45,7 @@ "nodemon": "^3.0.1", "npm-check-updates": "^16.14.4", "ts-node": "^10.9.1", - "typescript": "^5.2.2", + "typescript": "~5.2.2", "vitest": "^0.34.6" } } diff --git a/tsconfig.json b/tsconfig.json new file mode 100644 index 000000000..a8bef0c63 --- /dev/null +++ b/tsconfig.json @@ -0,0 +1,26 @@ +{ + "files": [], + "references": [ + { + "path": "./packages/jpgwire" + }, + { + "path": "./packages/jsonbig" + }, + { + "path": "./packages/rsocket-router" + }, + { + "path": "./packages/service-core" + }, + { + "path": "./packages/sync-rules" + }, + { + "path": "./packages/types" + }, + { + "path": "./service" + } + ] +} From e772e91ba50d549a772e4647c596939317f89190 Mon Sep 17 00:00:00 2001 From: Steven Ontong Date: Wed, 12 Jun 2024 14:07:36 +0200 Subject: [PATCH 02/36] fix watching of packages. Update imports from to relative imports --- package.json | 5 +- .../src/router/ReactiveSocketRouter.ts | 2 - packages/service-core/package.json | 5 +- packages/service-core/src/api/diagnostics.ts | 6 +- packages/service-core/src/api/schema.ts | 2 +- packages/service-core/src/entry/cli-entry.ts | 2 +- .../src/entry/commands/config-command.ts | 2 +- .../src/entry/commands/migrate-action.ts | 2 +- .../src/entry/commands/start-action.ts | 2 +- .../src/entry/commands/teardown-action.ts | 2 +- packages/service-core/src/metrics/Metrics.ts | 4 +- .../db/migrations/1684951997326-init.ts | 6 +- .../1702295701188-sync-rule-state.ts | 6 +- .../1711543888062-write-checkpoint-index.ts | 4 +- .../service-core/src/migrations/db/store.ts | 2 +- .../service-core/src/migrations/migrations.ts | 4 +- .../src/replication/WalConnection.ts | 4 +- .../service-core/src/replication/WalStream.ts | 6 +- .../src/replication/WalStreamManager.ts | 4 +- .../src/replication/WalStreamRunner.ts | 4 +- packages/service-core/src/replication/util.ts | 2 +- packages/service-core/src/routes/admin.ts | 4 +- packages/service-core/src/routes/auth.ts | 4 +- .../service-core/src/routes/checkpointing.ts | 2 +- packages/service-core/src/routes/dev.ts | 2 +- packages/service-core/src/routes/router.ts | 2 +- .../service-core/src/routes/socket-route.ts | 4 +- .../service-core/src/routes/sync-rules.ts | 2 +- .../service-core/src/routes/sync-stream.ts | 6 +- .../service-core/src/storage/BucketStorage.ts | 4 +- .../src/storage/MongoBucketStorage.ts | 6 +- .../service-core/src/storage/SourceTable.ts | 4 +- .../src/storage/mongo/MongoBucketBatch.ts | 4 +- .../storage/mongo/MongoSyncBucketStorage.ts | 6 +- .../src/storage/mongo/OperationBatch.ts | 2 +- .../src/storage/mongo/PersistedBatch.ts | 2 +- packages/service-core/src/storage/mongo/db.ts | 2 +- packages/service-core/src/sync/sync.ts | 8 +- packages/service-core/src/sync/util.ts | 4 +- .../src/system/CorePowerSyncSystem.ts | 6 +- .../util/config/compound-config-collector.ts | 2 +- .../service-core/src/util/pgwire_utils.ts | 2 +- packages/service-core/src/util/utils.ts | 2 +- packages/service-core/tsconfig.json | 5 +- packages/service-core/vitest.config.ts | 
7 +- pnpm-lock.yaml | 121 ++++++++++++------ service/package.json | 14 +- 47 files changed, 169 insertions(+), 134 deletions(-) diff --git a/package.json b/package.json index 866f21dfe..eb0097a97 100644 --- a/package.json +++ b/package.json @@ -15,8 +15,8 @@ "build:production": "NODE_ENV=production pnpm run -r build", "build:ts": "tsc -b", "watch:ts": "pnpm build:ts -w", - "watch:server": "concurrently --kill-others --passthrough-arguments \"pnpm watch:ts\" \" pnpm start:server {@}\" -- ", - "start:server": "pnpm --filter powersync-open-service watch", + "watch:service": "concurrently --passthrough-arguments \"pnpm watch:ts\" \" pnpm start:service {@}\" -- ", + "start:service": "pnpm --filter powersync-open-service watch", "clean": "pnpm run -r clean", "release": "pnpm build:production && pnpm changeset publish", "test": "pnpm run -r test" @@ -40,6 +40,7 @@ "rsocket-core": "1.0.0-alpha.3", "rsocket-websocket-client": "1.0.0-alpha.3", "semver": "^7.5.4", + "tsc-watch": "^6.2.0", "ts-node-dev": "^2.0.0", "typescript": "~5.2.2", "ws": "^8.2.3" diff --git a/packages/rsocket-router/src/router/ReactiveSocketRouter.ts b/packages/rsocket-router/src/router/ReactiveSocketRouter.ts index 5d1696e5f..59bc79fdb 100644 --- a/packages/rsocket-router/src/router/ReactiveSocketRouter.ts +++ b/packages/rsocket-router/src/router/ReactiveSocketRouter.ts @@ -40,13 +40,11 @@ export class ReactiveSocketRouter { * web sockets router. */ const wss = new ws.WebSocketServer({ noServer: true }); - server.on('upgrade', (request, socket, head) => { wss.handleUpgrade(request, socket as any, head, (ws) => { wss.emit('connection', ws, request); }); }); - server.on('close', () => wss.close()); const transport = new WebsocketServerTransport({ diff --git a/packages/service-core/package.json b/packages/service-core/package.json index 65fce0e2e..b42808f82 100644 --- a/packages/service-core/package.json +++ b/packages/service-core/package.json @@ -10,8 +10,8 @@ "license": "FSL-1.1-Apache-2.0", "type": "module", "scripts": { - "build": "tsc -b && tsc-alias", - "build:tests": "tsc -b test/tsconfig.json && tsc-alias", + "build": "tsc -b", + "build:tests": "tsc -b test/tsconfig.json", "test": "vitest --no-threads", "clean": "rm -rf ./lib && tsc -b --clean" }, @@ -47,7 +47,6 @@ "devDependencies": { "@journeyapps-platform/micro-dev": "^1.6.7", "@types/uuid": "^9.0.4", - "tsc-alias": "^1.8.10", "typescript": "^5.2.2", "vite-tsconfig-paths": "^4.3.2", "vitest": "^0.34.6" diff --git a/packages/service-core/src/api/diagnostics.ts b/packages/service-core/src/api/diagnostics.ts index a66653675..a2efdf31b 100644 --- a/packages/service-core/src/api/diagnostics.ts +++ b/packages/service-core/src/api/diagnostics.ts @@ -3,9 +3,9 @@ import { DEFAULT_TAG, SourceTableInterface, SqlSyncRules } from '@powersync/serv import { pgwireRows } from '@powersync/service-jpgwire'; import { ConnectionStatus, SyncRulesStatus, TableInfo, baseUri } from '@powersync/service-types'; -import * as replication from '@/replication/replication-index.js'; -import * as storage from '@/storage/storage-index.js'; -import * as util from '@/util/util-index.js'; +import * as replication from '../replication/replication-index.js'; +import * as storage from '../storage/storage-index.js'; +import * as util from '../util/util-index.js'; import { CorePowerSyncSystem } from '../system/CorePowerSyncSystem.js'; diff --git a/packages/service-core/src/api/schema.ts b/packages/service-core/src/api/schema.ts index e7294993a..e3ffbb744 100644 --- a/packages/service-core/src/api/schema.ts 
+++ b/packages/service-core/src/api/schema.ts @@ -2,7 +2,7 @@ import type * as pgwire from '@powersync/service-jpgwire'; import { pgwireRows } from '@powersync/service-jpgwire'; import { DatabaseSchema, internal_routes } from '@powersync/service-types'; -import * as util from '@/util/util-index.js'; +import * as util from '../util/util-index.js'; import { CorePowerSyncSystem } from '../system/CorePowerSyncSystem.js'; export async function getConnectionsSchema(system: CorePowerSyncSystem): Promise { diff --git a/packages/service-core/src/entry/cli-entry.ts b/packages/service-core/src/entry/cli-entry.ts index 7d36a8c22..c5f011bd8 100644 --- a/packages/service-core/src/entry/cli-entry.ts +++ b/packages/service-core/src/entry/cli-entry.ts @@ -2,7 +2,7 @@ import { Command } from 'commander'; import * as micro from '@journeyapps-platform/micro'; -import * as utils from '@/util/util-index.js'; +import * as utils from '../util/util-index.js'; import { registerMigrationAction } from './commands/migrate-action.js'; import { registerTearDownAction } from './commands/teardown-action.js'; import { registerStartAction } from './entry-index.js'; diff --git a/packages/service-core/src/entry/commands/config-command.ts b/packages/service-core/src/entry/commands/config-command.ts index b7c72fc61..b54b14113 100644 --- a/packages/service-core/src/entry/commands/config-command.ts +++ b/packages/service-core/src/entry/commands/config-command.ts @@ -1,6 +1,6 @@ import { Command } from 'commander'; -import * as util from '@/util/util-index.js'; +import * as util from '../../util/util-index.js'; /** * Wraps a Command with the standard config options diff --git a/packages/service-core/src/entry/commands/migrate-action.ts b/packages/service-core/src/entry/commands/migrate-action.ts index 49b4df1ce..c18aa4ede 100644 --- a/packages/service-core/src/entry/commands/migrate-action.ts +++ b/packages/service-core/src/entry/commands/migrate-action.ts @@ -2,7 +2,7 @@ import { Command } from 'commander'; import { Direction } from '@journeyapps-platform/micro-migrate'; import { extractRunnerOptions, wrapConfigCommand } from './config-command.js'; -import { migrate } from '@/migrations/migrations.js'; +import { migrate } from '../../migrations/migrations.js'; const COMMAND_NAME = 'migrate'; diff --git a/packages/service-core/src/entry/commands/start-action.ts b/packages/service-core/src/entry/commands/start-action.ts index b3bc12344..9b1cceb23 100644 --- a/packages/service-core/src/entry/commands/start-action.ts +++ b/packages/service-core/src/entry/commands/start-action.ts @@ -1,6 +1,6 @@ import { Command } from 'commander'; -import * as utils from '@/util/util-index.js'; +import * as utils from '../../util/util-index.js'; import { extractRunnerOptions, wrapConfigCommand } from './config-command.js'; const COMMAND_NAME = 'start'; diff --git a/packages/service-core/src/entry/commands/teardown-action.ts b/packages/service-core/src/entry/commands/teardown-action.ts index 5139e729e..3f0e402aa 100644 --- a/packages/service-core/src/entry/commands/teardown-action.ts +++ b/packages/service-core/src/entry/commands/teardown-action.ts @@ -1,7 +1,7 @@ import { Command } from 'commander'; import { extractRunnerOptions, wrapConfigCommand } from './config-command.js'; -import { teardown } from '@/runner/teardown.js'; +import { teardown } from '../../runner/teardown.js'; const COMMAND_NAME = 'teardown'; diff --git a/packages/service-core/src/metrics/Metrics.ts b/packages/service-core/src/metrics/Metrics.ts index d4d10738f..bb8c3aece 100644 
--- a/packages/service-core/src/metrics/Metrics.ts +++ b/packages/service-core/src/metrics/Metrics.ts @@ -4,8 +4,8 @@ import { PrometheusExporter } from '@opentelemetry/exporter-prometheus'; import { MeterProvider, MetricReader, PeriodicExportingMetricReader } from '@opentelemetry/sdk-metrics'; import { OTLPMetricExporter } from '@opentelemetry/exporter-metrics-otlp-http'; import * as jpgwire from '@powersync/service-jpgwire'; -import * as util from '@/util/util-index.js'; -import * as storage from '@/storage/storage-index.js'; +import * as util from '../util/util-index.js'; +import * as storage from '../storage/storage-index.js'; import { CorePowerSyncSystem } from '../system/CorePowerSyncSystem.js'; import { Resource } from '@opentelemetry/resources'; diff --git a/packages/service-core/src/migrations/db/migrations/1684951997326-init.ts b/packages/service-core/src/migrations/db/migrations/1684951997326-init.ts index 9ff09efc6..e41adf260 100644 --- a/packages/service-core/src/migrations/db/migrations/1684951997326-init.ts +++ b/packages/service-core/src/migrations/db/migrations/1684951997326-init.ts @@ -1,6 +1,6 @@ -import * as mongo from '@/db/mongo.js'; -import * as storage from '@/storage/storage-index.js'; -import * as utils from '@/util/util-index.js'; +import * as mongo from '../../../db/mongo.js'; +import * as storage from '../../../storage/storage-index.js'; +import * as utils from '../../../util/util-index.js'; export const up = async (context?: utils.MigrationContext) => { const config = await utils.loadConfig(context?.runner_config); diff --git a/packages/service-core/src/migrations/db/migrations/1702295701188-sync-rule-state.ts b/packages/service-core/src/migrations/db/migrations/1702295701188-sync-rule-state.ts index 3f13c8150..4a802e7de 100644 --- a/packages/service-core/src/migrations/db/migrations/1702295701188-sync-rule-state.ts +++ b/packages/service-core/src/migrations/db/migrations/1702295701188-sync-rule-state.ts @@ -1,6 +1,6 @@ -import * as mongo from '@/db/mongo.js'; -import * as storage from '@/storage/storage-index.js'; -import * as utils from '@/util/util-index.js'; +import * as mongo from '../../../db/mongo.js'; +import * as storage from '../../../storage/storage-index.js'; +import * as utils from '../../../util/util-index.js'; interface LegacySyncRulesDocument extends storage.SyncRuleDocument { /** diff --git a/packages/service-core/src/migrations/db/migrations/1711543888062-write-checkpoint-index.ts b/packages/service-core/src/migrations/db/migrations/1711543888062-write-checkpoint-index.ts index e21ec63db..a73f0c7d7 100644 --- a/packages/service-core/src/migrations/db/migrations/1711543888062-write-checkpoint-index.ts +++ b/packages/service-core/src/migrations/db/migrations/1711543888062-write-checkpoint-index.ts @@ -1,5 +1,5 @@ -import * as storage from '@/storage/storage-index.js'; -import * as utils from '@/util/util-index.js'; +import * as storage from '../../../storage/storage-index.js'; +import * as utils from '../../../util/util-index.js'; export const up = async (context?: utils.MigrationContext) => { const config = await utils.loadConfig(context?.runner_config); diff --git a/packages/service-core/src/migrations/db/store.ts b/packages/service-core/src/migrations/db/store.ts index fb243420a..7f23f4eab 100644 --- a/packages/service-core/src/migrations/db/store.ts +++ b/packages/service-core/src/migrations/db/store.ts @@ -1,5 +1,5 @@ import * as micro_migrate from '@journeyapps-platform/micro-migrate'; -import * as utils from '@/util/util-index.js'; 
+import * as utils from '../../util/util-index.js'; const config = await utils.loadConfig(); diff --git a/packages/service-core/src/migrations/migrations.ts b/packages/service-core/src/migrations/migrations.ts index 72d824366..258bfce2a 100644 --- a/packages/service-core/src/migrations/migrations.ts +++ b/packages/service-core/src/migrations/migrations.ts @@ -5,8 +5,8 @@ import { fileURLToPath } from 'url'; import { Direction, createMongoMigrationStore, execute, writeLogsToStore } from '@journeyapps-platform/micro-migrate'; -import * as db from '@/db/db-index.js'; -import * as util from '@/util/util-index.js'; +import * as db from '../db/db-index.js'; +import * as util from '../util/util-index.js'; const DEFAULT_MONGO_LOCK_COLLECTION = 'locks'; const MONGO_LOCK_PROCESS = 'migrations'; diff --git a/packages/service-core/src/replication/WalConnection.ts b/packages/service-core/src/replication/WalConnection.ts index 6545e0a33..5fa3cf8a7 100644 --- a/packages/service-core/src/replication/WalConnection.ts +++ b/packages/service-core/src/replication/WalConnection.ts @@ -3,8 +3,8 @@ import { pgwireRows } from '@powersync/service-jpgwire'; import { DEFAULT_TAG, SqlSyncRules, TablePattern } from '@powersync/service-sync-rules'; import { ReplicationError, TableInfo } from '@powersync/service-types'; -import * as storage from '@/storage/storage-index.js'; -import * as util from '@/util/util-index.js'; +import * as storage from '../storage/storage-index.js'; +import * as util from '../util/util-index.js'; import { ReplicaIdentityResult, getReplicationIdentityColumns } from './util.js'; /** diff --git a/packages/service-core/src/replication/WalStream.ts b/packages/service-core/src/replication/WalStream.ts index 16e24ff8b..d85f5f6d0 100644 --- a/packages/service-core/src/replication/WalStream.ts +++ b/packages/service-core/src/replication/WalStream.ts @@ -4,13 +4,13 @@ import * as micro from '@journeyapps-platform/micro'; import { logger } from '@journeyapps-platform/micro'; import { SqliteRow, SqlSyncRules, TablePattern, toSyncRulesRow } from '@powersync/service-sync-rules'; -import * as storage from '@/storage/storage-index.js'; -import * as util from '@/util/util-index.js'; +import * as storage from '../storage/storage-index.js'; +import * as util from '../util/util-index.js'; import { getPgOutputRelation, getRelId, PgRelation } from './PgRelation.js'; import { getReplicationIdentityColumns } from './util.js'; import { WalConnection } from './WalConnection.js'; -import { Metrics } from '@/metrics/Metrics.js'; +import { Metrics } from '../metrics/Metrics.js'; export const ZERO_LSN = '00000000/00000000'; diff --git a/packages/service-core/src/replication/WalStreamManager.ts b/packages/service-core/src/replication/WalStreamManager.ts index 486093516..e817b564d 100644 --- a/packages/service-core/src/replication/WalStreamManager.ts +++ b/packages/service-core/src/replication/WalStreamManager.ts @@ -2,8 +2,8 @@ import * as pgwire from '@powersync/service-jpgwire'; import * as micro from '@journeyapps-platform/micro'; import { hrtime } from 'node:process'; -import * as storage from '@/storage/storage-index.js'; -import * as util from '@/util/util-index.js'; +import * as storage from '../storage/storage-index.js'; +import * as util from '../util/util-index.js'; import { DefaultErrorRateLimiter } from './ErrorRateLimiter.js'; import { touch } from './WalStream.js'; diff --git a/packages/service-core/src/replication/WalStreamRunner.ts b/packages/service-core/src/replication/WalStreamRunner.ts index 
63bd506c2..0d1b6cc6f 100644 --- a/packages/service-core/src/replication/WalStreamRunner.ts +++ b/packages/service-core/src/replication/WalStreamRunner.ts @@ -1,8 +1,8 @@ import * as pgwire from '@powersync/service-jpgwire'; import * as micro from '@journeyapps-platform/micro'; -import * as storage from '@/storage/storage-index.js'; -import * as util from '@/util/util-index.js'; +import * as storage from '../storage/storage-index.js'; +import * as util from '../util/util-index.js'; import { ErrorRateLimiter } from './ErrorRateLimiter.js'; import { MissingReplicationSlotError, WalStream } from './WalStream.js'; diff --git a/packages/service-core/src/replication/util.ts b/packages/service-core/src/replication/util.ts index 9c807dc0e..8c719205a 100644 --- a/packages/service-core/src/replication/util.ts +++ b/packages/service-core/src/replication/util.ts @@ -1,6 +1,6 @@ import * as pgwire from '@powersync/service-jpgwire'; -import * as util from '@/util/util-index.js'; +import * as util from '../util/util-index.js'; import { ReplicationColumn, ReplicationIdentity } from './PgRelation.js'; export interface ReplicaIdentityResult { diff --git a/packages/service-core/src/routes/admin.ts b/packages/service-core/src/routes/admin.ts index 4502a66f3..c12065d6b 100644 --- a/packages/service-core/src/routes/admin.ts +++ b/packages/service-core/src/routes/admin.ts @@ -2,8 +2,8 @@ import * as micro from '@journeyapps-platform/micro'; import { SqlSyncRules, SqliteValue, StaticSchema, isJsonValue, toSyncRulesValue } from '@powersync/service-sync-rules'; import { internal_routes } from '@powersync/service-types'; -import * as api from '@/api/api-index.js'; -import * as util from '@/util/util-index.js'; +import * as api from '../api/api-index.js'; +import * as util from '../util/util-index.js'; import { RouteGenerator } from './router.js'; import { PersistedSyncRulesContent } from '../storage/BucketStorage.js'; diff --git a/packages/service-core/src/routes/auth.ts b/packages/service-core/src/routes/auth.ts index f82eeac27..a8955ef61 100644 --- a/packages/service-core/src/routes/auth.ts +++ b/packages/service-core/src/routes/auth.ts @@ -2,8 +2,8 @@ import * as micro from '@journeyapps-platform/micro'; import { FastifyRequest } from 'fastify'; import * as jose from 'jose'; -import * as auth from '@/auth/auth-index.js'; -import * as util from '@/util/util-index.js'; +import * as auth from '../auth/auth-index.js'; +import * as util from '../util/util-index.js'; import { Context } from './router.js'; import { CorePowerSyncSystem } from '../system/CorePowerSyncSystem.js'; diff --git a/packages/service-core/src/routes/checkpointing.ts b/packages/service-core/src/routes/checkpointing.ts index 9828c782b..f4bbf3118 100644 --- a/packages/service-core/src/routes/checkpointing.ts +++ b/packages/service-core/src/routes/checkpointing.ts @@ -1,7 +1,7 @@ import * as t from 'ts-codec'; import * as micro from '@journeyapps-platform/micro'; -import * as util from '@/util/util-index.js'; +import * as util from '../util/util-index.js'; import { authUser } from './auth.js'; import { RouteGenerator } from './router.js'; diff --git a/packages/service-core/src/routes/dev.ts b/packages/service-core/src/routes/dev.ts index fca055c75..6967462e9 100644 --- a/packages/service-core/src/routes/dev.ts +++ b/packages/service-core/src/routes/dev.ts @@ -2,7 +2,7 @@ import * as t from 'ts-codec'; import * as micro from '@journeyapps-platform/micro'; import * as pgwire from '@powersync/service-jpgwire'; -import * as util from 
'@/util/util-index.js'; +import * as util from '../util/util-index.js'; import { authDevUser, authUser, endpoint, issueDevToken, issueLegacyDevToken, issuePowerSyncToken } from './auth.js'; import { RouteGenerator } from './router.js'; diff --git a/packages/service-core/src/routes/router.ts b/packages/service-core/src/routes/router.ts index 452f12d8a..9e2e393a6 100644 --- a/packages/service-core/src/routes/router.ts +++ b/packages/service-core/src/routes/router.ts @@ -1,6 +1,6 @@ import * as micro from '@journeyapps-platform/micro'; -import * as auth from '@/auth/auth-index.js'; +import * as auth from '../auth/auth-index.js'; import { CorePowerSyncSystem } from '../system/CorePowerSyncSystem.js'; export type Context = { diff --git a/packages/service-core/src/routes/socket-route.ts b/packages/service-core/src/routes/socket-route.ts index 7890fa1f3..23167e757 100644 --- a/packages/service-core/src/routes/socket-route.ts +++ b/packages/service-core/src/routes/socket-route.ts @@ -2,11 +2,11 @@ import { serialize } from 'bson'; import { SyncParameters, normalizeTokenParameters } from '@powersync/service-sync-rules'; import * as micro from '@journeyapps-platform/micro'; -import * as util from '@/util/util-index.js'; +import * as util from '../util/util-index.js'; import { streamResponse } from '../sync/sync.js'; import { SyncRoutes } from './sync-stream.js'; import { SocketRouteGenerator } from './router-socket.js'; -import { Metrics } from '@/metrics/Metrics.js'; +import { Metrics } from '../metrics/Metrics.js'; export const sync_stream_reactive: SocketRouteGenerator = (router) => router.reactiveStream(SyncRoutes.STREAM, { diff --git a/packages/service-core/src/routes/sync-rules.ts b/packages/service-core/src/routes/sync-rules.ts index 548577872..ede9d2457 100644 --- a/packages/service-core/src/routes/sync-rules.ts +++ b/packages/service-core/src/routes/sync-rules.ts @@ -4,7 +4,7 @@ import * as micro from '@journeyapps-platform/micro'; import * as pgwire from '@powersync/service-jpgwire'; import { SqlSyncRules, SyncRulesErrors } from '@powersync/service-sync-rules'; -import * as replication from '@/replication/replication-index.js'; +import * as replication from '../replication/replication-index.js'; import { authApi } from './auth.js'; import { RouteGenerator } from './router.js'; diff --git a/packages/service-core/src/routes/sync-stream.ts b/packages/service-core/src/routes/sync-stream.ts index d99876650..87bb1244d 100644 --- a/packages/service-core/src/routes/sync-stream.ts +++ b/packages/service-core/src/routes/sync-stream.ts @@ -2,12 +2,12 @@ import { Readable } from 'stream'; import * as micro from '@journeyapps-platform/micro'; import { SyncParameters, normalizeTokenParameters } from '@powersync/service-sync-rules'; -import * as sync from '@/sync/sync-index.js'; -import * as util from '@/util/util-index.js'; +import * as sync from '../sync/sync-index.js'; +import * as util from '../util/util-index.js'; import { authUser } from './auth.js'; import { RouteGenerator } from './router.js'; -import { Metrics } from '@/metrics/Metrics.js'; +import { Metrics } from '../metrics/Metrics.js'; export enum SyncRoutes { STREAM = '/sync/stream' diff --git a/packages/service-core/src/storage/BucketStorage.ts b/packages/service-core/src/storage/BucketStorage.ts index 091bdda9a..5796d2159 100644 --- a/packages/service-core/src/storage/BucketStorage.ts +++ b/packages/service-core/src/storage/BucketStorage.ts @@ -8,8 +8,8 @@ import { ToastableSqliteRow } from '@powersync/service-sync-rules'; -import * as 
replication from '@/replication/replication-index.js'; -import * as util from '@/util/util-index.js'; +import * as replication from '../replication/replication-index.js'; +import * as util from '../util/util-index.js'; import { SourceTable } from './SourceTable.js'; export interface BucketStorageFactory { diff --git a/packages/service-core/src/storage/MongoBucketStorage.ts b/packages/service-core/src/storage/MongoBucketStorage.ts index 8e5ef4323..0fadfedfe 100644 --- a/packages/service-core/src/storage/MongoBucketStorage.ts +++ b/packages/service-core/src/storage/MongoBucketStorage.ts @@ -5,9 +5,9 @@ import * as micro from '@journeyapps-platform/micro'; import { SqlSyncRules } from '@powersync/service-sync-rules'; import { wrapWithAbort } from 'ix/asynciterable/operators/withabort.js'; -import * as replication from '@/replication/replication-index.js'; -import * as sync from '@/sync/sync-index.js'; -import * as util from '@/util/util-index.js'; +import * as replication from '../replication/replication-index.js'; +import * as sync from '../sync/sync-index.js'; +import * as util from '../util/util-index.js'; import { ActiveCheckpoint, diff --git a/packages/service-core/src/storage/SourceTable.ts b/packages/service-core/src/storage/SourceTable.ts index b6d748f8c..960ee4881 100644 --- a/packages/service-core/src/storage/SourceTable.ts +++ b/packages/service-core/src/storage/SourceTable.ts @@ -1,7 +1,7 @@ import { DEFAULT_SCHEMA, DEFAULT_TAG } from '@powersync/service-sync-rules'; -import * as replication from '@/replication/replication-index.js'; -import * as util from '@/util/util-index.js'; +import * as replication from '../replication/replication-index.js'; +import * as util from '../util/util-index.js'; export class SourceTable { static readonly DEFAULT_SCHEMA = DEFAULT_SCHEMA; diff --git a/packages/service-core/src/storage/mongo/MongoBucketBatch.ts b/packages/service-core/src/storage/mongo/MongoBucketBatch.ts index bed1bdb5d..75832cbd4 100644 --- a/packages/service-core/src/storage/mongo/MongoBucketBatch.ts +++ b/packages/service-core/src/storage/mongo/MongoBucketBatch.ts @@ -3,8 +3,8 @@ import { SqliteRow, SqlSyncRules } from '@powersync/service-sync-rules'; import * as bson from 'bson'; import * as mongo from 'mongodb'; -import * as util from '@/util/util-index.js'; -import * as replication from '@/replication/replication-index.js'; +import * as util from '../../util/util-index.js'; +import * as replication from '../../replication/replication-index.js'; import { BucketStorageBatch, FlushedResult, mergeToast, SaveOptions } from '../BucketStorage.js'; import { SourceTable } from '../SourceTable.js'; import { PowerSyncMongo } from './db.js'; diff --git a/packages/service-core/src/storage/mongo/MongoSyncBucketStorage.ts b/packages/service-core/src/storage/mongo/MongoSyncBucketStorage.ts index 012a22f5d..58aae774a 100644 --- a/packages/service-core/src/storage/mongo/MongoSyncBucketStorage.ts +++ b/packages/service-core/src/storage/mongo/MongoSyncBucketStorage.ts @@ -2,9 +2,9 @@ import { SqliteJsonRow, SqliteJsonValue, SqlSyncRules } from '@powersync/service import * as bson from 'bson'; import * as mongo from 'mongodb'; -import * as db from '@/db/db-index.js'; -import * as replication from '@/replication/WalStream.js'; -import * as util from '@/util/util-index.js'; +import * as db from '../../db/db-index.js'; +import * as replication from '../../replication/WalStream.js'; +import * as util from '../../util/util-index.js'; import { BucketDataBatchOptions, BucketStorageBatch, diff --git 
a/packages/service-core/src/storage/mongo/OperationBatch.ts b/packages/service-core/src/storage/mongo/OperationBatch.ts index 0de8f6e53..7b2ce7473 100644 --- a/packages/service-core/src/storage/mongo/OperationBatch.ts +++ b/packages/service-core/src/storage/mongo/OperationBatch.ts @@ -1,7 +1,7 @@ import * as bson from 'bson'; import { ToastableSqliteRow } from '@powersync/service-sync-rules'; -import * as util from '@/util/util-index.js'; +import * as util from '../../util/util-index.js'; import { SaveOptions } from '../BucketStorage.js'; /** diff --git a/packages/service-core/src/storage/mongo/PersistedBatch.ts b/packages/service-core/src/storage/mongo/PersistedBatch.ts index e88fcb5e5..e8cb873eb 100644 --- a/packages/service-core/src/storage/mongo/PersistedBatch.ts +++ b/packages/service-core/src/storage/mongo/PersistedBatch.ts @@ -4,7 +4,7 @@ import * as bson from 'bson'; import * as mongo from 'mongodb'; import * as micro from '@journeyapps-platform/micro'; -import * as util from '@/util/util-index.js'; +import * as util from '../../util/util-index.js'; import { SourceTable } from '../SourceTable.js'; import { currentBucketKey } from './MongoBucketBatch.js'; import { MongoIdSequence } from './MongoIdSequence.js'; diff --git a/packages/service-core/src/storage/mongo/db.ts b/packages/service-core/src/storage/mongo/db.ts index 92246b245..5db901899 100644 --- a/packages/service-core/src/storage/mongo/db.ts +++ b/packages/service-core/src/storage/mongo/db.ts @@ -1,7 +1,7 @@ import * as mongo from 'mongodb'; import * as micro from '@journeyapps-platform/micro'; -import * as db from '@/db/db-index.js'; +import * as db from '../../db/db-index.js'; import { BucketDataDocument, diff --git a/packages/service-core/src/sync/sync.ts b/packages/service-core/src/sync/sync.ts index 618a21a79..818d82a22 100644 --- a/packages/service-core/src/sync/sync.ts +++ b/packages/service-core/src/sync/sync.ts @@ -4,13 +4,13 @@ import { SyncParameters } from '@powersync/service-sync-rules'; import { Semaphore } from 'async-mutex'; import { AbortError } from 'ix/aborterror.js'; -import * as auth from '@/auth/auth-index.js'; -import * as storage from '@/storage/storage-index.js'; -import * as util from '@/util/util-index.js'; +import * as auth from '../auth/auth-index.js'; +import * as storage from '../storage/storage-index.js'; +import * as util from '../util/util-index.js'; import { mergeAsyncIterables } from './merge.js'; import { TokenStreamOptions, tokenStream } from './util.js'; -import { Metrics } from '@/metrics/Metrics.js'; +import { Metrics } from '../metrics/Metrics.js'; /** * Maximum number of connections actively fetching data. 
diff --git a/packages/service-core/src/sync/util.ts b/packages/service-core/src/sync/util.ts index 478c35284..b0f6153cc 100644 --- a/packages/service-core/src/sync/util.ts +++ b/packages/service-core/src/sync/util.ts @@ -1,7 +1,7 @@ import * as timers from 'timers/promises'; -import * as util from '@/util/util-index.js'; -import { Metrics } from '@/metrics/Metrics.js'; +import * as util from '../util/util-index.js'; +import { Metrics } from '../metrics/Metrics.js'; export type TokenStreamOptions = { /** diff --git a/packages/service-core/src/system/CorePowerSyncSystem.ts b/packages/service-core/src/system/CorePowerSyncSystem.ts index 64079804d..304d38b5a 100644 --- a/packages/service-core/src/system/CorePowerSyncSystem.ts +++ b/packages/service-core/src/system/CorePowerSyncSystem.ts @@ -1,9 +1,9 @@ import * as pgwire from '@powersync/service-jpgwire'; import * as micro from '@journeyapps-platform/micro'; -import * as auth from '@/auth/auth-index.js'; -import * as storage from '@/storage/storage-index.js'; -import * as utils from '@/util/util-index.js'; +import * as auth from '../auth/auth-index.js'; +import * as storage from '../storage/storage-index.js'; +import * as utils from '../util/util-index.js'; export abstract class CorePowerSyncSystem extends micro.system.MicroSystem { abstract storage: storage.BucketStorageFactory; diff --git a/packages/service-core/src/util/config/compound-config-collector.ts b/packages/service-core/src/util/config/compound-config-collector.ts index ab3a99444..04afa471a 100644 --- a/packages/service-core/src/util/config/compound-config-collector.ts +++ b/packages/service-core/src/util/config/compound-config-collector.ts @@ -2,7 +2,7 @@ import * as micro from '@journeyapps-platform/micro'; import { configFile, normalizeConnection } from '@powersync/service-types'; import { ConfigCollector } from './collectors/config-collector.js'; import { ResolvedConnection, ResolvedPowerSyncConfig, RunnerConfig, SyncRulesConfig } from './types.js'; -import * as auth from '@/auth/auth-index.js'; +import * as auth from '../../auth/auth-index.js'; import { SyncRulesCollector } from './sync-rules/sync-collector.js'; import { Base64ConfigCollector } from './collectors/impl/base64-config-collector.js'; import { FileSystemConfigCollector } from './collectors/impl/filesystem-config-collector.js'; diff --git a/packages/service-core/src/util/pgwire_utils.ts b/packages/service-core/src/util/pgwire_utils.ts index ad579b649..645224504 100644 --- a/packages/service-core/src/util/pgwire_utils.ts +++ b/packages/service-core/src/util/pgwire_utils.ts @@ -6,7 +6,7 @@ import * as pgwire from '@powersync/service-jpgwire'; import { SqliteJsonValue, SqliteRow, ToastableSqliteRow, toSyncRulesRow } from '@powersync/service-sync-rules'; import * as micro from '@journeyapps-platform/micro'; -import * as replication from '@/replication/replication-index.js'; +import * as replication from '../replication/replication-index.js'; /** * pgwire message -> SQLite row. 
diff --git a/packages/service-core/src/util/utils.ts b/packages/service-core/src/util/utils.ts index 4a7bfe462..a10533b86 100644 --- a/packages/service-core/src/util/utils.ts +++ b/packages/service-core/src/util/utils.ts @@ -3,7 +3,7 @@ import * as pgwire from '@powersync/service-jpgwire'; import { pgwireRows } from '@powersync/service-jpgwire'; import * as micro from '@journeyapps-platform/micro'; -import * as storage from '@/storage/storage-index.js'; +import * as storage from '../storage/storage-index.js'; import { BucketChecksum, OpId } from './protocol-types.js'; import { retriedQuery } from './pgwire_utils.js'; diff --git a/packages/service-core/tsconfig.json b/packages/service-core/tsconfig.json index de3c64b93..c7666d1c1 100644 --- a/packages/service-core/tsconfig.json +++ b/packages/service-core/tsconfig.json @@ -6,10 +6,7 @@ "baseUrl": ".", "esModuleInterop": true, "skipLibCheck": true, - "sourceMap": true, - "paths": { - "@/*": ["./src/*"] - } + "sourceMap": true }, "include": ["src"], "references": [ diff --git a/packages/service-core/vitest.config.ts b/packages/service-core/vitest.config.ts index 75bbc73a5..6b7a908fc 100644 --- a/packages/service-core/vitest.config.ts +++ b/packages/service-core/vitest.config.ts @@ -2,10 +2,5 @@ import { defineConfig } from 'vitest/config'; import tsconfigPaths from 'vite-tsconfig-paths'; export default defineConfig({ - plugins: [tsconfigPaths()], - test: { - alias: { - '@/': new URL('./src/', import.meta.url).pathname - } - } + plugins: [tsconfigPaths()] }); diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index b0ce732a8..f9430c404 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -65,6 +65,9 @@ importers: ts-node-dev: specifier: ^2.0.0 version: 2.0.0(@types/node@18.11.11)(typescript@5.2.2) + tsc-watch: + specifier: ^6.2.0 + version: 6.2.0(typescript@5.2.2) typescript: specifier: ~5.2.2 version: 5.2.2 @@ -217,9 +220,6 @@ importers: '@types/uuid': specifier: ^9.0.4 version: 9.0.8 - tsc-alias: - specifier: ^1.8.10 - version: 1.8.10 typescript: specifier: ^5.2.2 version: 5.4.5 @@ -1936,10 +1936,6 @@ packages: commander@2.20.3: resolution: {integrity: sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==} - commander@9.5.0: - resolution: {integrity: sha512-KRs7WVDKg86PWiuAqhDrAQnTXZKraVcCc6vFdL14qrZ/DcWwuRo7VoiYXalXO7S5GKpqYiVEwCbgFDfxNHKJBQ==} - engines: {node: ^12.20.0 || >=14} - concat-map@0.0.1: resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==} @@ -2146,6 +2142,9 @@ packages: resolution: {integrity: sha512-ZmdL2rui+eB2YwhsWzjInR8LldtZHGDoQ1ugH85ppHKwpUHL7j7rN0Ti9NCnGiQbhaZ11FpR+7ao1dNsmduNUg==} engines: {node: '>=12'} + duplexer@0.1.2: + resolution: {integrity: sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg==} + dynamic-dedupe@0.3.0: resolution: {integrity: sha512-ssuANeD+z97meYOqd50e04Ze5qp4bPqo8cCkI4TRjZkzAUgIDTrXV1R8QCdINpiI+hw14+rYazvTRdQrz0/rFQ==} @@ -2248,6 +2247,9 @@ packages: resolution: {integrity: sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==} engines: {node: '>= 0.6'} + event-stream@3.3.4: + resolution: {integrity: sha512-QHpkERcGsR0T7Qm3HNJSyXKEEj8AHNxkY3PK8TS2KJvQ7NiSHe3DDpwVKKtoYprL/AreyzFBeIkBIWChAqn60g==} + event-target-shim@5.0.1: resolution: {integrity: sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==} engines: {node: '>=6'} @@ -2370,6 +2372,9 @@ packages: resolution: 
{integrity: sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==} engines: {node: '>= 0.6'} + from@0.1.7: + resolution: {integrity: sha512-twe20eF1OxVxp/ML/kq2p1uc6KvFK/+vs8WjEbeKmV2He22MKm7YF2ANIt+EOqhJ5L3K/SuuPhk0hWQDjOM23g==} + fs-extra@11.2.0: resolution: {integrity: sha512-PmDi3uwK5nFuXh7XDTlVnS17xJS7vW36is2+w3xcv8SVxiB4NyATf4ctkVY5bkSjX0Y4nbvZCq1/EjtEyr9ktw==} engines: {node: '>=14.14'} @@ -3026,6 +3031,9 @@ packages: resolution: {integrity: sha512-hdN1wVrZbb29eBGiGjJbeP8JbKjq1urkHJ/LIP/NY48MZ1QVXUsQBV1G1zvYFHn1XE06cwjBsOI2K3Ulnj1YXQ==} engines: {node: '>=8'} + map-stream@0.1.0: + resolution: {integrity: sha512-CkYQrPYZfWnu/DAmVCpTSX/xHpKZ80eKh2lAkyA6AJTef6bW+6JpbQZN5rofum7da+SyN1bi5ctTm+lTfcCW3g==} + media-typer@0.3.0: resolution: {integrity: sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==} engines: {node: '>= 0.6'} @@ -3216,10 +3224,6 @@ packages: resolution: {integrity: sha512-avsJQhyd+680gKXyG/sQc0nXaC6rBkPOfyHYcFb9+hdkqQkR9bdnkJ0AMZhke0oesPqIO+mFFJ+IdBc7mst4IA==} engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} - mylas@2.1.13: - resolution: {integrity: sha512-+MrqnJRtxdF+xngFfUUkIMQrUUL0KsxbADUkn23Z/4ibGg192Q+z+CQyiYwvWTsYjJygmMR8+w3ZDa98Zh6ESg==} - engines: {node: '>=12.0.0'} - nanoid@3.3.7: resolution: {integrity: sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==} engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} @@ -3233,6 +3237,9 @@ packages: resolution: {integrity: sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==} engines: {node: '>= 0.6'} + node-cleanup@2.1.2: + resolution: {integrity: sha512-qN8v/s2PAJwGUtr1/hYTpNKlD6Y9rc4p8KSmJXyGdYGZsDGKXrGThikLFP9OCHFeLeEpQzPwiAtdIvBLqm//Hw==} + node-domexception@1.0.0: resolution: {integrity: sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==} engines: {node: '>=10.5.0'} @@ -3485,6 +3492,9 @@ packages: pathval@1.1.1: resolution: {integrity: sha512-Dp6zGqpTdETdR63lehJYPeIOqpiNBNtc7BpWSLrOje7UaIsE5aY92r/AunQA7rsXvet3lrJ3JnZX29UPTKXyKQ==} + pause-stream@0.0.11: + resolution: {integrity: sha512-e3FBlXLmN/D1S+zHzanP4E/4Z60oFAa3O051qt1pxa7DEJWKAyil6upYVXCWadEnuoqa4Pkc9oUx9zsxYeRv8A==} + pgsql-ast-parser@11.2.0: resolution: {integrity: sha512-/8KCcQjePoQDOtfZQuoV/4Y3WpmQVp7E+RFayAdjJpdBdu2dBnKnuQe9XU4g5Td5qC0G+i/fFK/DlNjvWwg+FA==} @@ -3521,10 +3531,6 @@ packages: pkg-types@1.1.1: resolution: {integrity: sha512-ko14TjmDuQJ14zsotODv7dBlwxKhUKQEhuhmbqo1uCi9BB0Z2alo/wAXg6q1dTR5TyuqYyWhjtfe/Tsh+X28jQ==} - plimit-lit@1.6.1: - resolution: {integrity: sha512-B7+VDyb8Tl6oMJT9oSO2CW8XC/T4UcJGrwOVoNGwOQsQYhlpfajmrMj5xeejqaASq3V/EqThyOeATEOMuSEXiA==} - engines: {node: '>=12'} - possible-typed-array-names@1.0.0: resolution: {integrity: sha512-d7Uw+eZoloe0EHDIYoe+bQ5WXnGMOpmiZFTuMWCwpjzzkL2nTjcKiAk4hh8TjnGye2TwWOk3UXucZ+3rbmBa8Q==} engines: {node: '>= 0.4'} @@ -3602,6 +3608,11 @@ packages: resolution: {integrity: sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==} engines: {node: '>= 0.10'} + ps-tree@1.2.0: + resolution: {integrity: sha512-0VnamPPYHl4uaU/nSFeZZpR21QAWRz+sRv4iW9+v/GS/J5U5iZB5BNN6J0RMoOvdx2gWM2+ZFMIm58q24e4UYA==} + engines: {node: '>= 0.10'} + hasBin: true + pseudomap@1.0.2: resolution: {integrity: sha512-b/YwNhb8lk1Zz2+bXXpS/LK9OisiZZ1SNsSLxN1x2OXVEhW2Ckr/7mWE5vrC1ZTiJlD9g19jWszTmJsB+oEpFQ==} @@ -3620,10 +3631,6 @@ packages: resolution: {integrity: 
sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==} engines: {node: '>=0.6'} - queue-lit@1.5.2: - resolution: {integrity: sha512-tLc36IOPeMAubu8BkW8YDBV+WyIgKlYU7zUNs0J5Vk9skSZ4JfGlPOqplP0aHdfv7HL0B2Pg6nwiq60Qc6M2Hw==} - engines: {node: '>=12'} - queue-microtask@1.2.3: resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==} @@ -4041,6 +4048,9 @@ packages: split@0.2.10: resolution: {integrity: sha512-e0pKq+UUH2Xq/sXbYpZBZc3BawsfDZ7dgv+JtRTUPNcvF5CMR4Y9cvJqkMY0MoxWzTHvZuz1beg6pNEKlszPiQ==} + split@0.3.3: + resolution: {integrity: sha512-wD2AeVmxXRBoX44wAycgjVpMhvbwdI2aZjCkvfNcH1YqHQvJVa1duWc73OyVGJUc05fhFaTZeQ/PYsrmyH0JVA==} + sprintf-js@1.0.3: resolution: {integrity: sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==} @@ -4069,9 +4079,16 @@ packages: std-env@3.7.0: resolution: {integrity: sha512-JPbdCEQLj1w5GilpiHAx3qJvFndqybBysA3qUOnznweH4QbNYUsW/ea8QzSrnh0vNsezMMw5bcVool8lM0gwzg==} + stream-combiner@0.0.4: + resolution: {integrity: sha512-rT00SPnTVyRsaSz5zgSPma/aHSOic5U1prhYdRy5HS2kTZviFpmDgzilbtsJsxiroqACmayynDN/9VzIbX5DOw==} + stream-transform@2.1.3: resolution: {integrity: sha512-9GHUiM5hMiCi6Y03jD2ARC1ettBXkQBoQAe7nJsPknnI0ow10aXjTnew8QtYQmLjzn974BnmWEAJgCY6ZP1DeQ==} + string-argv@0.3.2: + resolution: {integrity: sha512-aqD2Q0144Z+/RqG52NeHEkZauTAUWJO8c6yTftGJKO3Tja5tUgIfmIl6kExvhtxSDP7fXB6DvzkfMpCd/F3G+Q==} + engines: {node: '>=0.6.19'} + string-template@0.2.1: resolution: {integrity: sha512-Yptehjogou2xm4UJbxJ4CxgZx12HBfeystp0y3x7s4Dj32ltVVG1Gg8YhKjHZkHicuKpZX/ffilA8505VbUbpw==} @@ -4251,9 +4268,12 @@ packages: engines: {node: '>=4.2.0'} hasBin: true - tsc-alias@1.8.10: - resolution: {integrity: sha512-Ibv4KAWfFkFdKJxnWfVtdOmB0Zi1RJVxcbPGiCDsFpCQSsmpWyuzHG3rQyI5YkobWwxFPEyQfu1hdo4qLG2zPw==} + tsc-watch@6.2.0: + resolution: {integrity: sha512-2LBhf9kjKXnz7KQ/puLHlozMzzUNHAdYBNMkg3eksQJ9GBAgMg8czznM83T5PmsoUvDnXzfIeQn2lNcIYDr8LA==} + engines: {node: '>=12.12.0'} hasBin: true + peerDependencies: + typescript: '*' tsconfck@3.0.3: resolution: {integrity: sha512-4t0noZX9t6GcPTfBAbIbbIU4pfpCwh0ueq3S4O/5qXI1VwK1outmxhe9dOiEWqMz3MW2LKgDTpqWV+37IWuVbA==} @@ -6786,8 +6806,6 @@ snapshots: commander@2.20.3: {} - commander@9.5.0: {} - concat-map@0.0.1: {} concurrently@8.2.2: @@ -6985,6 +7003,8 @@ snapshots: dotenv@16.4.5: {} + duplexer@0.1.2: {} + dynamic-dedupe@0.3.0: dependencies: xtend: 4.0.2 @@ -7139,6 +7159,16 @@ snapshots: etag@1.8.1: {} + event-stream@3.3.4: + dependencies: + duplexer: 0.1.2 + from: 0.1.7 + map-stream: 0.1.0 + pause-stream: 0.0.11 + split: 0.3.3 + stream-combiner: 0.0.4 + through: 2.3.8 + event-target-shim@5.0.1: {} events@3.3.0: {} @@ -7327,6 +7357,8 @@ snapshots: fresh@0.5.2: {} + from@0.1.7: {} + fs-extra@11.2.0: dependencies: graceful-fs: 4.2.11 @@ -8027,6 +8059,8 @@ snapshots: map-obj@4.3.0: {} + map-stream@0.1.0: {} + media-typer@0.3.0: {} memory-pager@1.5.0: {} @@ -8190,8 +8224,6 @@ snapshots: mute-stream@1.0.0: {} - mylas@2.1.13: {} - nanoid@3.3.7: {} nearley@2.20.1: @@ -8203,6 +8235,8 @@ snapshots: negotiator@0.6.3: {} + node-cleanup@2.1.2: {} + node-domexception@1.0.0: {} node-fetch@2.7.0(encoding@0.1.13): @@ -8530,6 +8564,10 @@ snapshots: pathval@1.1.1: {} + pause-stream@0.0.11: + dependencies: + through: 2.3.8 + pgsql-ast-parser@11.2.0: dependencies: moo: 0.5.2 @@ -8574,10 +8612,6 @@ snapshots: mlly: 1.7.0 pathe: 1.1.2 - plimit-lit@1.6.1: - dependencies: - queue-lit: 1.5.2 - possible-typed-array-names@1.0.0: 
{} postcss@8.4.38: @@ -8664,6 +8698,10 @@ snapshots: forwarded: 0.2.0 ipaddr.js: 1.9.1 + ps-tree@1.2.0: + dependencies: + event-stream: 3.3.4 + pseudomap@1.0.2: {} pstree.remy@1.1.8: {} @@ -8678,8 +8716,6 @@ snapshots: dependencies: side-channel: 1.0.6 - queue-lit@1.5.2: {} - queue-microtask@1.2.3: {} quick-format-unescaped@4.0.4: {} @@ -9168,6 +9204,10 @@ snapshots: dependencies: through: 2.3.8 + split@0.3.3: + dependencies: + through: 2.3.8 + sprintf-js@1.0.3: {} sprintf-js@1.1.3: {} @@ -9190,10 +9230,16 @@ snapshots: std-env@3.7.0: {} + stream-combiner@0.0.4: + dependencies: + duplexer: 0.1.2 + stream-transform@2.1.3: dependencies: mixme: 0.5.10 + string-argv@0.3.2: {} + string-template@0.2.1: {} string-width@4.2.3: @@ -9382,14 +9428,13 @@ snapshots: source-map-support: 0.5.21 yn: 2.0.0 - tsc-alias@1.8.10: + tsc-watch@6.2.0(typescript@5.2.2): dependencies: - chokidar: 3.6.0 - commander: 9.5.0 - globby: 11.1.0 - mylas: 2.1.13 - normalize-path: 3.0.0 - plimit-lit: 1.6.1 + cross-spawn: 7.0.3 + node-cleanup: 2.1.2 + ps-tree: 1.2.0 + string-argv: 0.3.2 + typescript: 5.2.2 tsconfck@3.0.3(typescript@5.4.5): optionalDependencies: diff --git a/service/package.json b/service/package.json index 880404b48..d4578bbb2 100644 --- a/service/package.json +++ b/service/package.json @@ -6,22 +6,22 @@ "type": "module", "scripts": { "build": "tsc -b", - "watch": "nodemon -w lib -x node lib/entry.js start", + "watch": "nodemon -w ../ -e ts -e js --delay 1 -x node --loader ts-node/esm src/entry.ts start", "clean": "rm -rf ./lib && tsc -b --clean" }, "dependencies": { "@fastify/cors": "8.4.1", "@journeyapps-platform/micro": "^17.0.1", "@journeyapps-platform/micro-migrate": "^4.0.1", - "@powersync/service-types": "workspace:*", - "@powersync/service-jpgwire": "workspace:*", - "@powersync/service-jsonbig": "workspace:*", - "@powersync/service-sync-rules": "workspace:*", - "@powersync/service-rsocket-router": "workspace:*", - "@powersync/service-core": "workspace:*", "@opentelemetry/api": "~1.6.0", "@opentelemetry/exporter-prometheus": "^0.43.0", "@opentelemetry/sdk-metrics": "^1.17.0", + "@powersync/service-core": "workspace:*", + "@powersync/service-jpgwire": "workspace:*", + "@powersync/service-jsonbig": "workspace:*", + "@powersync/service-rsocket-router": "workspace:*", + "@powersync/service-sync-rules": "workspace:*", + "@powersync/service-types": "workspace:*", "async-mutex": "^0.5.0", "bson": "^6.6.0", "commander": "^12.0.0", From b255fb0610bb5527f02abd0565e83f9089601027 Mon Sep 17 00:00:00 2001 From: Steven Ontong Date: Wed, 12 Jun 2024 14:11:07 +0200 Subject: [PATCH 03/36] add base tsconfig --- packages/jpgwire/tsconfig.json | 4 ++-- packages/jsonbig/tsconfig.json | 2 +- packages/rsocket-router/tsconfig.json | 2 +- packages/service-core/tsconfig.json | 2 +- packages/sync-rules/tsconfig.json | 2 +- packages/types/tsconfig.json | 4 ++-- service/Dockerfile | 2 +- service/tsconfig.json | 2 +- tsconfig.base.json | 15 +++++++++++++++ 9 files changed, 25 insertions(+), 10 deletions(-) create mode 100644 tsconfig.base.json diff --git a/packages/jpgwire/tsconfig.json b/packages/jpgwire/tsconfig.json index d0ef51e52..f84b49829 100644 --- a/packages/jpgwire/tsconfig.json +++ b/packages/jpgwire/tsconfig.json @@ -1,5 +1,5 @@ { - "extends": "@journeyapps-platform/micro-dev/tsconfig.base.json", + "extends": "../../tsconfig.base.json", "compilerOptions": { "outDir": "dist", "declarationDir": "dist", @@ -14,7 +14,7 @@ "path": "../types" }, { - "path": "../jsonbig" + "path": "../jsonbig" } ] } diff --git 
a/packages/jsonbig/tsconfig.json b/packages/jsonbig/tsconfig.json index 14299df6f..302128830 100644 --- a/packages/jsonbig/tsconfig.json +++ b/packages/jsonbig/tsconfig.json @@ -1,5 +1,5 @@ { - "extends": "@journeyapps-platform/micro-dev/tsconfig.base.json", + "extends": "../../tsconfig.base.json", "compilerOptions": { "outDir": "dist", "declarationDir": "dist", diff --git a/packages/rsocket-router/tsconfig.json b/packages/rsocket-router/tsconfig.json index 2d7e272d0..d37ee074d 100644 --- a/packages/rsocket-router/tsconfig.json +++ b/packages/rsocket-router/tsconfig.json @@ -1,5 +1,5 @@ { - "extends": "@journeyapps-platform/micro-dev/tsconfig.base.json", + "extends": "../../tsconfig.base.json", "compilerOptions": { "outDir": "dist", "declarationDir": "dist", diff --git a/packages/service-core/tsconfig.json b/packages/service-core/tsconfig.json index c7666d1c1..fd340e1cd 100644 --- a/packages/service-core/tsconfig.json +++ b/packages/service-core/tsconfig.json @@ -1,5 +1,5 @@ { - "extends": "@journeyapps-platform/micro-dev/tsconfig.base.json", + "extends": "../../tsconfig.base.json", "compilerOptions": { "rootDir": "src", "outDir": "dist", diff --git a/packages/sync-rules/tsconfig.json b/packages/sync-rules/tsconfig.json index 21be702db..c2e4b7818 100644 --- a/packages/sync-rules/tsconfig.json +++ b/packages/sync-rules/tsconfig.json @@ -1,5 +1,5 @@ { - "extends": "@journeyapps-platform/micro-dev/tsconfig.base.json", + "extends": "../../tsconfig.base.json", "compilerOptions": { "outDir": "dist", "declarationDir": "dist", diff --git a/packages/types/tsconfig.json b/packages/types/tsconfig.json index 8b32f09bd..1e693739e 100644 --- a/packages/types/tsconfig.json +++ b/packages/types/tsconfig.json @@ -1,10 +1,10 @@ { - "extends": "@journeyapps-platform/micro-dev/tsconfig.base.json", + "extends": "../../tsconfig.base.json", "compilerOptions": { "outDir": "dist", "declarationDir": "dist", "rootDir": "src", "skipLibCheck": true }, - "include": ["src"], + "include": ["src"] } diff --git a/service/Dockerfile b/service/Dockerfile index 527bf2ee6..dbb1a33e6 100644 --- a/service/Dockerfile +++ b/service/Dockerfile @@ -9,7 +9,7 @@ RUN echo //npm.pkg.github.com/:_authToken=\${GITHUB_TOKEN} >> ~/.npmrc && \ echo //npm.pkg.github.com/journeyapps-platform/:_authToken=\${GITHUB_TOKEN} >> ~/.npmrc && \ echo @journeyapps-platform:registry=https://npm.pkg.github.com/journeyapps-platform/ >> ~/.npmrc -COPY package.json pnpm-workspace.yaml pnpm-lock.yaml ./ +COPY package.json pnpm-workspace.yaml pnpm-lock.yaml tsconfig.base.json ./ COPY service/package.json service/tsconfig.json service/ COPY packages/jpgwire/package.json packages/jpgwire/tsconfig.json packages/jpgwire/ diff --git a/service/tsconfig.json b/service/tsconfig.json index c3388d0a9..7efe0ecef 100644 --- a/service/tsconfig.json +++ b/service/tsconfig.json @@ -1,5 +1,5 @@ { - "extends": "@journeyapps-platform/micro-dev/tsconfig.base.json", + "extends": "../tsconfig.base.json", "compilerOptions": { "rootDir": "src", "outDir": "lib", diff --git a/tsconfig.base.json b/tsconfig.base.json new file mode 100644 index 000000000..6afdcd92f --- /dev/null +++ b/tsconfig.base.json @@ -0,0 +1,15 @@ +{ + "compilerOptions": { + "lib": ["ES2021"], + "target": "ES2020", + "module": "NodeNext", + "moduleResolution": "NodeNext", + "strict": true, + "composite": true, + "declaration": true, + "resolveJsonModule": true, + "skipLibCheck": true, + "sourceMap": true, + "useUnknownInCatchVariables": false + } +} From 9278764d54bd69928cab592825be4c9094efc0a1 Mon Sep 17 00:00:00 
2001 From: Steven Ontong Date: Wed, 12 Jun 2024 14:14:31 +0200 Subject: [PATCH 04/36] remove micro dev packages --- package.json | 7 ----- packages/rsocket-router/tests/tsconfig.json | 31 ++++++++++----------- packages/service-core/package.json | 1 - packages/service-core/test/tsconfig.json | 2 +- service/package.json | 1 - 5 files changed, 16 insertions(+), 26 deletions(-) diff --git a/package.json b/package.json index eb0097a97..7ed484b4d 100644 --- a/package.json +++ b/package.json @@ -23,13 +23,6 @@ }, "devDependencies": { "@changesets/cli": "^2.27.3", - "@journeyapps-platform/deploy-cli": "^4.4.6", - "@journeyapps-platform/deploy-config": "^3.1.0", - "@journeyapps-platform/deploy-transformers": "^1.2.32", - "@journeyapps-platform/formatter-cli": "^3.1.9", - "@journeyapps-platform/lerno": "^1.0.3", - "@journeyapps-platform/micro-cli": "^7.2.6", - "@journeyapps-platform/micro-dev": "^1.6.7", "@types/node": "18.11.11", "async": "^3.2.4", "bson": "^6.6.0", diff --git a/packages/rsocket-router/tests/tsconfig.json b/packages/rsocket-router/tests/tsconfig.json index ff3dd489c..e8c037025 100644 --- a/packages/rsocket-router/tests/tsconfig.json +++ b/packages/rsocket-router/tests/tsconfig.json @@ -1,17 +1,16 @@ { - "extends": "@journeyapps-platform/micro-dev/tsconfig.base.json", - "compilerOptions": { - "rootDir": "src", - "noEmit": true, - "esModuleInterop": true, - "skipLibCheck": true, - "sourceMap": true - }, - "include": ["src"], - "references": [ - { - "path": "../" - } - ] - } - \ No newline at end of file + "extends": "../../../../tsconfig.base.json", + "compilerOptions": { + "rootDir": "src", + "noEmit": true, + "esModuleInterop": true, + "skipLibCheck": true, + "sourceMap": true + }, + "include": ["src"], + "references": [ + { + "path": "../" + } + ] +} diff --git a/packages/service-core/package.json b/packages/service-core/package.json index b42808f82..59f1e071f 100644 --- a/packages/service-core/package.json +++ b/packages/service-core/package.json @@ -45,7 +45,6 @@ "yaml": "^2.3.2" }, "devDependencies": { - "@journeyapps-platform/micro-dev": "^1.6.7", "@types/uuid": "^9.0.4", "typescript": "^5.2.2", "vite-tsconfig-paths": "^4.3.2", diff --git a/packages/service-core/test/tsconfig.json b/packages/service-core/test/tsconfig.json index 904459e13..4e77a4239 100644 --- a/packages/service-core/test/tsconfig.json +++ b/packages/service-core/test/tsconfig.json @@ -1,5 +1,5 @@ { - "extends": "@journeyapps-platform/micro-dev/tsconfig.base.json", + "extends": "../../../tsconfig.base.json", "compilerOptions": { "rootDir": "src", "noEmit": true, diff --git a/service/package.json b/service/package.json index d4578bbb2..e6a9dc1bf 100644 --- a/service/package.json +++ b/service/package.json @@ -39,7 +39,6 @@ "yaml": "^2.3.2" }, "devDependencies": { - "@journeyapps-platform/micro-dev": "^1.6.7", "@types/uuid": "^9.0.4", "copyfiles": "^2.4.1", "nodemon": "^3.0.1", From 299ac0ed17d91adcd7559ba6df7e53ffb686010d Mon Sep 17 00:00:00 2001 From: Steven Ontong Date: Wed, 12 Jun 2024 17:31:30 +0200 Subject: [PATCH 05/36] use winston logger --- package.json | 1 - packages/service-core/package.json | 7 +- packages/service-core/src/api/diagnostics.ts | 4 +- packages/service-core/src/auth/KeyStore.ts | 4 +- packages/service-core/src/entry/cli-entry.ts | 5 +- packages/service-core/src/index.ts | 3 +- packages/service-core/src/metrics/Metrics.ts | 12 +- .../service-core/src/replication/WalStream.ts | 24 +- .../src/replication/WalStreamManager.ts | 20 +- .../src/replication/WalStreamRunner.ts | 12 +- 
.../service-core/src/routes/socket-route.ts | 3 +- .../service-core/src/routes/sync-stream.ts | 3 +- packages/service-core/src/runner/teardown.ts | 4 +- .../src/storage/MongoBucketStorage.ts | 8 +- .../src/storage/mongo/MongoBucketBatch.ts | 17 +- .../src/storage/mongo/MongoSyncRulesLock.ts | 6 +- .../src/storage/mongo/PersistedBatch.ts | 4 +- packages/service-core/src/sync/sync.ts | 8 +- packages/service-core/src/system/Logger.ts | 8 + .../service-core/src/system/system-index.ts | 2 + .../impl/filesystem-config-collector.ts | 4 +- .../util/config/compound-config-collector.ts | 6 +- packages/service-core/src/util/env.ts | 4 +- .../service-core/src/util/memory-tracking.ts | 4 +- .../service-core/src/util/pgwire_utils.ts | 4 +- packages/service-core/src/util/utils.ts | 8 +- packages/service-core/tsconfig.json | 1 - pnpm-lock.yaml | 1277 +++-------------- service/package.json | 1 + service/src/entry.ts | 11 +- 30 files changed, 277 insertions(+), 1198 deletions(-) create mode 100644 packages/service-core/src/system/Logger.ts create mode 100644 packages/service-core/src/system/system-index.ts diff --git a/package.json b/package.json index 7ed484b4d..a5a5f4ace 100644 --- a/package.json +++ b/package.json @@ -7,7 +7,6 @@ "keywords": [], "author": "JourneyApps Platform", "scripts": { - "prepare": "journey-formatter install", "check-updates": "ncu -u --deep", "format": "journey-formatter format", "build": "pnpm run -r build && pnpm run -r build:tests", diff --git a/packages/service-core/package.json b/packages/service-core/package.json index 59f1e071f..f7825abc6 100644 --- a/packages/service-core/package.json +++ b/packages/service-core/package.json @@ -19,15 +19,15 @@ "@journeyapps-platform/micro": "^17.0.1", "@journeyapps-platform/micro-migrate": "^4.0.1", "@opentelemetry/api": "~1.8.0", - "@opentelemetry/resources": "^1.24.1", - "@opentelemetry/exporter-prometheus": "^0.51.1", "@opentelemetry/exporter-metrics-otlp-http": "^0.51.1", + "@opentelemetry/exporter-prometheus": "^0.51.1", + "@opentelemetry/resources": "^1.24.1", "@opentelemetry/sdk-metrics": "1.24.1", "@powersync/service-jpgwire": "workspace:*", "@powersync/service-jsonbig": "workspace:*", "@powersync/service-rsocket-router": "workspace:*", - "@powersync/service-types": "workspace:*", "@powersync/service-sync-rules": "workspace:*", + "@powersync/service-types": "workspace:*", "async-mutex": "^0.5.0", "bson": "^6.6.0", "commander": "^12.0.0", @@ -42,6 +42,7 @@ "pgwire": "github:kagis/pgwire#f1cb95f9a0f42a612bb5a6b67bb2eb793fc5fc87", "ts-codec": "^1.2.2", "uuid": "^9.0.1", + "winston": "^3.13.0", "yaml": "^2.3.2" }, "devDependencies": { diff --git a/packages/service-core/src/api/diagnostics.ts b/packages/service-core/src/api/diagnostics.ts index a2efdf31b..125259397 100644 --- a/packages/service-core/src/api/diagnostics.ts +++ b/packages/service-core/src/api/diagnostics.ts @@ -1,4 +1,3 @@ -import * as micro from '@journeyapps-platform/micro'; import { DEFAULT_TAG, SourceTableInterface, SqlSyncRules } from '@powersync/service-sync-rules'; import { pgwireRows } from '@powersync/service-jpgwire'; import { ConnectionStatus, SyncRulesStatus, TableInfo, baseUri } from '@powersync/service-types'; @@ -8,6 +7,7 @@ import * as storage from '../storage/storage-index.js'; import * as util from '../util/util-index.js'; import { CorePowerSyncSystem } from '../system/CorePowerSyncSystem.js'; +import { logger } from '../system/Logger.js'; export async function getConnectionStatus(system: CorePowerSyncSystem): Promise { if (system.pgwire_pool == null) { 
@@ -134,7 +134,7 @@ export async function getSyncRulesStatus( } } catch (e) { // Ignore - micro.logger.warn(`Unable to get replication lag`, e); + logger.warn(`Unable to get replication lag`, e); } } } else { diff --git a/packages/service-core/src/auth/KeyStore.ts b/packages/service-core/src/auth/KeyStore.ts index 00f6877c5..fdd214117 100644 --- a/packages/service-core/src/auth/KeyStore.ts +++ b/packages/service-core/src/auth/KeyStore.ts @@ -2,8 +2,8 @@ import * as jose from 'jose'; import secs from '../util/secs.js'; import { KeyOptions, KeySpec, SUPPORTED_ALGORITHMS } from './KeySpec.js'; import { KeyCollector } from './KeyCollector.js'; -import * as micro from '@journeyapps-platform/micro'; import { JwtPayload } from './JwtPayload.js'; +import { logger } from '../system/Logger.js'; /** * KeyStore to get keys and verify tokens. @@ -145,7 +145,7 @@ export class KeyStore { this.collector.noKeyFound?.().catch((e) => { // Typically this error would be stored on the collector. // This is just a last resort error handling. - micro.logger.error(`Failed to refresh keys`, e); + logger.error(`Failed to refresh keys`, e); }); throw new jose.errors.JOSEError( diff --git a/packages/service-core/src/entry/cli-entry.ts b/packages/service-core/src/entry/cli-entry.ts index c5f011bd8..ee216baae 100644 --- a/packages/service-core/src/entry/cli-entry.ts +++ b/packages/service-core/src/entry/cli-entry.ts @@ -1,11 +1,10 @@ import { Command } from 'commander'; -import * as micro from '@journeyapps-platform/micro'; - import * as utils from '../util/util-index.js'; import { registerMigrationAction } from './commands/migrate-action.js'; import { registerTearDownAction } from './commands/teardown-action.js'; import { registerStartAction } from './entry-index.js'; +import { logger } from '../system/Logger.js'; /** * Generates a Commander program which serves as the entry point @@ -33,7 +32,7 @@ export function generateEntryProgram(startHandlers?: Record { @@ -212,7 +212,7 @@ Anonymous telemetry is currently: ${options.disable_telemetry_sharing ? 
'disable function getMetrics() { if (cachedRequest == null || Date.now() - cacheTimestamp > MINIMUM_INTERVAL) { cachedRequest = system.storage.getStorageMetrics().catch((e) => { - micro.logger.error(`Failed to get storage metrics`, e); + logger.error(`Failed to get storage metrics`, e); return null; }); cacheTimestamp = Date.now(); diff --git a/packages/service-core/src/replication/WalStream.ts b/packages/service-core/src/replication/WalStream.ts index d85f5f6d0..15c187959 100644 --- a/packages/service-core/src/replication/WalStream.ts +++ b/packages/service-core/src/replication/WalStream.ts @@ -1,7 +1,6 @@ import * as fs from 'fs/promises'; import * as pgwire from '@powersync/service-jpgwire'; import * as micro from '@journeyapps-platform/micro'; -import { logger } from '@journeyapps-platform/micro'; import { SqliteRow, SqlSyncRules, TablePattern, toSyncRulesRow } from '@powersync/service-sync-rules'; import * as storage from '../storage/storage-index.js'; @@ -11,6 +10,7 @@ import { getPgOutputRelation, getRelId, PgRelation } from './PgRelation.js'; import { getReplicationIdentityColumns } from './util.js'; import { WalConnection } from './WalConnection.js'; import { Metrics } from '../metrics/Metrics.js'; +import { logger } from '../system/Logger.js'; export const ZERO_LSN = '00000000/00000000'; @@ -160,7 +160,7 @@ export class WalStream { ] }); if (rs.rows.length == 0) { - micro.logger.info(`Skipping ${tablePattern.schema}.${name} - not part of ${this.publication_name} publication`); + logger.info(`Skipping ${tablePattern.schema}.${name} - not part of ${this.publication_name} publication`); continue; } @@ -190,7 +190,7 @@ export class WalStream { const status = await this.storage.getStatus(); if (status.snapshot_done && status.checkpoint_lsn) { - micro.logger.info(`${slotName} Initial replication already done`); + logger.info(`${slotName} Initial replication already done`); let last_error = null; @@ -222,11 +222,11 @@ export class WalStream { ] }); // Success - micro.logger.info(`Slot ${slotName} appears healthy`); + logger.info(`Slot ${slotName} appears healthy`); return { needsInitialSync: false }; } catch (e) { last_error = e; - micro.logger.warn(`${slotName} Replication slot error`, e); + logger.warn(`${slotName} Replication slot error`, e); if (this.stopped) { throw e; @@ -253,7 +253,7 @@ export class WalStream { // Sample: publication "powersync" does not exist // Happens when publication deleted or never created. // Slot must be re-created in this case. - micro.logger.info(`${slotName} does not exist anymore, will create new slot`); + logger.info(`${slotName} does not exist anymore, will create new slot`); throw new MissingReplicationSlotError(`Replication slot ${slotName} does not exist anymore`); } @@ -316,7 +316,7 @@ WHERE oid = $1::regclass`, // with streaming replication. const lsn = pgwire.lsnMakeComparable(row[1]); const snapshot = row[2]; - micro.logger.info(`Created replication slot ${slotName} at ${lsn} with snapshot ${snapshot}`); + logger.info(`Created replication slot ${slotName} at ${lsn} with snapshot ${snapshot}`); // https://stackoverflow.com/questions/70160769/postgres-logical-replication-starting-from-given-lsn await db.query('BEGIN'); @@ -338,9 +338,9 @@ WHERE oid = $1::regclass`, // On Supabase, the default is 2 minutes. 
await db.query(`set local statement_timeout = 0`); - micro.logger.info(`${slotName} Starting initial replication`); + logger.info(`${slotName} Starting initial replication`); await this.initialReplication(db, lsn); - micro.logger.info(`${slotName} Initial replication done`); + logger.info(`${slotName} Initial replication done`); await db.query('COMMIT'); } catch (e) { await db.query('ROLLBACK'); @@ -371,7 +371,7 @@ WHERE oid = $1::regclass`, } private async snapshotTable(batch: storage.BucketStorageBatch, db: pgwire.PgConnection, table: storage.SourceTable) { - micro.logger.info(`${this.slot_name} Replicating ${table.qualifiedName}`); + logger.info(`${this.slot_name} Replicating ${table.qualifiedName}`); const estimatedCount = await this.estimatedCount(db, table); let at = 0; const cursor = await db.stream({ statement: `SELECT * FROM ${table.escapedIdentifier}` }); @@ -393,7 +393,7 @@ WHERE oid = $1::regclass`, return q; }); if (at % 5000 == 0 && rows.length > 0) { - micro.logger.info(`${this.slot_name} Replicating ${table.qualifiedName} ${at}/${estimatedCount}`); + logger.info(`${this.slot_name} Replicating ${table.qualifiedName} ${at}/${estimatedCount}`); } if (this.abort_signal.aborted) { throw new Error(`Aborted initial replication of ${this.slot_name}`); @@ -583,7 +583,7 @@ WHERE oid = $1::regclass`, await this.ack(msg.lsn!, replicationStream); } else { if (count % 100 == 0) { - micro.logger.info(`${this.slot_name} replicating op ${count} ${msg.lsn}`); + logger.info(`${this.slot_name} replicating op ${count} ${msg.lsn}`); } count += 1; diff --git a/packages/service-core/src/replication/WalStreamManager.ts b/packages/service-core/src/replication/WalStreamManager.ts index e817b564d..ffcb264ab 100644 --- a/packages/service-core/src/replication/WalStreamManager.ts +++ b/packages/service-core/src/replication/WalStreamManager.ts @@ -1,5 +1,4 @@ import * as pgwire from '@powersync/service-jpgwire'; -import * as micro from '@journeyapps-platform/micro'; import { hrtime } from 'node:process'; import * as storage from '../storage/storage-index.js'; @@ -9,6 +8,7 @@ import { DefaultErrorRateLimiter } from './ErrorRateLimiter.js'; import { touch } from './WalStream.js'; import { WalStreamRunner } from './WalStreamRunner.js'; import { CorePowerSyncSystem } from '../system/CorePowerSyncSystem.js'; +import { logger } from '../system/Logger.js'; // 5 minutes const PING_INTERVAL = 1_000_000_000n * 300n; @@ -37,7 +37,7 @@ export class WalStreamManager { start() { this.runLoop().catch((e) => { - micro.logger.error(`Fatal WalStream error`, e); + logger.error(`Fatal WalStream error`, e); util.captureException(e); setTimeout(() => { process.exit(1); @@ -58,7 +58,7 @@ export class WalStreamManager { const configured_sync_rules = await util.loadSyncRules(this.system.config); let configured_lock: storage.ReplicationLock | undefined = undefined; if (configured_sync_rules != null) { - micro.logger.info('Loading sync rules from configuration'); + logger.info('Loading sync rules from configuration'); try { // Configure new sync rules, if it has changed. // In that case, also immediately take out a lock, so that another process doesn't start replication on it. 
@@ -70,10 +70,10 @@ export class WalStreamManager { } } catch (e) { // Log, but continue with previous sync rules - micro.logger.error(`Failed to load sync rules from configuration`, e); + logger.error(`Failed to load sync rules from configuration`, e); } } else { - micro.logger.info('No sync rules configured - configure via API'); + logger.info('No sync rules configured - configure via API'); } while (!this.stopped) { await touch(); @@ -93,7 +93,7 @@ export class WalStreamManager { } } } catch (e) { - micro.logger.error(`Failed to refresh wal streams`, e); + logger.error(`Failed to refresh wal streams`, e); } await new Promise((resolve) => setTimeout(resolve, 5000)); } @@ -117,7 +117,7 @@ export class WalStreamManager { try { await db.query(`SELECT * FROM pg_logical_emit_message(false, 'powersync', 'ping')`); } catch (e) { - micro.logger.warn(`Failed to ping`, e); + logger.warn(`Failed to ping`, e); } this.lastPing = now; } @@ -168,7 +168,7 @@ export class WalStreamManager { // for example from stricter validation that was added. // This will be retried every couple of seconds. // When new (valid) sync rules are deployed and processed, this one be disabled. - micro.logger.error(`Failed to start replication for ${syncRules.slot_name}`, e); + logger.error(`Failed to start replication for ${syncRules.slot_name}`, e); } } } @@ -184,7 +184,7 @@ export class WalStreamManager { await stream.terminate(); } catch (e) { // This will be retried - micro.logger.warn(`Failed to terminate ${stream.slot_name}`, e); + logger.warn(`Failed to terminate ${stream.slot_name}`, e); } } @@ -207,7 +207,7 @@ export class WalStreamManager { await lock.release(); } } catch (e) { - micro.logger.warn(`Failed to terminate ${syncRules.slot_name}`, e); + logger.warn(`Failed to terminate ${syncRules.slot_name}`, e); } } } diff --git a/packages/service-core/src/replication/WalStreamRunner.ts b/packages/service-core/src/replication/WalStreamRunner.ts index 0d1b6cc6f..9151abed9 100644 --- a/packages/service-core/src/replication/WalStreamRunner.ts +++ b/packages/service-core/src/replication/WalStreamRunner.ts @@ -1,5 +1,4 @@ import * as pgwire from '@powersync/service-jpgwire'; -import * as micro from '@journeyapps-platform/micro'; import * as storage from '../storage/storage-index.js'; import * as util from '../util/util-index.js'; @@ -7,6 +6,7 @@ import * as util from '../util/util-index.js'; import { ErrorRateLimiter } from './ErrorRateLimiter.js'; import { MissingReplicationSlotError, WalStream } from './WalStream.js'; import { ResolvedConnection } from '../util/config/types.js'; +import { logger } from '../system/Logger.js'; export interface WalStreamRunnerOptions { factory: storage.BucketStorageFactory; @@ -51,7 +51,7 @@ export class WalStreamRunner { replication_slot: this.slot_name } }); - micro.logger.error(`Replication failed on ${this.slot_name}`, e); + logger.error(`Replication failed on ${this.slot_name}`, e); if (e instanceof MissingReplicationSlotError) { // This stops replication on this slot, and creates a new slot @@ -96,7 +96,7 @@ export class WalStreamRunner { }); await stream.replicate(); } catch (e) { - micro.logger.error(`Replication error`, e); + logger.error(`Replication error`, e); if (e.cause != null) { // Example: // PgError.conn_ended: Unable to do postgres query on ended connection @@ -118,7 +118,7 @@ export class WalStreamRunner { // [Symbol(pg.ErrorResponse)]: undefined // } // Without this additional log, the cause would not be visible in the logs. 
- micro.logger.error(`cause`, e.cause); + logger.error(`cause`, e.cause); } if (e instanceof MissingReplicationSlotError) { throw e; @@ -144,7 +144,7 @@ export class WalStreamRunner { * This will also release the lock if start() was called earlier. */ async stop(options?: { force?: boolean }) { - micro.logger.info(`${this.slot_name} Stopping replication`); + logger.info(`${this.slot_name} Stopping replication`); // End gracefully this.abortController.abort(); @@ -161,7 +161,7 @@ export class WalStreamRunner { * Stops replication if needed. */ async terminate(options?: { force?: boolean }) { - micro.logger.info(`${this.slot_name} Terminating replication`); + logger.info(`${this.slot_name} Terminating replication`); await this.stop(options); const slotName = this.slot_name; diff --git a/packages/service-core/src/routes/socket-route.ts b/packages/service-core/src/routes/socket-route.ts index 23167e757..cb4ac2077 100644 --- a/packages/service-core/src/routes/socket-route.ts +++ b/packages/service-core/src/routes/socket-route.ts @@ -7,6 +7,7 @@ import { streamResponse } from '../sync/sync.js'; import { SyncRoutes } from './sync-stream.js'; import { SocketRouteGenerator } from './router-socket.js'; import { Metrics } from '../metrics/Metrics.js'; +import { logger } from '../system/Logger.js'; export const sync_stream_reactive: SocketRouteGenerator = (router) => router.reactiveStream(SyncRoutes.STREAM, { @@ -119,7 +120,7 @@ export const sync_stream_reactive: SocketRouteGenerator = (router) => // Convert to our standard form before responding. // This ensures the error can be serialized. const error = new micro.errors.InternalServerError(ex); - micro.logger.error('Sync stream error', error); + logger.error('Sync stream error', error); responder.onError(error); } finally { responder.onComplete(); diff --git a/packages/service-core/src/routes/sync-stream.ts b/packages/service-core/src/routes/sync-stream.ts index 87bb1244d..91aaa3d5d 100644 --- a/packages/service-core/src/routes/sync-stream.ts +++ b/packages/service-core/src/routes/sync-stream.ts @@ -8,6 +8,7 @@ import * as util from '../util/util-index.js'; import { authUser } from './auth.js'; import { RouteGenerator } from './router.js'; import { Metrics } from '../metrics/Metrics.js'; +import { logger } from '../system/Logger.js'; export enum SyncRoutes { STREAM = '/sync/stream' @@ -78,7 +79,7 @@ export const syncStreamed: RouteGenerator = (router) => controller.abort(); // Note: This appears as a 200 response in the logs. if (error.message != 'Shutting down system') { - micro.logger.error('Streaming sync request failed', error); + logger.error('Streaming sync request failed', error); } }); await res.send(stream); diff --git a/packages/service-core/src/runner/teardown.ts b/packages/service-core/src/runner/teardown.ts index 2e283f0fd..896d1b091 100644 --- a/packages/service-core/src/runner/teardown.ts +++ b/packages/service-core/src/runner/teardown.ts @@ -3,13 +3,13 @@ // 1. The replication slots on the source postgres instance (if available). // 2. The mongo database. -import * as micro from '@journeyapps-platform/micro'; import * as timers from 'timers/promises'; import * as db from '../db/db-index.js'; import * as storage from '../storage/storage-index.js'; import * as utils from '../util/util-index.js'; import * as replication from '../replication/replication-index.js'; +import { logger } from '../system/Logger.js'; /** * Attempt to terminate a single sync rules instance. 
@@ -63,7 +63,7 @@ async function terminateReplicators( } catch (e) { retry = true; console.error(e); - micro.logger.warn(`Failed to terminate ${syncRules.slot_name}`, e); + logger.warn(`Failed to terminate ${syncRules.slot_name}`, e); } } if (!retry) { diff --git a/packages/service-core/src/storage/MongoBucketStorage.ts b/packages/service-core/src/storage/MongoBucketStorage.ts index 0fadfedfe..94c742198 100644 --- a/packages/service-core/src/storage/MongoBucketStorage.ts +++ b/packages/service-core/src/storage/MongoBucketStorage.ts @@ -1,7 +1,6 @@ import * as mongo from 'mongodb'; import * as timers from 'timers/promises'; import { LRUCache } from 'lru-cache/min'; -import * as micro from '@journeyapps-platform/micro'; import { SqlSyncRules } from '@powersync/service-sync-rules'; import { wrapWithAbort } from 'ix/asynciterable/operators/withabort.js'; @@ -25,6 +24,7 @@ import { SyncRuleDocument, SyncRuleState } from './mongo/models.js'; import { generateSlotName } from './mongo/util.js'; import { locks } from '@journeyapps-platform/micro'; import { v4 as uuid } from 'uuid'; +import { logger } from '../system/Logger.js'; export interface MongoBucketStorageOptions extends PowerSyncMongoOptions {} @@ -74,13 +74,13 @@ export class MongoBucketStorage implements BucketStorageFactory { const active = await this.getActiveSyncRulesContent(); if (next?.sync_rules_content == sync_rules) { - micro.logger.info('Sync rules from configuration unchanged'); + logger.info('Sync rules from configuration unchanged'); return { updated: false }; } else if (next == null && active?.sync_rules_content == sync_rules) { - micro.logger.info('Sync rules from configuration unchanged'); + logger.info('Sync rules from configuration unchanged'); return { updated: false }; } else { - micro.logger.info('Sync rules updated from configuration'); + logger.info('Sync rules updated from configuration'); const persisted_sync_rules = await this.updateSyncRules({ content: sync_rules, lock: options?.lock diff --git a/packages/service-core/src/storage/mongo/MongoBucketBatch.ts b/packages/service-core/src/storage/mongo/MongoBucketBatch.ts index 75832cbd4..07ea7ab59 100644 --- a/packages/service-core/src/storage/mongo/MongoBucketBatch.ts +++ b/packages/service-core/src/storage/mongo/MongoBucketBatch.ts @@ -13,6 +13,7 @@ import { MongoIdSequence } from './MongoIdSequence.js'; import { cacheKey, OperationBatch, RecordOperation } from './OperationBatch.js'; import { PersistedBatch } from './PersistedBatch.js'; import { BSON_DESERIALIZE_OPTIONS, idPrefixFilter, serializeLookup } from './util.js'; +import { logger } from '../../system/Logger.js'; /** * 15MB @@ -345,7 +346,7 @@ export class MongoBucketBatch implements BucketStorageBatch { } } ); - micro.logger.error( + logger.error( `Failed to evaluate data query on ${record.sourceTable.qualifiedName}.${record.after?.id}: ${error.error}` ); } @@ -385,7 +386,7 @@ export class MongoBucketBatch implements BucketStorageBatch { } } ); - micro.logger.error( + logger.error( `Failed to evaluate parameter query on ${record.sourceTable.qualifiedName}.${after.id}: ${error.error}` ); } @@ -439,7 +440,7 @@ export class MongoBucketBatch implements BucketStorageBatch { if (e instanceof mongo.MongoError && e.hasErrorLabel('TransientTransactionError')) { // Likely write conflict caused by concurrent write stream replicating } else { - micro.logger.warn('Transaction error', e as Error); + logger.warn('Transaction error', e as Error); } await new Promise((resolve) => setTimeout(resolve, Math.random() * 50)); 
throw e; @@ -464,7 +465,7 @@ export class MongoBucketBatch implements BucketStorageBatch { await this.withTransaction(async () => { flushTry += 1; if (flushTry % 10 == 0) { - micro.logger.info(`${this.slot_name} ${description} - try ${flushTry}`); + logger.info(`${this.slot_name} ${description} - try ${flushTry}`); } if (flushTry > 20 && Date.now() > lastTry) { throw new Error('Max transaction tries exceeded'); @@ -529,13 +530,11 @@ export class MongoBucketBatch implements BucketStorageBatch { if (this.last_checkpoint_lsn != null && lsn <= this.last_checkpoint_lsn) { // When re-applying transactions, don't create a new checkpoint until // we are past the last transaction. - micro.logger.info(`Re-applied transaction ${lsn} - skipping checkpoint`); + logger.info(`Re-applied transaction ${lsn} - skipping checkpoint`); return false; } if (lsn < this.no_checkpoint_before_lsn) { - micro.logger.info( - `Waiting until ${this.no_checkpoint_before_lsn} before creating checkpoint, currently at ${lsn}` - ); + logger.info(`Waiting until ${this.no_checkpoint_before_lsn} before creating checkpoint, currently at ${lsn}`); return false; } @@ -599,7 +598,7 @@ export class MongoBucketBatch implements BucketStorageBatch { } async save(record: SaveOptions): Promise { - micro.logger.debug(`Saving ${record.tag}:${record.before?.id}/${record.after?.id}`); + logger.debug(`Saving ${record.tag}:${record.before?.id}/${record.after?.id}`); this.batch ??= new OperationBatch(); this.batch.push(new RecordOperation(record)); diff --git a/packages/service-core/src/storage/mongo/MongoSyncRulesLock.ts b/packages/service-core/src/storage/mongo/MongoSyncRulesLock.ts index f8780e3d7..c407d25d3 100644 --- a/packages/service-core/src/storage/mongo/MongoSyncRulesLock.ts +++ b/packages/service-core/src/storage/mongo/MongoSyncRulesLock.ts @@ -1,8 +1,8 @@ -import * as micro from '@journeyapps-platform/micro'; import crypto from 'crypto'; import { PersistedSyncRulesContent, ReplicationLock } from '../BucketStorage.js'; import { PowerSyncMongo } from './db.js'; +import { logger } from '../../system/Logger.js'; /** * Manages a lock on a sync rules document, so that only one process @@ -40,7 +40,7 @@ export class MongoSyncRulesLock implements ReplicationLock { try { await this.refresh(); } catch (e) { - micro.logger.error('Failed to refresh lock', e); + logger.error('Failed to refresh lock', e); clearInterval(this.refreshInterval); } }, 30_130); @@ -59,7 +59,7 @@ export class MongoSyncRulesLock implements ReplicationLock { ); if (result.modifiedCount == 0) { // Log and ignore - micro.logger.warn(`Lock already released: ${this.sync_rules_id}/${this.lock_id}`); + logger.warn(`Lock already released: ${this.sync_rules_id}/${this.lock_id}`); } } diff --git a/packages/service-core/src/storage/mongo/PersistedBatch.ts b/packages/service-core/src/storage/mongo/PersistedBatch.ts index e8cb873eb..cbbe04543 100644 --- a/packages/service-core/src/storage/mongo/PersistedBatch.ts +++ b/packages/service-core/src/storage/mongo/PersistedBatch.ts @@ -2,7 +2,6 @@ import { JSONBig } from '@powersync/service-jsonbig'; import { EvaluatedParameters, EvaluatedRow } from '@powersync/service-sync-rules'; import * as bson from 'bson'; import * as mongo from 'mongodb'; -import * as micro from '@journeyapps-platform/micro'; import * as util from '../../util/util-index.js'; import { SourceTable } from '../SourceTable.js'; @@ -17,6 +16,7 @@ import { SourceKey } from './models.js'; import { serializeLookup } from './util.js'; +import { logger } from 
'../../system/Logger.js'; /** * Maximum size of operations we write in a single transaction. @@ -253,7 +253,7 @@ export class PersistedBatch { }); } - micro.logger.info( + logger.info( `powersync_${this.group_id} Flushed ${this.bucketData.length} + ${this.bucketParameters.length} + ${ this.currentData.length } updates, ${Math.round(this.currentSize / 1024)}kb. Last op_id: ${this.debugLastOpId}` diff --git a/packages/service-core/src/sync/sync.ts b/packages/service-core/src/sync/sync.ts index 818d82a22..55eaf5855 100644 --- a/packages/service-core/src/sync/sync.ts +++ b/packages/service-core/src/sync/sync.ts @@ -1,4 +1,3 @@ -import * as micro from '@journeyapps-platform/micro'; import { JSONBig, JsonContainer } from '@powersync/service-jsonbig'; import { SyncParameters } from '@powersync/service-sync-rules'; import { Semaphore } from 'async-mutex'; @@ -11,6 +10,7 @@ import * as util from '../util/util-index.js'; import { mergeAsyncIterables } from './merge.js'; import { TokenStreamOptions, tokenStream } from './util.js'; import { Metrics } from '../metrics/Metrics.js'; +import { logger } from '../system/Logger.js'; /** * Maximum number of connections actively fetching data. @@ -139,7 +139,7 @@ async function* streamResponseInner( message += `buckets: ${allBuckets.length} | `; message += `updated: ${limitedBuckets(diff.updated_buckets, 20)} | `; message += `removed: ${limitedBuckets(diff.removed_buckets, 20)} | `; - micro.logger.info(message); + logger.info(message); const checksum_line: util.StreamingSyncCheckpointDiff = { checkpoint_diff: { @@ -153,7 +153,7 @@ async function* streamResponseInner( } else { let message = `New checkpoint: ${checkpoint} | write: ${writeCheckpoint} | `; message += `buckets: ${allBuckets.length} ${limitedBuckets(allBuckets, 20)}`; - micro.logger.info(message); + logger.info(message); const checksum_line: util.StreamingSyncCheckpoint = { checkpoint: { last_op_id: checkpoint, @@ -239,7 +239,7 @@ async function* bucketDataBatch( if (r.data.length == 0) { continue; } - micro.logger.debug(`Sending data for ${r.bucket}`); + logger.debug(`Sending data for ${r.bucket}`); let send_data: any; if (binary_data) { diff --git a/packages/service-core/src/system/Logger.ts b/packages/service-core/src/system/Logger.ts new file mode 100644 index 000000000..0e8a307e5 --- /dev/null +++ b/packages/service-core/src/system/Logger.ts @@ -0,0 +1,8 @@ +import winston from 'winston'; + +/** + * Logger instance which is used in the entire codebase. + * This should be configured in the project which consumes the + * core package. 
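+ *
+ * A minimal configuration sketch for the consuming service (the level,
+ * format and transport chosen below are illustrative assumptions, not
+ * part of this commit; `logger.configure`, `winston.format.*` and
+ * `winston.transports.Console` are standard winston 3.x APIs):
+ *
+ * @example
+ * logger.configure({
+ *   level: 'info',
+ *   format: winston.format.combine(winston.format.timestamp(), winston.format.json()),
+ *   transports: [new winston.transports.Console()]
+ * });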
+ */ +export const logger = winston.createLogger(); diff --git a/packages/service-core/src/system/system-index.ts b/packages/service-core/src/system/system-index.ts new file mode 100644 index 000000000..34886eb26 --- /dev/null +++ b/packages/service-core/src/system/system-index.ts @@ -0,0 +1,2 @@ +export * from './CorePowerSyncSystem.js'; +export * from './Logger.js'; diff --git a/packages/service-core/src/util/config/collectors/impl/filesystem-config-collector.ts b/packages/service-core/src/util/config/collectors/impl/filesystem-config-collector.ts index 461db9286..c9ce8b233 100644 --- a/packages/service-core/src/util/config/collectors/impl/filesystem-config-collector.ts +++ b/packages/service-core/src/util/config/collectors/impl/filesystem-config-collector.ts @@ -1,8 +1,8 @@ import * as fs from 'fs/promises'; -import * as micro from '@journeyapps-platform/micro'; import { ConfigCollector, ConfigFileFormat } from '../config-collector.js'; import { RunnerConfig } from '../../types.js'; +import { logger } from '../../../../system/Logger.js'; export class FileSystemConfigCollector extends ConfigCollector { get name(): string { @@ -22,7 +22,7 @@ export class FileSystemConfigCollector extends ConfigCollector { throw new Error(`Config file path ${config_path} was specified, but the file does not exist.`); } - micro.logger.info(`Collecting PowerSync configuration from File: ${config_path}`); + logger.info(`Collecting PowerSync configuration from File: ${config_path}`); const content = await fs.readFile(config_path, 'utf-8'); diff --git a/packages/service-core/src/util/config/compound-config-collector.ts b/packages/service-core/src/util/config/compound-config-collector.ts index 04afa471a..61a2dcb4d 100644 --- a/packages/service-core/src/util/config/compound-config-collector.ts +++ b/packages/service-core/src/util/config/compound-config-collector.ts @@ -1,4 +1,3 @@ -import * as micro from '@journeyapps-platform/micro'; import { configFile, normalizeConnection } from '@powersync/service-types'; import { ConfigCollector } from './collectors/config-collector.js'; import { ResolvedConnection, ResolvedPowerSyncConfig, RunnerConfig, SyncRulesConfig } from './types.js'; @@ -10,6 +9,7 @@ import { Base64SyncRulesCollector } from './sync-rules/impl/base64-sync-rules-co import { InlineSyncRulesCollector } from './sync-rules/impl/inline-sync-rules-collector.js'; import { FileSystemSyncRulesCollector } from './sync-rules/impl/filesystem-sync-rules-collector.js'; import { FallbackConfigCollector } from './collectors/impl/fallback-config-collector.js'; +import { logger } from '../../system/Logger.js'; const POWERSYNC_DEV_KID = 'powersync-dev'; @@ -140,7 +140,7 @@ export class CompoundConfigCollector { if (baseConfig) { return baseConfig; } - micro.logger.debug( + logger.debug( `Could not collect PowerSync config with ${collector.name} method. Moving on to next method if available.` ); } catch (ex) { @@ -161,7 +161,7 @@ export class CompoundConfigCollector { if (config) { return config; } - micro.logger.debug( + logger.debug( `Could not collect sync rules with ${collector.name} method. 
Moving on to next method if available.` ); } catch (ex) { diff --git a/packages/service-core/src/util/env.ts b/packages/service-core/src/util/env.ts index e1d005e31..1794adf2b 100644 --- a/packages/service-core/src/util/env.ts +++ b/packages/service-core/src/util/env.ts @@ -22,7 +22,9 @@ export const env = utils.collectEnvironmentVariables({ /** * Port for metrics */ - METRICS_PORT: utils.type.number.optional() + METRICS_PORT: utils.type.number.optional(), + + NODE_ENV: utils.type.string.optional() }); export type Env = typeof env; diff --git a/packages/service-core/src/util/memory-tracking.ts b/packages/service-core/src/util/memory-tracking.ts index 44bc6e41c..4e1e6d05c 100644 --- a/packages/service-core/src/util/memory-tracking.ts +++ b/packages/service-core/src/util/memory-tracking.ts @@ -1,4 +1,4 @@ -import * as micro from '@journeyapps-platform/micro'; +import { logger } from '../system/Logger.js'; /** * Track and log memory usage. @@ -57,7 +57,7 @@ export function trackMemoryUsage() { ) )`.replaceAll(/\s+/g, ' '); - micro.logger.info(output); + logger.info(output); } }, 50); } diff --git a/packages/service-core/src/util/pgwire_utils.ts b/packages/service-core/src/util/pgwire_utils.ts index 645224504..80becc70a 100644 --- a/packages/service-core/src/util/pgwire_utils.ts +++ b/packages/service-core/src/util/pgwire_utils.ts @@ -4,9 +4,9 @@ import * as bson from 'bson'; import * as uuid from 'uuid'; import * as pgwire from '@powersync/service-jpgwire'; import { SqliteJsonValue, SqliteRow, ToastableSqliteRow, toSyncRulesRow } from '@powersync/service-sync-rules'; -import * as micro from '@journeyapps-platform/micro'; import * as replication from '../replication/replication-index.js'; +import { logger } from '../system/Logger.js'; /** * pgwire message -> SQLite row. @@ -133,7 +133,7 @@ export async function retriedQuery(db: pgwire.PgClient, ...args: any[]) { if (tries == 1) { throw e; } - micro.logger.warn('Query error, retrying', e); + logger.warn('Query error, retrying', e); } } } diff --git a/packages/service-core/src/util/utils.ts b/packages/service-core/src/util/utils.ts index a10533b86..725c50d56 100644 --- a/packages/service-core/src/util/utils.ts +++ b/packages/service-core/src/util/utils.ts @@ -1,11 +1,11 @@ import crypto from 'crypto'; import * as pgwire from '@powersync/service-jpgwire'; import { pgwireRows } from '@powersync/service-jpgwire'; -import * as micro from '@journeyapps-platform/micro'; import * as storage from '../storage/storage-index.js'; import { BucketChecksum, OpId } from './protocol-types.js'; import { retriedQuery } from './pgwire_utils.js'; +import { logger } from '../system/Logger.js'; export function hashData(type: string, id: string, data: string): number { const hash = crypto.createHash('sha256'); @@ -70,14 +70,14 @@ export async function getClientCheckpoint( const timeout = options?.timeout ?? 
50_000; - micro.logger.info(`Waiting for LSN checkpoint: ${lsn}`); + logger.info(`Waiting for LSN checkpoint: ${lsn}`); while (Date.now() - start < timeout) { const cp = await bucketStorage.getActiveCheckpoint(); if (!cp.hasSyncRules()) { throw new Error('No sync rules available'); } if (cp.lsn >= lsn) { - micro.logger.info(`Got write checkpoint: ${lsn} : ${cp.checkpoint}`); + logger.info(`Got write checkpoint: ${lsn} : ${cp.checkpoint}`); return cp.checkpoint; } @@ -97,6 +97,6 @@ export async function createWriteCheckpoint( ); const id = await bucketStorage.createWriteCheckpoint(user_id, { '1': lsn }); - micro.logger.info(`Write checkpoint 2: ${JSON.stringify({ lsn, id: String(id) })}`); + logger.info(`Write checkpoint 2: ${JSON.stringify({ lsn, id: String(id) })}`); return id; } diff --git a/packages/service-core/tsconfig.json b/packages/service-core/tsconfig.json index fd340e1cd..4504307ab 100644 --- a/packages/service-core/tsconfig.json +++ b/packages/service-core/tsconfig.json @@ -3,7 +3,6 @@ "compilerOptions": { "rootDir": "src", "outDir": "dist", - "baseUrl": ".", "esModuleInterop": true, "skipLibCheck": true, "sourceMap": true diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index f9430c404..03cf13e90 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -11,27 +11,6 @@ importers: '@changesets/cli': specifier: ^2.27.3 version: 2.27.3 - '@journeyapps-platform/deploy-cli': - specifier: ^4.4.6 - version: 4.4.7(@octokit/core@4.2.4(encoding@0.1.13))(@types/node@18.11.11)(encoding@0.1.13)(typescript@5.2.2) - '@journeyapps-platform/deploy-config': - specifier: ^3.1.0 - version: 3.1.0 - '@journeyapps-platform/deploy-transformers': - specifier: ^1.2.32 - version: 1.2.33(@octokit/core@4.2.4(encoding@0.1.13))(@types/node@18.11.11)(encoding@0.1.13)(typescript@5.2.2) - '@journeyapps-platform/formatter-cli': - specifier: ^3.1.9 - version: 3.1.9 - '@journeyapps-platform/lerno': - specifier: ^1.0.3 - version: 1.0.3 - '@journeyapps-platform/micro-cli': - specifier: ^7.2.6 - version: 7.2.9(@types/node@18.11.11)(typescript@5.2.2) - '@journeyapps-platform/micro-dev': - specifier: ^1.6.7 - version: 1.6.7 '@types/node': specifier: 18.11.11 version: 18.11.11 @@ -210,13 +189,13 @@ importers: uuid: specifier: ^9.0.1 version: 9.0.1 + winston: + specifier: ^3.13.0 + version: 3.13.0 yaml: specifier: ^2.3.2 version: 2.4.2 devDependencies: - '@journeyapps-platform/micro-dev': - specifier: ^1.6.7 - version: 1.6.7 '@types/uuid': specifier: ^9.0.4 version: 9.0.8 @@ -341,13 +320,13 @@ importers: uuid: specifier: ^9.0.1 version: 9.0.1 + winston: + specifier: ^3.13.0 + version: 3.13.0 yaml: specifier: ^2.3.2 version: 2.4.2 devDependencies: - '@journeyapps-platform/micro-dev': - specifier: ^1.6.7 - version: 1.6.7 '@types/uuid': specifier: ^9.0.4 version: 9.0.8 @@ -447,10 +426,17 @@ packages: resolution: {integrity: sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ==} engines: {node: '>=0.1.90'} + '@colors/colors@1.6.0': + resolution: {integrity: sha512-Ir+AOibqzrIsL6ajt3Rz3LskB7OiMVHqltZmspbW/TJuTVuyOMirVqAkjfY6JISiLHgyNqicAC8AyHHGzNd/dA==} + engines: {node: '>=0.1.90'} + '@cspotcode/source-map-support@0.8.1': resolution: {integrity: sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==} engines: {node: '>=12'} + '@dabh/diagnostics@2.0.3': + resolution: {integrity: sha512-hrlQOIi7hAfzsMqlGSFyVucrx38O+j6wiGOf//H2ecvIEqYN4ADBSS2iLMh5UFyDunCNniUIPk/q3riFv45xRA==} + '@esbuild/aix-ppc64@0.20.2': resolution: {integrity: 
sha512-D+EBOJHXdNZcLJRBkhENNG8Wji2kgc9AZ9KiPr1JuZjsNtyHzrsfLRrY0tk2H2aoFu6RANO1y1iPPUCDYWkb5g==} engines: {node: '>=12'} @@ -611,10 +597,6 @@ packages: resolution: {integrity: sha512-vYVqYzHicDqyKB+NQhAc54I1QWCBLCrYG6unqOIcBTHx+7x8C9lcoLj3KVJXs2VB4lUbpWY+Kk9NipcbXYWmvg==} engines: {node: '>=12.10.0'} - '@grpc/grpc-js@1.3.8': - resolution: {integrity: sha512-4qJqqn+CU/nBydz9ePJP+oa8dz0U42Ut/GejlbyaQ1xTkynCc+ndNHHnISlNeHawDsv4MOAyP3mV/EnDNUw2zA==} - engines: {node: ^8.13.0 || >=10.10.0} - '@grpc/proto-loader@0.7.13': resolution: {integrity: sha512-AiXO/bfe9bmxBjxxtYxFAXGZvMaN5s8kO+jBHAJCON8rJoB5YS/D6X7ZNc6XQkuHNmyl4CYaMI1fJ/Gn27RGGw==} engines: {node: '>=6'} @@ -636,36 +618,6 @@ packages: resolution: {integrity: sha512-mo5j5X+jIZmJQveBKeS/clAueipV7KgiX1vMgCxam1RNYiqE1w62n0/tJJnHtjW8ZHcQco5gY85jA3mi0L+nSA==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - '@journeyapps-platform/ci-tools@3.0.7': - resolution: {integrity: sha512-DPNy2D4Ts4OllxbKlPs4pFDB6ssV+ougU1grlRfL066OgfV7L2iMZNV6yrxSMwtDVedsFWf4U5TJEuiuIxbg5w==, tarball: https://npm.pkg.github.com/download/@journeyapps-platform/ci-tools/3.0.7/ab37dc5e21b8b25d32dfebb4c3b478bfbb205973} - - '@journeyapps-platform/cli-logger@2.14.8': - resolution: {integrity: sha512-FMM2q26rLj0GcNlWv7vgT0zc5EQPFmIR+wFK3y/v+/8za8QD3QS12rq41QHa4tymL7vaRW2lOBHylWzq5BMbmQ==, tarball: https://npm.pkg.github.com/download/@journeyapps-platform/cli-logger/2.14.8/8e699780eb6240fcd755feaf2d6fc70063334ea3} - - '@journeyapps-platform/deploy-cli@4.4.7': - resolution: {integrity: sha512-C3pIC9Y3o43HoWd+plvdZgvBP+k0B8uo6ON40Zp1/20liiGn7PjJlg5mTLa5biSvd+HBQi2wb8HBkw0cJTL+ow==, tarball: https://npm.pkg.github.com/download/@journeyapps-platform/deploy-cli/4.4.7/71af64269b0ae288107a62733812b33663716f18} - hasBin: true - - '@journeyapps-platform/deploy-config@3.1.0': - resolution: {integrity: sha512-8AwixXk8oeeIwxjss4wYbUR3S9gEsFSj6mfTtsluQv4q1iVPj/ub33LtTDkhLqNn16z60Mc0Jak4HWpSZ4m09A==, tarball: https://npm.pkg.github.com/download/@journeyapps-platform/deploy-config/3.1.0/f85f9a7166f02631258e5552c081499a193f3ca6} - - '@journeyapps-platform/deploy-transformers@1.2.33': - resolution: {integrity: sha512-e5vdvwS3nu7NM4zXC6UR5bEM41grpQRQ7ZkRc7Tw5h1TLAxSAGCa0+lC397GpIK1vSlMikmWBzNXA6jJCdCCcQ==, tarball: https://npm.pkg.github.com/download/@journeyapps-platform/deploy-transformers/1.2.33/d83f4669728006da0806896541883a8948b5720c} - - '@journeyapps-platform/filesystem@3.0.1': - resolution: {integrity: sha512-Dd6i2G1n+tobKNH0fepe89/3p6EE5j0Xzy7Q57opeSqyS7Kn6oMBzc+/LyYgUIKRXKrqHrlpYhwloxMr+5NomA==, tarball: https://npm.pkg.github.com/download/@journeyapps-platform/filesystem/3.0.1/1b4f1fe52b484a436ac5c9088af9f2a365e05671} - - '@journeyapps-platform/formatter-cli@3.1.9': - resolution: {integrity: sha512-N0nbxxG9Mo/d2POoiT0gmx0Pr6Oa07rqI4QtFKNnzUn9+e+dsFMkdFFeT+jdHGHStrT2ofJ/6ZJ6eUjsmeTV5A==, tarball: https://npm.pkg.github.com/download/@journeyapps-platform/formatter-cli/3.1.9/6a4dc02c1dcd03e9ec5dae924451963c9702c7a5} - hasBin: true - - '@journeyapps-platform/git@1.0.6': - resolution: {integrity: sha512-lX7pUS/G3VKSLf6YoKQ9XNEMbf42hpFCBVGIyAZdyBVV5wWQ0OVpRpsgUz1NdfWALm1BBDxaEN/L6DzStOfSBw==, tarball: https://npm.pkg.github.com/download/@journeyapps-platform/git/1.0.6/84d24c81eec8ced3519a2cee71b3b89fa3e7316e} - - '@journeyapps-platform/lerno@1.0.3': - resolution: {integrity: sha512-T5XdYWPipea8nLlczHTG+/c3S+w4kZpDmAQdW81F4/dkwVUKOHrXupLLrNnPlnjaLhmz+RgANfsuYJ/o9q19dw==, tarball: 
https://npm.pkg.github.com/download/@journeyapps-platform/lerno/1.0.3/209cf354e73fe172e78a9898518855f325912ebc} - hasBin: true - '@journeyapps-platform/micro-alerts@4.0.0': resolution: {integrity: sha512-9uRnfTmIUkEx98RPwt6N3+ldmmdmu9oQ0Em+51efb+dGZd8EKq6eBlM1pXo5ud/5wKBoKz4LH/EYsgYY01Q6HQ==, tarball: https://npm.pkg.github.com/download/@journeyapps-platform/micro-alerts/4.0.0/805317cde6a7fea1dc94b2b398c86b7133513e76} @@ -675,25 +627,12 @@ packages: '@journeyapps-platform/micro-authorizers@6.0.0': resolution: {integrity: sha512-uc7+OAVROx1llHYwWCeH35198Avw6gAxJBEdD65zUISL2c4xhTlwhBH1JB6l+jEHKwowvHTyjJxUfa6L5ijsSg==, tarball: https://npm.pkg.github.com/download/@journeyapps-platform/micro-authorizers/6.0.0/29a593a88f11e19b7cdb55883106d62f42ed9fa1} - '@journeyapps-platform/micro-cli@7.2.9': - resolution: {integrity: sha512-H/tNoMVxGWqF9GZ8rmWA4bxSTWGUNDPsBwK6vCZtvjVZjW/pKrxwNVHAu4d+6NXlNa8mIt2NNcuS99F3iHyP6A==, tarball: https://npm.pkg.github.com/download/@journeyapps-platform/micro-cli/7.2.9/70b53f1952b1fcb9077fd79379b42a23094bb090} - hasBin: true - - '@journeyapps-platform/micro-codecs@2.3.0': - resolution: {integrity: sha512-lKf/HDekssTruw0xxsvaXFMnywXDYV31mTLpqCjXm+vidOmr3rCfWcmqXN6vRMuLp8TJjgu8wruOMNukiQ/DsQ==, tarball: https://npm.pkg.github.com/download/@journeyapps-platform/micro-codecs/2.3.0/aca109f7d64fac4b08013b462301c8ccb5afc6a9} - '@journeyapps-platform/micro-codecs@3.0.1': resolution: {integrity: sha512-AdU5P3TdiI5Z77oyUEa2aUnhHX+sU70GpoQbZm2YVZo5wQTi+Eds0JXxZR84Nk2iLhcoCtziXA87NtwVgoVR5g==, tarball: https://npm.pkg.github.com/download/@journeyapps-platform/micro-codecs/3.0.1/b9a15997e1a27f908840f73ee25724348af14e72} '@journeyapps-platform/micro-db@3.0.2': resolution: {integrity: sha512-x0cJCmcm9jB4xQ0Bx4psxAVCZ8HmBXRlN1gC/2p4qRzuw9YpLE8/zV0ElTtotzB2fUVSRzuBqgdrHXY0E4ru4w==, tarball: https://npm.pkg.github.com/download/@journeyapps-platform/micro-db/3.0.2/5b224878515b4a3f13d847c3dd9fd0495ffd3339} - '@journeyapps-platform/micro-dev@1.6.7': - resolution: {integrity: sha512-7GcU2VUl1JLPEtYaixtNAm3KC/ghPTHj+Eq8alp9HiGffCUdaDhQ5jKfBYwIIRMT+kpMpS4Ggx4ubD80DBR4FA==, tarball: https://npm.pkg.github.com/download/@journeyapps-platform/micro-dev/1.6.7/83adf849451a842e0593be8f7d2e5707f8e95d2b} - - '@journeyapps-platform/micro-errors@2.6.0': - resolution: {integrity: sha512-NFBUNLot19Qoom0jf6ts8BbuKAYpBmf37GsfS2uL60HRW9cKYWOa7WmK7vPnF46rgUxerYyvgw11IESwd86MEw==, tarball: https://npm.pkg.github.com/download/@journeyapps-platform/micro-errors/2.6.0/7f538d8c79f6a466a33f116222f3832f7564f83f} - '@journeyapps-platform/micro-errors@3.0.0': resolution: {integrity: sha512-cAswdlJ3QGsF3EKTNq4EX6JhIJeVg+xHJBzupUiADHMOGbldW7O01ewp/iIsYA0d4GYBERtivOCsXrGZUQax+g==, tarball: https://npm.pkg.github.com/download/@journeyapps-platform/micro-errors/3.0.0/cd67505976c54b6aa3e40635faeab0dc8baa4baf} @@ -738,9 +677,6 @@ packages: '@journeyapps-platform/micro-router@7.0.1': resolution: {integrity: sha512-q9cvq8ZXGd2BZCCZBGUWNK/P/3jHZPey3tvjV+Usb5pBUhzu7BihL7uPuj/s5RpscMbd9rtBWXqz7oY5gKLV1w==, tarball: https://npm.pkg.github.com/download/@journeyapps-platform/micro-router/7.0.1/e1e8aaea2efeb32fc7da457cdb6f099ebeecb272} - '@journeyapps-platform/micro-schema@5.1.1': - resolution: {integrity: sha512-E0qUptSLHbzwL3STE3SLyaFgwYPJl5kjl73A0V0db7nacv0xNuhLBggM9t754rbUSiZZQx69eGQhcYMqJkYqSw==, tarball: https://npm.pkg.github.com/download/@journeyapps-platform/micro-schema/5.1.1/a5a7fb36a3878771292285e349e4f828a673f1f3} - '@journeyapps-platform/micro-schema@6.0.1': resolution: {integrity: 
sha512-KRZFM4rUhAPqpizfyazX+eWAlf1BwOYibA2ealXKQqXgaTvxTV7oqObmvzhFr4cDuYXV3E+isW/FgOyspjuvYg==, tarball: https://npm.pkg.github.com/download/@journeyapps-platform/micro-schema/6.0.1/108672fbde6f092f49a432df20eca259aaf18be0} @@ -766,15 +702,9 @@ packages: resolution: {integrity: sha512-+wrmB7sDPL6RkL011bKEYwAcqDGRJoXtKB1V1jhOz1P/VyMAda6EsAgcBewPpkvIg9q+JQWvwEg/HvJ32pKKJQ==, tarball: https://npm.pkg.github.com/download/@journeyapps-platform/micro/17.0.1/32ed182dcbda322fa5d9de94827b711a278de997} hasBin: true - '@journeyapps-platform/shell-tools@3.0.1': - resolution: {integrity: sha512-Kah+zbXs/ShuJuIPDeF8oqQU0Jo1hPpT37JkwSqtuNv2pTgf9gx8zKw4iqh0Tk7BdFt66wbqrbRa//BPKiWeuA==, tarball: https://npm.pkg.github.com/download/@journeyapps-platform/shell-tools/3.0.1/7800020990e903d9624dad8f338aaf1429a5b4f5} - '@journeyapps-platform/types-micro-ws@2.0.0': resolution: {integrity: sha512-91rA17Orl1Nv55mbEzhXWOfIBidb1hUWNoaXiEeaVQ/M+0HuDj8a+54JpnmIafG1Wp3HNF5ICX2UcmIzTKHB+A==, tarball: https://npm.pkg.github.com/download/@journeyapps-platform/types-micro-ws/2.0.0/89852c246163426360bfe862b41cbaba87198ed5} - '@journeyapps-platform/workspace@1.0.3': - resolution: {integrity: sha512-r5svVxhFO2k8/VpoEqnkmWzAp84zzv9tJtczXSd0gIHH1F68YB39JgeIH9W0fPhKryWDpR6lGRuuhghgoPCiAg==, tarball: https://npm.pkg.github.com/download/@journeyapps-platform/workspace/1.0.3/1d4d84d327be30e88edff6a76441a08d45a072ae} - '@jridgewell/resolve-uri@3.1.2': resolution: {integrity: sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==} engines: {node: '>=6.0.0'} @@ -795,28 +725,12 @@ packages: resolution: {integrity: sha512-/gKJun8NNiWGZJkGzI/Ragc53cOdcLNdzjLaIa+GEjguQs0ulsurx8WN0jijdK9yPqDvziX995sMRLyLt1uZMQ==} engines: {node: '>= 0.4'} - '@logdna/tail-file@2.2.0': - resolution: {integrity: sha512-XGSsWDweP80Fks16lwkAUIr54ICyBs6PsI4mpfTLQaWgEJRtY9xEV+PeyDpJ+sJEGZxqINlpmAwe/6tS1pP8Ng==} - engines: {node: '>=10.3.0'} - '@manypkg/find-root@1.1.0': resolution: {integrity: sha512-mki5uBvhHzO8kYYix/WRy2WX8S3B5wdVSc9D6KcU5lQNglP2yt58/VfLuAK49glRXChosY8ap2oJ1qgma3GUVA==} - '@manypkg/find-root@2.2.1': - resolution: {integrity: sha512-34NlypD5mmTY65cFAK7QPgY5Tzt0qXR4ZRXdg97xAlkiLuwXUPBEXy5Hsqzd+7S2acsLxUz6Cs50rlDZQr4xUA==} - engines: {node: '>=14.18.0'} - '@manypkg/get-packages@1.1.3': resolution: {integrity: sha512-fo+QhuU3qE/2TQMQmbVMqaQ6EWbMhi4ABWP+O4AM1NqPBuy0OrApV5LO6BrrgnhtAHS2NH6RrVk9OL181tTi8A==} - '@manypkg/get-packages@2.2.1': - resolution: {integrity: sha512-TrJd86paBkKEx6InhObcUhuoJNcATlbO6+s1dQdLd4+Y1SLDKJUAMhU46kTZ1SOFbegTuhDbIF3j+Jy564BERA==} - engines: {node: '>=14.18.0'} - - '@manypkg/tools@1.1.0': - resolution: {integrity: sha512-SkAyKAByB9l93Slyg8AUHGuM2kjvWioUTCckT/03J09jYnfEzMO/wSXmEhnKGYs6qx9De8TH4yJCl0Y9lRgnyQ==} - engines: {node: '>=14.18.0'} - '@mongodb-js/saslprep@1.1.7': resolution: {integrity: sha512-dCHW/oEX0KJ4NjDULBo3JiOaK5+6axtpBbS+ao2ZInoAL9/YRQLhXzSNAFz7hP4nzLkIqsfYAK/PDE3+XHny0Q==} @@ -866,72 +780,10 @@ packages: resolution: {integrity: sha512-NCcr1uQo1k5U+SYlnIrbAh3cxy+OQT1VtqiAbxdymSlptbzBb62AjH2xXgjNCoP073hoa1CfCAcwoZ8k96C4nA==} engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} - '@octokit/auth-token@3.0.4': - resolution: {integrity: sha512-TWFX7cZF2LXoCvdmJWY7XVPi74aSY0+FfBZNSXEXFkMpjcqsQwDSYVv5FhRFaI0V1ECnwbz4j59T/G+rXNWaIQ==} - engines: {node: '>= 14'} - - '@octokit/core@4.2.4': - resolution: {integrity: sha512-rYKilwgzQ7/imScn3M9/pFfUf4I1AZEH3KhyJmtPdE2zfaXAn2mFfUy4FbKewzc2We5y/LlKLj36fWJLKC2SIQ==} - engines: {node: '>= 14'} - - '@octokit/endpoint@7.0.6': - 
resolution: {integrity: sha512-5L4fseVRUsDFGR00tMWD/Trdeeihn999rTMGRMC1G/Ldi1uWlWJzI98H4Iak5DB/RVvQuyMYKqSK/R6mbSOQyg==} - engines: {node: '>= 14'} - - '@octokit/graphql@5.0.6': - resolution: {integrity: sha512-Fxyxdy/JH0MnIB5h+UQ3yCoh1FG4kWXfFKkpWqjZHw/p+Kc8Y44Hu/kCgNBT6nU1shNumEchmW/sUO1JuQnPcw==} - engines: {node: '>= 14'} - - '@octokit/openapi-types@18.1.1': - resolution: {integrity: sha512-VRaeH8nCDtF5aXWnjPuEMIYf1itK/s3JYyJcWFJT8X9pSNnBtriDf7wlEWsGuhPLl4QIH4xM8fqTXDwJ3Mu6sw==} - - '@octokit/plugin-paginate-rest@6.1.2': - resolution: {integrity: sha512-qhrmtQeHU/IivxucOV1bbI/xZyC/iOBhclokv7Sut5vnejAIAEXVcGQeRpQlU39E0WwK9lNvJHphHri/DB6lbQ==} - engines: {node: '>= 14'} - peerDependencies: - '@octokit/core': '>=4' - - '@octokit/plugin-request-log@1.0.4': - resolution: {integrity: sha512-mLUsMkgP7K/cnFEw07kWqXGF5LKrOkD+lhCrKvPHXWDywAwuDUeDwWBpc69XK3pNX0uKiVt8g5z96PJ6z9xCFA==} - peerDependencies: - '@octokit/core': '>=3' - - '@octokit/plugin-rest-endpoint-methods@7.2.3': - resolution: {integrity: sha512-I5Gml6kTAkzVlN7KCtjOM+Ruwe/rQppp0QU372K1GP7kNOYEKe8Xn5BW4sE62JAHdwpq95OQK/qGNyKQMUzVgA==} - engines: {node: '>= 14'} - peerDependencies: - '@octokit/core': '>=3' - - '@octokit/request-error@3.0.3': - resolution: {integrity: sha512-crqw3V5Iy2uOU5Np+8M/YexTlT8zxCfI+qu+LxUB7SZpje4Qmx3mub5DfEKSO8Ylyk0aogi6TYdf6kxzh2BguQ==} - engines: {node: '>= 14'} - - '@octokit/request@6.2.8': - resolution: {integrity: sha512-ow4+pkVQ+6XVVsekSYBzJC0VTVvh/FCTUUgTsboGq+DTeWdyIFV8WSCdo0RIxk6wSkBTHqIK1mYuY7nOBXOchw==} - engines: {node: '>= 14'} - - '@octokit/rest@19.0.13': - resolution: {integrity: sha512-/EzVox5V9gYGdbAI+ovYj3nXQT1TtTHRT+0eZPcuC05UFSWO3mdO9UY1C0i2eLF9Un1ONJkAk+IEtYGAC+TahA==} - engines: {node: '>= 14'} - - '@octokit/tsconfig@1.0.2': - resolution: {integrity: sha512-I0vDR0rdtP8p2lGMzvsJzbhdOWy405HcGovrspJ8RRibHnyRgggUSNO5AIox5LmqiwmatHKYsvj6VGFHkqS7lA==} - - '@octokit/types@10.0.0': - resolution: {integrity: sha512-Vm8IddVmhCgU1fxC1eyinpwqzXPEYu0NrYzD3YZjlGjyftdLBTeqNblRC0jmJmgxbJIsQlyogVeGnrNaaMVzIg==} - - '@octokit/types@9.3.2': - resolution: {integrity: sha512-D4iHGTdAnEEVsB8fl95m1hiz7D5YiRdQ9b/OEb3BYRVwbLsGHcRVPz+u+BgRLNk0Q0/4iZCBqDN96j2XNxfXrA==} - '@opentelemetry/api-logs@0.51.1': resolution: {integrity: sha512-E3skn949Pk1z2XtXu/lxf6QAZpawuTM/IUEXcAzpiUkTd73Hmvw26FiN3cJuTmkpM5hZzHwkomVdtrh/n/zzwA==} engines: {node: '>=14'} - '@opentelemetry/api-metrics@0.32.0': - resolution: {integrity: sha512-g1WLhpG8B6iuDyZJFRGsR+JKyZ94m5LEmY2f+duEJ9Xb4XRlLHrZvh6G34OH6GJ8iDHxfHb/sWjJ1ZpkI9yGMQ==} - engines: {node: '>=14'} - deprecated: Please use @opentelemetry/api >= 1.3.0 - '@opentelemetry/api@1.3.0': resolution: {integrity: sha512-YveTnGNsFFixTKJz09Oi4zYkiLT5af3WpZDu4aIUM7xX+2bHAkOJayFTVQd6zB8kkWPpbua4Ha6Ql00grdLlJQ==} engines: {node: '>=8.0.0'} @@ -944,12 +796,6 @@ packages: resolution: {integrity: sha512-I/s6F7yKUDdtMsoBWXJe8Qz40Tui5vsuKCWJEWVL+5q9sSWRzzx6v2KeNsOBEwd94j0eWkpWCH4yB6rZg9Mf0w==} engines: {node: '>=8.0.0'} - '@opentelemetry/context-async-hooks@1.24.1': - resolution: {integrity: sha512-R5r6DO4kgEOVBxFXhXjwospLQkv+sYxwCfjvoZBe7Zm6KKXAV9kDSJhi/D1BweowdZmO+sdbENLs374gER8hpQ==} - engines: {node: '>=14'} - peerDependencies: - '@opentelemetry/api': '>=1.0.0 <1.9.0' - '@opentelemetry/context-async-hooks@1.8.0': resolution: {integrity: sha512-ueLmocbWDi1aoU4IPdOQyt4qz/Dx+NYyU4qoa3d683usbnkDLUXYXJFfKIMPFV2BbrI5qtnpTtzErCKewoM8aw==} engines: {node: '>=14'} @@ -1028,24 +874,12 @@ packages: peerDependencies: '@opentelemetry/api': ^1.0.0 - '@opentelemetry/exporter-zipkin@1.24.1': - resolution: 
{integrity: sha512-+Rl/VFmu2n6eaRMnVbyfZx1DqR/1KNyWebYuHyQBZaEAVIn/ZLgmofRpXN1X2nhJ4BNaptQUNxAstCYYz6dKoQ==} - engines: {node: '>=14'} - peerDependencies: - '@opentelemetry/api': ^1.0.0 - '@opentelemetry/exporter-zipkin@1.8.0': resolution: {integrity: sha512-Y3WqNCZjfWKnHiRzb35sXpDfGL4Gx2qajFAv059s/VFayIPytLHUOrZMiQqrpfzU/TSIKPG4OHJaypFtUtNlQQ==} engines: {node: '>=14'} peerDependencies: '@opentelemetry/api': ^1.0.0 - '@opentelemetry/instrumentation-grpc@0.32.0': - resolution: {integrity: sha512-Az6wdkPx/Mi26lT9LKFV6GhCA9prwQFPz5eCNSExTnSP49YhQ7XCjzPd2POPeLKt84ICitrBMdE1mj0zbPdLAQ==} - engines: {node: '>=14'} - peerDependencies: - '@opentelemetry/api': ^1.0.0 - '@opentelemetry/instrumentation-http@0.34.0': resolution: {integrity: sha512-sZxpYOggRIFwdcdy1wWBGG8fwiuWWK4j3qv/rdqTwcPvrVT4iSCoPNDMZYxOcxSEP1fybq28SK43e+IKwxVElQ==} engines: {node: '>=14'} @@ -1058,12 +892,6 @@ packages: peerDependencies: '@opentelemetry/api': ^1.3.0 - '@opentelemetry/instrumentation@0.32.0': - resolution: {integrity: sha512-y6ADjHpkUz/v1nkyyYjsQa/zorhX+0qVGpFvXMcbjU4sHnBnC02c6wcc93sIgZfiQClIWo45TGku1KQxJ5UUbQ==} - engines: {node: '>=14'} - peerDependencies: - '@opentelemetry/api': ^1.0.0 - '@opentelemetry/instrumentation@0.34.0': resolution: {integrity: sha512-VET/bOh4StOQV4vf1sAvn2JD67BhW2vPZ/ynl2gHXyafme2yB8Hs9+tr1TLzFwNGo7jwMFviFQkZjCYxMuK0AA==} engines: {node: '>=14'} @@ -1106,24 +934,12 @@ packages: peerDependencies: '@opentelemetry/api': '>=1.3.0 <1.9.0' - '@opentelemetry/propagator-b3@1.24.1': - resolution: {integrity: sha512-nda97ZwhpZKyUJTXqQuKzNhPMUgMLunbbGWn8kroBwegn+nh6OhtyGkrVQsQLNdVKJl0KeB5z0ZgeWszrYhwFw==} - engines: {node: '>=14'} - peerDependencies: - '@opentelemetry/api': '>=1.0.0 <1.9.0' - '@opentelemetry/propagator-b3@1.8.0': resolution: {integrity: sha512-ffP6AVHyISqK1kiUY1MoVKt43Wp3FJXI8NOePqxBrAU7bRDJ13276VbSl4ugCZbZLTPrPhhSmvQh1WqlfUgcAg==} engines: {node: '>=14'} peerDependencies: '@opentelemetry/api': '>=1.0.0 <1.4.0' - '@opentelemetry/propagator-jaeger@1.24.1': - resolution: {integrity: sha512-7bRBJn3FG1l195A1m+xXRHvgzAOBsfmRi9uZ5Da18oTh7BLmNDiA8+kpk51FpTsU1PCikPVpRDNPhKVB6lyzZg==} - engines: {node: '>=14'} - peerDependencies: - '@opentelemetry/api': '>=1.0.0 <1.9.0' - '@opentelemetry/propagator-jaeger@1.8.0': resolution: {integrity: sha512-v6GA38k2cqeGAh3368prLW5MsuG2/KxpfWI/PxTPjCa9tThDPq0cvhKpk7cEma3y+F6rieMhwmzZhKQL5QVBzQ==} engines: {node: '>=14'} @@ -1191,12 +1007,6 @@ packages: peerDependencies: '@opentelemetry/api': '>=1.0.0 <1.4.0' - '@opentelemetry/sdk-trace-node@1.24.1': - resolution: {integrity: sha512-/FZX8uWaGIAwsDhqI8VvQ+qWtfMNlXjaFYGc+vmxgdRFppCSSIRwrPyIhJO1qx61okyYhoyxVEZAfoiNxrfJCg==} - engines: {node: '>=14'} - peerDependencies: - '@opentelemetry/api': '>=1.0.0 <1.9.0' - '@opentelemetry/sdk-trace-node@1.8.0': resolution: {integrity: sha512-6FqhJEgW9Nke5SO4Ul9+5EWOfms/JeLg5LRqILMPMK4UMBWcOtk7jldvGGyfVpraJ16/WPo/R5NSnMwlupN5zQ==} engines: {node: '>=14'} @@ -1211,10 +1021,6 @@ packages: resolution: {integrity: sha512-VkliWlS4/+GHLLW7J/rVBA00uXus1SWvwFvcUDxDwmFxYfg/2VI6ekwdXS28cjI8Qz2ky2BzG8OUHo+WeYIWqw==} engines: {node: '>=14'} - '@opentelemetry/semantic-conventions@1.6.0': - resolution: {integrity: sha512-aPfcBeLErM/PPiAuAbNFLN5sNbZLc3KZlar27uohllN8Zs6jJbHyJU1y7cMA6W/zuq+thkaG8mujiS+3iD/FWQ==} - engines: {node: '>=14'} - '@opentelemetry/semantic-conventions@1.8.0': resolution: {integrity: sha512-TYh1MRcm4JnvpqtqOwT9WYaBYY4KERHdToxs/suDTLviGRsQkIjS5yYROTYTSJQUnYLOn/TuOh5GoMwfLSU+Ew==} engines: {node: '>=14'} @@ -1265,13 +1071,6 @@ packages: '@protobufjs/utf8@1.1.0': 
resolution: {integrity: sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==} - '@pulumi/pulumi@3.51.1': - resolution: {integrity: sha512-h2dbLs04D/qw+pv4er/u9UWyZ8JTvT1RRzAiIrZvicbLK+PfBg9cbTLL76DVLmNbMIOmJPcqwHQDlcqQbPnI/w==} - engines: {node: '>=8.13.0 || >=10.10.0'} - - '@pulumi/query@0.3.0': - resolution: {integrity: sha512-xfo+yLRM2zVjVEA4p23IjQWzyWl1ZhWOGobsBqRpIarzLvwNH/RAGaoehdxlhx4X92302DrpdIFgTICMN4P38w==} - '@rollup/rollup-android-arm-eabi@4.18.0': resolution: {integrity: sha512-Tya6xypR10giZV1XzxmH5wr25VcZSncG0pZIjfePT0OVBvqNEurzValetGNarVrGiq66EBVAFn15iYX4w6FKgQ==} cpu: [arm] @@ -1505,6 +1304,9 @@ packages: '@types/strip-json-comments@0.0.30': resolution: {integrity: sha512-7NQmHra/JILCd1QqpSzl8+mJRc8ZHz3uDm8YV1Ks9IhK0epEiTw8aIErbvH9PI+6XbqhyIQy3462nEsn7UVzjQ==} + '@types/triple-beam@1.3.5': + resolution: {integrity: sha512-6WaYesThRMCl19iryMYP7/x2OVgCtbIVflDGFpWnb9irXI3UjYE4AzmYuiUKY1AJstGijoY+MgUszMgRxIYTYw==} + '@types/uuid@9.0.8': resolution: {integrity: sha512-jg+97EGIcY9AGHJJRaaPVgetKDsrTgbRjQ5Msgjh/DQKEFl0DtyRr/VCOyD1T2R1MNeWPK/u7JoGhlDZnKBAfA==} @@ -1517,9 +1319,6 @@ packages: '@types/ws@8.2.3': resolution: {integrity: sha512-ahRJZquUYCdOZf/rCsWg88S0/+cb9wazUBHv6HZEe3XdYaBe2zr/slM8J28X07Hn88Pnm4ezo7N8/ofnOgrPVQ==} - '@types/yoga-layout@1.9.2': - resolution: {integrity: sha512-S9q47ByT2pPvD65IvrWp7qppVMpk9WGMbVq9wbWZOHg6tnXSD4vyhao6nOSBwwfDdV2p3Kx9evA9vI+XWTfDvw==} - '@vitest/expect@0.34.6': resolution: {integrity: sha512-QUzKpUQRc1qC7qdGo7rMK3AkETI7w18gTCUrsNnyjjJKYiuUB9+TQK3QnR1unhCnWRC0AbKv2omLGQDF/mIjOw==} @@ -1663,10 +1462,6 @@ packages: resolution: {integrity: sha512-djYB+Zx2vLewY8RWlNCUdHjDXs2XOgm602S9E7P/UpHgfeHL00cRiIF+IN/G/aUJ7kGPb6yO/ErDI5V2s8iycA==} engines: {node: '>= 0.4'} - array.prototype.reduce@1.0.7: - resolution: {integrity: sha512-mzmiUCVwtiD4lgxYP8g7IYy8El8p2CSMePvIbTS7gchKir/L1fgJrk0yDKmAX6mnRQFKNADYIk8nNlTris5H1Q==} - engines: {node: '>= 0.4'} - arraybuffer.prototype.slice@1.0.3: resolution: {integrity: sha512-bMxMKAjg13EBSVscxTaYA4mRc5t1UAXa2kXiGTNfZ079HIWXEkKmkgFrh/nJqamaLSrXO5H4WFFkPEaLJWbs3A==} engines: {node: '>= 0.4'} @@ -1675,16 +1470,9 @@ packages: resolution: {integrity: sha512-3CYzex9M9FGQjCGMGyi6/31c8GJbgb0qGyrx5HWxPd0aCwh4cB2YjMb2Xf9UuoogrMrlO9cTqnB5rI5GHZTcUA==} engines: {node: '>=0.10.0'} - asap@2.0.6: - resolution: {integrity: sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==} - assertion-error@1.1.0: resolution: {integrity: sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw==} - astral-regex@2.0.0: - resolution: {integrity: sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ==} - engines: {node: '>=8'} - async-mutex@0.5.0: resolution: {integrity: sha512-1A94B18jkJ3DYq284ohPxoXbfTA5HsQ7/Mf4DEhcyLx3Bz27Rh59iScbB6EPiP+B+joue6YCxcMXSbFC1tZKwA==} @@ -1695,10 +1483,6 @@ packages: resolution: {integrity: sha512-kNOjDqAh7px0XWNI+4QbzoiR/nTkHAWNud2uvnJquD1/x5a7EQZMJT0AczqK0Qn67oY/TTQ1LbUKajZpp3I9tQ==} engines: {node: '>=8.0.0'} - auto-bind@4.0.0: - resolution: {integrity: sha512-Hdw8qdNiqdJ8LqT0iK0sVzkFbzg6fhnQqqfWhBDxcHZvU75+B+ayzTy8x+k5Ix0Y92XOhOUlx74ps+bA6BeYMQ==} - engines: {node: '>=8'} - available-typed-arrays@1.0.7: resolution: {integrity: sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ==} engines: {node: '>= 0.4'} @@ -1712,9 +1496,6 @@ packages: base64-js@1.5.1: resolution: {integrity: 
sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==} - before-after-hook@2.2.3: - resolution: {integrity: sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ==} - better-ajv-errors@1.2.0: resolution: {integrity: sha512-UW+IsFycygIo7bclP9h5ugkNH8EjCSgqyFB/yQ4Hqqa1OEYDtb0uFIkYE0b6+CjkgJYVM5UKI/pJPxjYe9EZlA==} engines: {node: '>= 12.13.0'} @@ -1753,10 +1534,6 @@ packages: breakword@1.0.6: resolution: {integrity: sha512-yjxDAYyK/pBvws9H4xKYpLDpYKEH6CzrBPAuXq3x18I+c/2MkVtT3qAr7Oloi6Dss9qNhPVueAAVU1CSeNDIXw==} - bson@4.7.2: - resolution: {integrity: sha512-Ry9wCtIZ5kGqkJoi6aD8KjxFZEx78guTQDnpXWiNthsxzrxAK/i8E6pCHAIZTbaEFWcOCvbecMukfK7XUvyLpQ==} - engines: {node: '>=6.9.0'} - bson@6.7.0: resolution: {integrity: sha512-w2IquM5mYzYZv6rs3uN2DZTOBe2a0zXLj53TGDqwF4l6Sz/XsISrisXOJihArF9+BZ6Cq/GjVht7Sjfmri7ytQ==} engines: {node: '>=16.20.1'} @@ -1847,9 +1624,6 @@ packages: resolution: {integrity: sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==} engines: {node: '>=10'} - ci-info@2.0.0: - resolution: {integrity: sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ==} - ci-info@3.9.0: resolution: {integrity: sha512-NIxF55hv4nSqQswkAeiOi1r83xy8JldOFDTWiug55KBu9Jnblncd2U6ViHmYgHf01TPZS77NJBhBMKdWj9HQMQ==} engines: {node: '>=8'} @@ -1858,10 +1632,6 @@ packages: resolution: {integrity: sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==} engines: {node: '>=6'} - cli-boxes@2.2.1: - resolution: {integrity: sha512-y4coMcylgSCdVinjiDBuR8PCC2bLjyGTwEmPb9NHR/QaNU6EUOXcTY/s6VjGMD6ENSEaeQYHCY0GNGS5jfMwPw==} - engines: {node: '>=6'} - cli-boxes@3.0.0: resolution: {integrity: sha512-/lzGpEWL/8PfI0BmBOPRwp0c/wFNX1RdUML3jK/RcSBA9T8mZDdQpqYBKtCFTOfQbwPqWEOpjqW+Fnayc0969g==} engines: {node: '>=10'} @@ -1878,14 +1648,6 @@ packages: resolution: {integrity: sha512-+W/5efTR7y5HRD7gACw9yQjqMVvEMLBHmboM/kPWam+H+Hmyrgjh6YncVKK122YZkXrLudzTuAukUw9FnMf7IQ==} engines: {node: 10.* || >= 12.*} - cli-truncate@2.1.0: - resolution: {integrity: sha512-n8fOixwDD6b/ObinzTrp1ZKFzbgvKZvuz/TvejnLn1aQfC6r52XEx85FmuC+3HI+JM7coBRXUvNqEU2PHVrHpg==} - engines: {node: '>=8'} - - cli-width@3.0.0: - resolution: {integrity: sha512-FxqpkPPwu1HjuN93Omfm4h8uIanXofW0RxVEW3k5RKx+mJJYSthzNhp32Kzxxy3YAEZ/Dc/EWN1vZRY0+kOhbw==} - engines: {node: '>= 10'} - cli-width@4.1.0: resolution: {integrity: sha512-ouuZd4/dm2Sw5Gmqy6bGyNNNe1qt9RpmxveLSO7KcgsTnU7RXfsw+/bukWGo1abgBiMAic068rclZsO4IWmmxQ==} engines: {node: '>= 12'} @@ -1904,10 +1666,6 @@ packages: resolution: {integrity: sha512-JQHZ2QMW6l3aH/j6xCqQThY/9OH4D/9ls34cgkUBiEeocRTU04tHfKPBsUK1PqZCUQM7GiA0IIXJSuXHI64Kbg==} engines: {node: '>=0.8'} - code-excerpt@3.0.0: - resolution: {integrity: sha512-VHNTVhd7KsLGOqfX3SyeO8RyYPMp1GJOg194VITk04WMYCv4plV68YWe6TJZxd9MhobjtpMRnVky01gqZsalaw==} - engines: {node: '>=10'} - color-convert@1.9.3: resolution: {integrity: sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==} @@ -1921,10 +1679,19 @@ packages: color-name@1.1.4: resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==} + color-string@1.9.1: + resolution: {integrity: sha512-shrVawQFojnZv6xM40anx4CkoDP+fZsw/ZerEMsW/pyzsRbElpsL/DBVW7q3ExxwusdNXI3lXpuhEZkzs8p5Eg==} + color-support@1.1.3: resolution: {integrity: 
sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==} hasBin: true + color@3.2.1: + resolution: {integrity: sha512-aBl7dZI9ENN6fUGC7mWpMTPNHmWUSNan9tuWN6ahh5ZLNk9baLJOnSMlrQkHcrfFgz2/RigjUVAjdx36VcemKA==} + + colorspace@1.1.4: + resolution: {integrity: sha512-BgvKJiuVu1igBUF2kEjRCZXol6wiiGbY5ipL/oVPwm0BL9sIpMIzM8IK7vwuxIIzOXMV3Ey5w+vxhm0rR/TN8w==} + commander@10.0.1: resolution: {integrity: sha512-y4Mg2tXshplEbSGzx7amzPwKKOCGuoSRP/CjEdwwk0FOGlUbq6lKuoyDZTNZkmxHdJtp54hdfY/JUrdL7Xfdug==} engines: {node: '>=14'} @@ -1965,10 +1732,6 @@ packages: resolution: {integrity: sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==} engines: {node: '>= 0.6'} - convert-to-spaces@1.0.2: - resolution: {integrity: sha512-cj09EBuObp9gZNQCzc7hByQyrs6jVGE+o9kSJmeUoj+GiPiJvi5LYqEH/Hmme4+MTLHM+Ejtq+FChpjjEnsPdQ==} - engines: {node: '>= 4'} - cookie-signature@1.0.6: resolution: {integrity: sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==} @@ -2051,10 +1814,6 @@ packages: supports-color: optional: true - debuglog@1.0.1: - resolution: {integrity: sha512-syBZ+rnAK3EgMsH2aYEOLUW7mZSY9Gb+0wUMCFsZvcmiz+HigA0LOcq/HoQqVuGG+EKykunc7QG2bzrponfaSw==} - deprecated: Package no longer supported. Contact Support at https://www.npmjs.com/support for more info. - decamelize-keys@1.1.1: resolution: {integrity: sha512-WiPxgEirIV0/eIOMcnFBA3/IJZAZqKnwAwWyvvdi4lsr1WCN22nhdf/3db3DoZcUjTV2SqfzIwNyp6y2xs3nmg==} engines: {node: '>=0.10.0'} @@ -2086,10 +1845,6 @@ packages: resolution: {integrity: sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==} engines: {node: '>= 0.4'} - define-lazy-prop@2.0.0: - resolution: {integrity: sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og==} - engines: {node: '>=8'} - define-properties@1.2.1: resolution: {integrity: sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg==} engines: {node: '>= 0.4'} @@ -2101,9 +1856,6 @@ packages: resolution: {integrity: sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==} engines: {node: '>= 0.8'} - deprecation@2.3.1: - resolution: {integrity: sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==} - destroy@1.2.0: resolution: {integrity: sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==} engines: {node: '>= 0.8', npm: 1.2.8000 || >= 1.4.16} @@ -2112,17 +1864,10 @@ packages: resolution: {integrity: sha512-reYkTUJAZb9gUuZ2RvVCNhVHdg62RHnJ7WJl8ftMi4diZ6NWlciOzQN88pUhSELEwflJht4oQDv0F0BMlwaYtA==} engines: {node: '>=8'} - dezalgo@1.0.4: - resolution: {integrity: sha512-rXSP0bf+5n0Qonsb+SVVfNfIsimO4HEtmnIpPHY8Q1UCzKlQrDMfdobr8nJOOsRgWCyMRqeSBQzmWUMq7zvVig==} - diff-sequences@29.6.3: resolution: {integrity: sha512-EjePK1srD3P08o2j4f0ExnylqRs5B9tJjcp9t1krH2qRi8CCdsYfwe9JgSLurFBWwq4uOlipzfk5fHNvwFKr8Q==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - diff@3.5.0: - resolution: {integrity: sha512-A46qtFgd+g7pDZinpnwiRJtxbC1hpgf0uzP3iG89scHk0AUC7A1TGxf5OiiOUv/JMZR8GOt8hL900hV0bOy5xA==} - engines: {node: '>=0.3.1'} - diff@4.0.2: resolution: {integrity: sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==} engines: {node: '>=0.3.1'} @@ -2160,6 +1905,9 @@ packages: emoji-regex@9.2.2: resolution: {integrity: 
sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==} + enabled@2.0.0: + resolution: {integrity: sha512-AKrN98kuwOzMIdAizXGI86UFBoo26CL21UM763y1h/GMSJ4/OHU9k2YlsmBpyScFo/wbLzWQJBMCW4+IO3/+OQ==} + encodeurl@1.0.2: resolution: {integrity: sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==} engines: {node: '>= 0.8'} @@ -2188,9 +1936,6 @@ packages: resolution: {integrity: sha512-e+HfNH61Bj1X9/jLc5v1owaLYuHdeHHSQlkhCBiTK8rBvKaULl/beGMxwrMXjpYrv4pz22BlY570vVePA2ho4A==} engines: {node: '>= 0.4'} - es-array-method-boxes-properly@1.0.0: - resolution: {integrity: sha512-wd6JXUmyHmt8T5a2xreUwKcGPq6f1f+WwIJkijUqiGcJz1qqnZgP6XIK+QyIWU5lT7imeNxUll48bziG+TSYcA==} - es-define-property@1.0.0: resolution: {integrity: sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==} engines: {node: '>= 0.4'} @@ -2234,10 +1979,6 @@ packages: resolution: {integrity: sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==} engines: {node: '>=0.8.0'} - escape-string-regexp@2.0.0: - resolution: {integrity: sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==} - engines: {node: '>=8'} - esprima@4.0.1: resolution: {integrity: sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==} engines: {node: '>=4'} @@ -2258,10 +1999,6 @@ packages: resolution: {integrity: sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==} engines: {node: '>=0.8.x'} - execa@5.1.1: - resolution: {integrity: sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==} - engines: {node: '>=10'} - exponential-backoff@3.1.1: resolution: {integrity: sha512-dX7e/LHVJ6W3DE1MHWi9S1EYzDESENfLrYohG2G++ovZrYOkm4Knwa0mc1cn84xJOR4KEU0WSchhLbd0UklbHw==} @@ -2314,14 +2051,13 @@ packages: fastq@1.17.1: resolution: {integrity: sha512-sRVD3lWVIXWg6By68ZN7vho9a1pQcN/WBFaAAsDDFzlJjvoGx0P8z7V1t72grFJfJhu3YPZBuu25f7Kaw2jN1w==} + fecha@4.2.3: + resolution: {integrity: sha512-OP2IUU6HeYKJi3i0z4A19kHMQoLVs4Hc+DPqqxI2h/DPZHTm/vjsfC6P0b4jCMy14XizLBqvndQ+UilD7707Jw==} + fetch-blob@3.2.0: resolution: {integrity: sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ==} engines: {node: ^12.20 || >= 14.13} - figures@3.2.0: - resolution: {integrity: sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg==} - engines: {node: '>=8'} - fill-range@7.1.1: resolution: {integrity: sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==} engines: {node: '>=8'} @@ -2345,6 +2081,9 @@ packages: find-yarn-workspace-root2@1.2.16: resolution: {integrity: sha512-hr6hb1w8ePMpPVUK39S4RlwJzi+xPLuVuG8XlwXU3KD5Yn3qgBWVfy3AzNlDhWvE1EORCE65/Qm26rFQt3VLVA==} + fn.name@1.1.0: + resolution: {integrity: sha512-GRnmB5gPyJpAhTQdSZTSp9uaPSvl09KoYcMQtsB9rQoOmzs9dH6ffeccH+Z+cv6P68Hu5bC6JjRh4Ah/mHSNRw==} + for-each@0.3.3: resolution: {integrity: sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw==} @@ -2375,10 +2114,6 @@ packages: from@0.1.7: resolution: {integrity: sha512-twe20eF1OxVxp/ML/kq2p1uc6KvFK/+vs8WjEbeKmV2He22MKm7YF2ANIt+EOqhJ5L3K/SuuPhk0hWQDjOM23g==} - fs-extra@11.2.0: - resolution: {integrity: sha512-PmDi3uwK5nFuXh7XDTlVnS17xJS7vW36is2+w3xcv8SVxiB4NyATf4ctkVY5bkSjX0Y4nbvZCq1/EjtEyr9ktw==} - engines: {node: '>=14.14'} - 
fs-extra@7.0.1: resolution: {integrity: sha512-YJDaCJZEnBmcbw13fvdAM9AwNOJwOzrE4pqMqBq5nFiEqXUqHwlK4B+3pUw6JNvfSPtX05xFHtYy/1ni01eGCw==} engines: {node: '>=6 <7 || >=8'} @@ -2474,9 +2209,6 @@ packages: globrex@0.1.2: resolution: {integrity: sha512-uHJgbwAMwNFf5mLst7IWLNg14x1CkeqglJb/K3doi4dw6q2IvAAmM/Y81kevy83wP+Sst+nutFTYOGg3d1lsxg==} - google-protobuf@3.21.2: - resolution: {integrity: sha512-3MSOYFO5U9mPGikIYCzK0SaThypfGgS6bHqrUGXG3DPHCrb+txNqeEcns1W0lkGfk0rCyNXm7xB9rMxnCiZOoA==} - gopd@1.0.1: resolution: {integrity: sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==} @@ -2572,10 +2304,6 @@ packages: human-id@1.0.2: resolution: {integrity: sha512-UNopramDEhHJD+VR+ehk8rOslwSfByxPIZyJRfV739NDhN5LF1fa1MqnzKm2lGTQRjNrjK19Q5fhkgIfjlVUKw==} - human-signals@2.1.0: - resolution: {integrity: sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==} - engines: {node: '>=10.17.0'} - humanize-ms@1.2.1: resolution: {integrity: sha512-Fl70vYtsAFb/C06PTS9dZBo7ihau+Tu/DNCk/OyHhea07S+aeMWpFFkUaXRa8fI+ScZbEI8dfSxwY7gxZ9SAVQ==} @@ -2637,20 +2365,6 @@ packages: resolution: {integrity: sha512-X7rqawQBvfdjS10YU1y1YVreA3SsLrW9dX2CewP2EbBJM4ypVNLDkO5y04gejPwKIY9lR+7r9gn3rFPt/kmWFg==} engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} - ink@3.2.0: - resolution: {integrity: sha512-firNp1q3xxTzoItj/eOOSZQnYSlyrWks5llCTVX37nJ59K3eXbQ8PtzCguqo8YI19EELo5QxaKnJd4VxzhU8tg==} - engines: {node: '>=10'} - peerDependencies: - '@types/react': '>=16.8.0' - react: '>=16.8.0' - peerDependenciesMeta: - '@types/react': - optional: true - - inquirer@8.2.6: - resolution: {integrity: sha512-M1WuAmb7pn9zdFRtQYk26ZBoY043Sse0wVDdk4Bppr+JOXyQYybdtvK+l9wUibhtjdjvtoiNy8tk+EgsYIUqKg==} - engines: {node: '>=12.0.0'} - inquirer@9.2.22: resolution: {integrity: sha512-SqLLa/Oe5rZUagTR9z+Zd6izyatHglbmbvVofo1KzuVB54YHleWzeHNLoR7FOICGOeQSqeLh1cordb3MzhGcEw==} engines: {node: '>=18'} @@ -2678,6 +2392,9 @@ packages: is-arrayish@0.2.1: resolution: {integrity: sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==} + is-arrayish@0.3.2: + resolution: {integrity: sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ==} + is-bigint@1.0.4: resolution: {integrity: sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg==} @@ -2693,10 +2410,6 @@ packages: resolution: {integrity: sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==} engines: {node: '>= 0.4'} - is-ci@2.0.0: - resolution: {integrity: sha512-YfJT7rkpQB0updsdHLGWrvhBJfcfzNNawYDNIyQXJz0IViGf75O8EBPKSdvw2rF+LGCsX4FZ8tcr3b19LcZq4w==} - hasBin: true - is-ci@3.0.1: resolution: {integrity: sha512-ZYvCgrefwqoQ6yTyYUbQu64HsITZ3NfKX1lzaEYdkTDcfKzzCI/wthRRYKkdjHKFVgNiXKAKm65Zo1pk2as/QQ==} hasBin: true @@ -2712,11 +2425,6 @@ packages: resolution: {integrity: sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ==} engines: {node: '>= 0.4'} - is-docker@2.2.1: - resolution: {integrity: sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ==} - engines: {node: '>=8'} - hasBin: true - is-extglob@2.1.1: resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==} engines: {node: '>=0.10.0'} @@ -2768,10 +2476,6 @@ packages: resolution: {integrity: 
sha512-yvkRyxmFKEOQ4pNXCmJG5AEQNlXJS5LaONXo5/cLdTZdWvsZ1ioJEonLGAosKlMWE8lwUy/bJzMjcw8az73+Fg==} engines: {node: '>=0.10.0'} - is-plain-object@5.0.0: - resolution: {integrity: sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==} - engines: {node: '>=0.10.0'} - is-regex@1.1.4: resolution: {integrity: sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==} engines: {node: '>= 0.4'} @@ -2814,10 +2518,6 @@ packages: resolution: {integrity: sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA==} engines: {node: '>=0.10.0'} - is-wsl@2.2.0: - resolution: {integrity: sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww==} - engines: {node: '>=8'} - is-yarn-global@0.4.1: resolution: {integrity: sha512-/kppl+R+LO5VmhYSEWARUFjodS25D68gvj8W7z0I7OWhUla5xWu8KL6CtB2V0R6yqhnRgbcaREMr4EEM6htLPQ==} engines: {node: '>=12'} @@ -2892,9 +2592,6 @@ packages: jsonfile@4.0.0: resolution: {integrity: sha512-m6F1R3z8jjlf2imQHS2Qez5sjKWQzbuuhuJ/FKYFRZvPE3PuHcSMVZzfsLhGVOkfd20obL5SWEBew5ShlquNxg==} - jsonfile@6.1.0: - resolution: {integrity: sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==} - jsonlines@0.1.1: resolution: {integrity: sha512-ekDrAGso79Cvf+dtm+mL8OBI2bmAOt3gssYs833De/C9NmIpWDWyUO4zPgB5x2/OhY366dkhgfPMYfwZF7yOZA==} @@ -2921,6 +2618,9 @@ packages: resolution: {integrity: sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ==} engines: {node: '>=6'} + kuler@2.0.0: + resolution: {integrity: sha512-Xq9nH7KlWZmXAtodXDDRE7vs6DU1gTU8zYDHDiWLSip45Egwq3plLHzPn27NgvzL2r1LMPC1vdqh98sQxtqj4A==} + latest-version@7.0.0: resolution: {integrity: sha512-KvNT4XqAMzdcL6ka6Tl3i2lYeFDgXNCuIX+xNx6ZMVR1dFq+idXd9FLKNMOIx0t9mJ9/HudyX4oZWXZQ0UJHeg==} engines: {node: '>=14.16'} @@ -2977,6 +2677,10 @@ packages: resolution: {integrity: sha512-p1Ow0C2dDJYaQBhRHt+HVMP6ELuBm4jYSYNHPMfz0J5wJ9qA6/7oBOlBZBfT1InqguTYcvJzNea5FItDxTcbyw==} hasBin: true + logform@2.6.0: + resolution: {integrity: sha512-1ulHeNPp6k/LD8H91o7VYFBng5i1BDE7HoKxVbZiGFidS1Rj65qcywLxX+pVfAPoQJEjRdvKcusKwOupHCVOVQ==} + engines: {node: '>= 12.0.0'} + long@2.4.0: resolution: {integrity: sha512-ijUtjmO/n2A5PaosNG9ZGDsQ3vxJg7ZW8vsY8Kp0f2yIZWhSJvjmegV7t+9RPQKxKrvj8yKGehhS+po14hPLGQ==} engines: {node: '>=0.6'} @@ -2984,10 +2688,6 @@ packages: long@5.2.3: resolution: {integrity: sha512-lcHwpNoggQTObv5apGNCTdJrO69eHOZMi4BNC+rTLER8iHAqGrUVeLh/irVIM7zTw2bOXA8T6uNPeujwOLg/2Q==} - loose-envify@1.4.0: - resolution: {integrity: sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==} - hasBin: true - lossless-json@2.0.11: resolution: {integrity: sha512-BP0vn+NGYvzDielvBZaFain/wgeJ1hTvURCqtKvhr1SCPePdaaTanmmcplrHfEJSJOUql7hk4FHwToNJjWRY3g==} @@ -3048,9 +2748,6 @@ packages: merge-descriptors@1.0.1: resolution: {integrity: sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w==} - merge-stream@2.0.0: - resolution: {integrity: sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==} - merge2@1.4.1: resolution: {integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==} engines: {node: '>= 8'} @@ -3157,10 +2854,6 @@ packages: resolution: {integrity: sha512-5H76ANWinB1H3twpJ6JY8uvAtpmFvHNArpilJAjXRKXSDDLPIMoZArw5SH0q9z+lLs8IrMw7Q2VWpWimFKFT1Q==} engines: {node: '>= 8.0.0'} 
- mkdirp@0.5.6: - resolution: {integrity: sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==} - hasBin: true - mkdirp@1.0.4: resolution: {integrity: sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==} engines: {node: '>=10'} @@ -3217,9 +2910,6 @@ packages: ms@2.1.3: resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==} - mute-stream@0.0.8: - resolution: {integrity: sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==} - mute-stream@1.0.0: resolution: {integrity: sha512-avsJQhyd+680gKXyG/sQc0nXaC6rBkPOfyHYcFb9+hdkqQkR9bdnkJ0AMZhke0oesPqIO+mFFJ+IdBc7mst4IA==} engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} @@ -3306,9 +2996,6 @@ packages: resolution: {integrity: sha512-W29RiK/xtpCGqn6f3ixfRYGk+zRyr+Ew9F2E20BfXxT5/euLdA/Nm7fO7OeTGuAmTs30cpgInyJ0cYe708YTZw==} engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} - npm-normalize-package-bin@1.0.1: - resolution: {integrity: sha512-EPfafl6JL5/rU+ot6P3gRSCpPDW5VmIzX959Ob1+ySFUuuYHWHekXpwdUZcKP5C+DS4GEtdJluwBjnsNDl+fSA==} - npm-normalize-package-bin@3.0.1: resolution: {integrity: sha512-dMxCf+zZ+3zeQZXKxmyuCKlIDPGuv8EF940xbkC4kQVDTtqoh6rJFO+JTKSA6/Rwi0getWmtuy4Itup0AMcaDQ==} engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} @@ -3329,10 +3016,6 @@ packages: resolution: {integrity: sha512-kIDMIo4aBm6xg7jOttupWZamsZRkAqMqwqqbVXnUqstY5+tapvv6bkH/qMR76jdgV+YljEUCyWx3hRYMrJiAgA==} engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} - npm-run-path@4.0.1: - resolution: {integrity: sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==} - engines: {node: '>=8'} - npmlog@6.0.2: resolution: {integrity: sha512-/vBvz5Jfr9dT/aFWd0FIRf+T/Q2WBsLENygUaFUqstqsycmZAP/t5BvFJTK0viFmSUxiUKTUplWy5vt+rvKIxg==} engines: {node: ^12.13.0 || ^14.15.0 || >=16.0.0} @@ -3353,10 +3036,6 @@ packages: resolution: {integrity: sha512-byy+U7gp+FVwmyzKPYhW2h5l3crpmGsxl7X2s8y43IgxvG4g3QZ6CffDtsNQy1WsmZpQbO+ybo0AlW7TY6DcBQ==} engines: {node: '>= 0.4'} - object.getownpropertydescriptors@2.1.8: - resolution: {integrity: sha512-qkHIGe4q0lSYMv0XI4SsBTJz3WaURhLvd0lKSgtVuOsJ2krg4SgMw3PIRQFMp07yi++UR3se2mkcLqsBNpBb/A==} - engines: {node: '>= 0.8'} - obliterator@2.0.4: resolution: {integrity: sha512-lgHwxlxV1qIg1Eap7LgIeoBWIMFibOjbrYPIPJZcI1mmGAI2m3lNYpK12Y+GBdPQ0U1hRwSord7GIaawz962qQ==} @@ -3371,14 +3050,13 @@ packages: once@1.4.0: resolution: {integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==} + one-time@1.0.0: + resolution: {integrity: sha512-5DXOiRKwuSEcQ/l0kGCF6Q3jcADFv5tSmRaJck/OqkVFcOzutB134KRSfF0xDrL39MNnqxbHBbUUcjZIhTgb2g==} + onetime@5.1.2: resolution: {integrity: sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==} engines: {node: '>=6'} - open@8.4.2: - resolution: {integrity: sha512-7x81NCL719oNbsq/3mh+hVrAWmFuEYUqrq/Iw3kUzH8ReypT9QQ0BLoJS7/G9k6N81XjW4qHWtjWwe/9eLy1EQ==} - engines: {node: '>=12'} - opentracing@0.14.7: resolution: {integrity: sha512-vz9iS7MJ5+Bp1URw8Khvdyw1H/hGvzHWlKQ7eRrQojSCDL1/SrWfrY9QebLw97n2deyRtzHRC3MkQfVNUCo91Q==} engines: {node: '>=0.10'} @@ -3456,10 +3134,6 @@ packages: resolution: {integrity: sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==} engines: {node: '>= 0.8'} - patch-console@1.0.0: - resolution: {integrity: 
sha512-nxl9nrnLQmh64iTzMfyylSlRozL7kAXIaxw1fVcLYdyhNkJCRUzirRZTikXGJsg+hc4fqpneTK6iU2H1Q8THSA==} - engines: {node: '>=10'} - path-exists@4.0.0: resolution: {integrity: sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==} engines: {node: '>=8'} @@ -3667,39 +3341,18 @@ packages: resolution: {integrity: sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==} hasBin: true - react-devtools-core@4.28.5: - resolution: {integrity: sha512-cq/o30z9W2Wb4rzBefjv5fBalHU0rJGZCHAkf/RHSBWSSYwh8PlQTqqOJmgIIbBtpj27T6FIPXeomIjZtCNVqA==} - react-is@18.3.1: resolution: {integrity: sha512-/LLMVyas0ljjAtoYiPqYiL8VWXzUUdThrmU5+n20DZv+a+ClRoevUzw5JxU+Ieh5/c87ytoTBV9G1FiKfNJdmg==} - react-reconciler@0.26.2: - resolution: {integrity: sha512-nK6kgY28HwrMNwDnMui3dvm3rCFjZrcGiuwLc5COUipBK5hWHLOxMJhSnSomirqWwjPBJKV1QcbkI0VJr7Gl1Q==} - engines: {node: '>=0.10.0'} - peerDependencies: - react: ^17.0.2 - - react@17.0.2: - resolution: {integrity: sha512-gnhPt75i/dq/z3/6q/0asP78D0u592D5L1pd7M8P+dck6Fu/jJeL6iVVK23fptSUZj8Vjf++7wXA8UNclGQcbA==} - engines: {node: '>=0.10.0'} - read-package-json-fast@3.0.2: resolution: {integrity: sha512-0J+Msgym3vrLOUB3hzQCuZHII0xkNGCtz/HJH9xZshwv9DbDwkw1KaE3gx/e2J5rpEY5rtOy6cyhKOPrkP7FZw==} engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} - read-package-json@2.1.2: - resolution: {integrity: sha512-D1KmuLQr6ZSJS0tW8hf3WGpRlwszJOXZ3E8Yd/DNRaM5d+1wVRZdHlpGBLAuovjr28LbWvjpWkBHMxpRGGjzNA==} - deprecated: This package is no longer supported. Please use @npmcli/package-json instead. - read-package-json@6.0.4: resolution: {integrity: sha512-AEtWXYfopBj2z5N5PbkAOeNHRPUg5q+Nen7QLxV8M2zJq1ym6/lCz3fYNTCXe19puu2d06jfHhrP7v/S2PtMMw==} engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} deprecated: This package is no longer supported. Please use @npmcli/package-json instead. 
- read-package-tree@5.3.1: - resolution: {integrity: sha512-mLUDsD5JVtlZxjSlPPx1RETkNjjvQYuweKwNVt1Sn8kP5Jh44pvYuUHCp6xSVDZWbNxVxG5lyZJ921aJH61sTw==} - deprecated: The functionality that this package provided is now in @npmcli/arborist - read-pkg-up@7.0.1: resolution: {integrity: sha512-zK0TB7Xd6JpCLmlLmufqykGE+/TlOePD6qKClNW7hHDKFh/J7/7gCWGR7joEQEW1bKq3a3yUZSObOoWLFQ4ohg==} engines: {node: '>=8'} @@ -3726,10 +3379,6 @@ packages: resolution: {integrity: sha512-yjavECdqeZ3GLXNgRXgeQEdz9fvDDkNKyHnbHRFtOr7/LcfgBcmct7t/ET+HaCTqfh06OzoAxrkN/IfjJBVe+g==} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} - readdir-scoped-modules@1.1.0: - resolution: {integrity: sha512-asaikDeqAQg7JifRsZn1NJZXo9E+VwlyCfbkZhwyISinqk5zNS6266HS5kah6P0SaQKGF6SkNnZVHUzHFYxYDw==} - deprecated: This functionality has been moved to @npmcli/fs - readdirp@3.6.0: resolution: {integrity: sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==} engines: {node: '>=8.10.0'} @@ -3840,10 +3489,6 @@ packages: rsocket-websocket-client@1.0.0-alpha.3: resolution: {integrity: sha512-CwTwTNMGa8BKvrWde/kM3q8IHuzO8RCIfzuj25BsVe9y8eehDQHt4fXk0g1i/wpsxTm+RY6DxE6Vr5snozKVOg==} - run-async@2.4.1: - resolution: {integrity: sha512-tvVnVv01b8c1RrA6Ep7JkStj85Guv/YrMcwqYQnwjsAS2cTmmPGBBjAjpCW7RrSodNSoE2/qg9O4bceNvUuDgQ==} - engines: {node: '>=0.12.0'} - run-async@3.0.0: resolution: {integrity: sha512-540WwVDOMxA6dN6We19EcT9sc3hkXPw5mzRNGM3FkdN/vtE9NFvj5lFAPNwUDmJjXidm3v7TC1cTE7t17Ulm1Q==} engines: {node: '>=0.12.0'} @@ -3878,9 +3523,6 @@ packages: safer-buffer@2.1.2: resolution: {integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==} - scheduler@0.20.2: - resolution: {integrity: sha512-2eWfGgAqqWFGqtdMmcL5zCMK1U8KlXv8SQFGglL3CEtd0aDVDWgeF/YoCmvln55m5zSk3J/20hTaSBeSObsQDQ==} - secure-json-parse@2.7.0: resolution: {integrity: sha512-6aU+Rwsezw7VR8/nyvKTx8QpWH9FrcYiXXlqC4z5d5XQBDRqtbfsRjnwGyqbi3gddNtWHuEk9OANUotL26qKUw==} @@ -3895,10 +3537,6 @@ packages: resolution: {integrity: sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==} hasBin: true - semver@6.3.1: - resolution: {integrity: sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==} - hasBin: true - semver@7.6.2: resolution: {integrity: sha512-FNAIBWCx9qcRhoHcgcJ0gvU7SN1lYU2ZXuSfl04bSC5OpvDHFyJCjdNHomPXxjQlCBU67YW64PzY7/VIEH7F2w==} engines: {node: '>=10'} @@ -3970,6 +3608,9 @@ packages: engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} hasBin: true + simple-swizzle@0.2.2: + resolution: {integrity: sha512-JA//kQgZtbuY83m+xT+tXJkmJncGMTFT+C+g2h2R9uxkYIrE2yy9sgmcLhCnw57/WSD+Eh3J97FPEDFnbXnDUg==} + simple-update-notifier@2.0.0: resolution: {integrity: sha512-a2B9Y0KlNXl9u/vsW6sTIu9vGEpfKu2wRV6l1H3XEas/0gUIzGzBoP/IouTcUQbm9JWZLH3COxyn03TYlFax6w==} engines: {node: '>=10'} @@ -3981,10 +3622,6 @@ packages: resolution: {integrity: sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==} engines: {node: '>=8'} - slice-ansi@3.0.0: - resolution: {integrity: sha512-pSyv7bSTC7ig9Dcgbw9AuRNUb5k5V6oDudjZoMBSr13qpLBG7tB+zgCkARjq7xIUgdz5P1Qe8u+rSGdouOOIyQ==} - engines: {node: '>=8'} - smart-buffer@4.2.0: resolution: {integrity: sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg==} engines: {node: '>= 6.0.0', npm: '>= 3.0.0'} @@ -4065,9 +3702,8 @@ packages: resolution: {integrity: 
sha512-o57Wcn66jMQvfHG1FlYbWeZWW/dHZhJXjpIcTfXldXEk5nz5lStPo3mK0OJQfGR3RbZUlbISexbljkJzuEj/8Q==} engines: {node: ^12.13.0 || ^14.15.0 || >=16.0.0} - stack-utils@2.0.6: - resolution: {integrity: sha512-XlkWvfIm6RmsWtNJx+uqtKLS8eqFbxUg0ZzLXqY0caEy9l7hruX8IpiDnjsLavoBgqCCR71TqWO8MaXYheJ3RQ==} - engines: {node: '>=10'} + stack-trace@0.0.10: + resolution: {integrity: sha512-KGzahc7puUKkzyMt+IqAep+TVNbKP+k2Lmwhub39m1AsTSkaDutx56aDCo+HLDzf/D26BIHTJWNiTG1KAJiQCg==} stackback@0.0.2: resolution: {integrity: sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==} @@ -4132,10 +3768,6 @@ packages: resolution: {integrity: sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==} engines: {node: '>=4'} - strip-final-newline@2.0.0: - resolution: {integrity: sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==} - engines: {node: '>=6'} - strip-indent@3.0.0: resolution: {integrity: sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ==} engines: {node: '>=8'} @@ -4175,6 +3807,9 @@ packages: resolution: {integrity: sha512-wK0Ri4fOGjv/XPy8SBHZChl8CM7uMc5VML7SqiQ0zG7+J5Vr+RMQDoHa2CNT6KHUnTGIXH34UDMkPzAUyapBZg==} engines: {node: '>=8'} + text-hex@1.0.0: + resolution: {integrity: sha512-uuVGNWzgJ4yhRaNSiubPY7OjISw4sw4E5Uv0wbjp+OzcbmVU/rsT8ujgcXJhn9ypzsgr5vlzpPqP+MBBKcGvbg==} + thread-stream@2.7.0: resolution: {integrity: sha512-qQiRWsU/wvNolI6tbbCKd9iKaTnCXsTwVxhhKM6nctPdujTyztjlbUkUTUymidWcMnZ5pWR0ej4a0tjsW021vw==} @@ -4235,6 +3870,10 @@ packages: resolution: {integrity: sha512-c1PTsA3tYrIsLGkJkzHF+w9F2EyxfXGo4UyJc4pFL++FMjnq0HJS69T3M7d//gKrFKwy429bouPescbjecU+Zw==} engines: {node: '>=8'} + triple-beam@1.4.1: + resolution: {integrity: sha512-aZbgViZrg1QNcG+LULa7nhZpJTZSLm/mXnHXnbAbjmN5aSa0y7V+wvv6+4WaBtpISJzThKy+PIPxc1Nq1EJ9mg==} + engines: {node: '>= 14.0.0'} + ts-codec@1.2.2: resolution: {integrity: sha512-dlAsfmeEa3AA+6rvgKLlLb6rqTMthQNH9w+B2FGM5sRu7sTd0GSbmLuW0HAmRlIeyUEZzMXk0yRmP0afribqmg==} @@ -4263,11 +3902,6 @@ packages: '@swc/wasm': optional: true - ts-node@7.0.1: - resolution: {integrity: sha512-BVwVbPJRspzNh2yfslyT1PSbl5uIk03EZlb493RKHN4qej/D06n1cEhjlOJG69oFsE7OT8XjpTUcYf6pKTLMhw==} - engines: {node: '>=4.2.0'} - hasBin: true - tsc-watch@6.2.0: resolution: {integrity: sha512-2LBhf9kjKXnz7KQ/puLHlozMzzUNHAdYBNMkg3eksQJ9GBAgMg8czznM83T5PmsoUvDnXzfIeQn2lNcIYDr8LA==} engines: {node: '>=12.12.0'} @@ -4304,10 +3938,6 @@ packages: resolution: {integrity: sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==} engines: {node: '>=4'} - type-fest@0.12.0: - resolution: {integrity: sha512-53RyidyjvkGpnWPMF9bQgFtWp+Sl8O2Rp13VavmJgfAP9WWG6q6TkrKU8iyJdnwnfgHI6k2hTlgqH4aSdjoTbg==} - engines: {node: '>=10'} - type-fest@0.13.1: resolution: {integrity: sha512-34R7HTnG0XIJcBSn5XhDd7nNFPRcXYRZrBB2O2jdKqYODldSzBAqzsWoZYYvduky73toYS/ESqxPvkDf/F0XMg==} engines: {node: '>=10'} @@ -4355,11 +3985,6 @@ packages: typedarray-to-buffer@3.1.5: resolution: {integrity: sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q==} - typescript@3.8.3: - resolution: {integrity: sha512-MYlEfn5VrLNsgudQTVJeNaQFUAI7DkhnOjdpAp4T+ku1TfQClewlbSuTVHiA+8skNBgaf02TL/kLOvig4y3G8w==} - engines: {node: '>=4.2.0'} - hasBin: true - typescript@5.2.2: resolution: {integrity: sha512-mI4WrpHsbCIcwT9cF4FZvr80QUeKvsUsUvKDoR+X/7XHQH98xYD8YHZg7ANtz2GtZt/CBq2QJ0thkGJMHfqc1w==} engines: {node: '>=14.17'} @@ -4399,17 +4024,10 
@@ packages: resolution: {integrity: sha512-VGXBUVwxKMBUznyffQweQABPRRW1vHZAbadFZud4pLFAqRGvv/96vafgjWFqzourzr8YonlQiPgH0YCJfawoGQ==} engines: {node: '>=12'} - universal-user-agent@6.0.1: - resolution: {integrity: sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ==} - universalify@0.1.2: resolution: {integrity: sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==} engines: {node: '>= 4.0.0'} - universalify@2.0.1: - resolution: {integrity: sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==} - engines: {node: '>= 10.0.0'} - unpipe@1.0.0: resolution: {integrity: sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==} engines: {node: '>= 0.8'} @@ -4418,10 +4036,6 @@ packages: resolution: {integrity: sha512-KK8xQ1mkzZeg9inewmFVDNkg3l5LUhoq9kN6iWYB/CC9YMG8HA+c1Q8HwDe6dEX7kErrEVNVBO3fWsVq5iDgtw==} engines: {node: '>=8'} - upath@1.2.0: - resolution: {integrity: sha512-aZwGpamFO61g3OlfT7OQCHqhGnW43ieH9WZeP7QxN/G/jS4jfqUkZxoryvJgVPEcrl5NL/ggHsSmLMHuH64Lhg==} - engines: {node: '>=4'} - update-notifier@6.0.2: resolution: {integrity: sha512-EDxhTEVPZZRLWYcJ4ZXjGFN0oP7qYvbXWzEgRm/Yql4dHX5wDbvh89YHP6PK1lzZJYrMtXUuZZz8XGK+U6U1og==} engines: {node: '>=14.16'} @@ -4432,9 +4046,6 @@ packages: util-deprecate@1.0.2: resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==} - util-promisify@2.1.0: - resolution: {integrity: sha512-K+5eQPYs14b3+E+hmE2J6gCZ4JmMl9DbYS6BeP2CHq6WMuNxErxf5B/n0fz85L8zUuoO6rIzNNmIQDu/j+1OcA==} - utils-merge@1.0.1: resolution: {integrity: sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==} engines: {node: '>= 0.4.0'} @@ -4590,14 +4201,18 @@ packages: wide-align@1.1.5: resolution: {integrity: sha512-eDMORYaPNZ4sQIuuYPDHdQvf4gyCF9rEEV/yPxGfwPkRodwEgiMUUXTx/dex+Me0wxx53S+NgUHaP7y3MGlDmg==} - widest-line@3.1.0: - resolution: {integrity: sha512-NsmoXalsWVDMGupxZ5R08ka9flZjjiLvHVAWYOKtiKM8ujtZWr9cRffak+uSE48+Ob8ObalXpwyeUiyDD6QFgg==} - engines: {node: '>=8'} - widest-line@4.0.1: resolution: {integrity: sha512-o0cyEG0e8GPzT4iGHphIOh0cJOV8fivsXxddQasHPHfoZf1ZexrfeA21w2NaEN1RHE+fXlfISmOE8R9N3u3Qig==} engines: {node: '>=12'} + winston-transport@4.7.0: + resolution: {integrity: sha512-ajBj65K5I7denzer2IYW6+2bNIVqLGDHqDw3Ow8Ohh+vdW+rv4MZ6eiDvHoKhfJFZ2auyN8byXieDDJ96ViONg==} + engines: {node: '>= 12.0.0'} + + winston@3.13.0: + resolution: {integrity: sha512-rwidmA1w3SE4j0E5MuIufFhyJPBDG7Nu71RkZor1p2+qHvJSZ9GYDA81AyleQcZbh/+V6HjeBdfnTZJm9rSeQQ==} + engines: {node: '>= 12.0.0'} + wrap-ansi@6.2.0: resolution: {integrity: sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==} engines: {node: '>=8'} @@ -4616,18 +4231,6 @@ packages: write-file-atomic@3.0.3: resolution: {integrity: sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q==} - ws@7.5.9: - resolution: {integrity: sha512-F+P9Jil7UiSKSkppIiD94dN07AwvFixvLIj1Og1Rl9GGMuNipJnV9JzjD6XuqmAeiswGvUmNLjr5cFuXwNS77Q==} - engines: {node: '>=8.3.0'} - peerDependencies: - bufferutil: ^4.0.1 - utf-8-validate: ^5.0.2 - peerDependenciesMeta: - bufferutil: - optional: true - utf-8-validate: - optional: true - ws@8.17.0: resolution: {integrity: sha512-uJq6108EgZMAl20KagGkzCKfMEjxmKvZHG7Tlq0Z6nOky7YF7aq4mOx6xK8TJ/i1LeK4Qus7INktacctDgY8Ow==} engines: {node: '>=10.0.0'} @@ -4705,10 +4308,6 @@ packages: 
resolution: {integrity: sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==} engines: {node: '>=12'} - yn@2.0.0: - resolution: {integrity: sha512-uTv8J/wiWTgUTg+9vLTi//leUl5vDQS6uii/emeTb2ssY7vl6QWf2fFbIIGjnhjvbdKlU0ed7QPgY1htTC86jQ==} - engines: {node: '>=4'} - yn@3.1.1: resolution: {integrity: sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==} engines: {node: '>=6'} @@ -4721,10 +4320,6 @@ packages: resolution: {integrity: sha512-9bnSc/HEW2uRy67wc+T8UwauLuPJVn28jb+GtJY16iiKWyvmYJRXVT4UamsAEGQfPohgr2q4Tq0sQbQlxTfi1g==} engines: {node: '>=12.20'} - yoga-layout-prebuilt@1.10.0: - resolution: {integrity: sha512-YnOmtSbv4MTf7RGJMK0FvZ+KD8OEe/J5BNnR0GHhD8J/XcG/Qvxgszm0Un6FTHWW4uHlTgP0IztiXQnGyIR45g==} - engines: {node: '>=8'} - zod@3.23.8: resolution: {integrity: sha512-XBx9AXhXktjUqnepgTiE5flcKIYWi/rme0Eaj+5Y0lftuGBq+jyRu/md4WnuxqgP1ubdpNCsYEYPxrzVHD8d6g==} @@ -4906,10 +4501,18 @@ snapshots: '@colors/colors@1.5.0': optional: true + '@colors/colors@1.6.0': {} + '@cspotcode/source-map-support@0.8.1': dependencies: '@jridgewell/trace-mapping': 0.3.9 + '@dabh/diagnostics@2.0.3': + dependencies: + colorspace: 1.1.4 + enabled: 2.0.0 + kuler: 2.0.0 + '@esbuild/aix-ppc64@0.20.2': optional: true @@ -5007,10 +4610,6 @@ snapshots: '@grpc/proto-loader': 0.7.13 '@js-sdsl/ordered-map': 4.4.2 - '@grpc/grpc-js@1.3.8': - dependencies: - '@types/node': 18.11.11 - '@grpc/proto-loader@0.7.13': dependencies: lodash.camelcase: 4.3.0 @@ -5035,156 +4634,40 @@ snapshots: dependencies: '@sinclair/typebox': 0.27.8 - '@journeyapps-platform/ci-tools@3.0.7(@octokit/core@4.2.4(encoding@0.1.13))(encoding@0.1.13)': + '@journeyapps-platform/micro-alerts@4.0.0': + dependencies: + '@journeyapps-platform/micro-async-hooks': 2.0.0 + '@journeyapps-platform/micro-errors': 3.0.0 + '@journeyapps-platform/micro-logger': 3.0.0 + '@journeyapps-platform/micro-tracing': 4.0.0 + '@sentry/node': 7.116.0 + lodash: 4.17.21 + + '@journeyapps-platform/micro-async-hooks@2.0.0': + dependencies: + lodash: 4.17.21 + + '@journeyapps-platform/micro-authorizers@6.0.0(encoding@0.1.13)': dependencies: - '@journeyapps-platform/cli-logger': 2.14.8 - '@octokit/plugin-rest-endpoint-methods': 7.2.3(@octokit/core@4.2.4(encoding@0.1.13)) - '@octokit/rest': 19.0.13(encoding@0.1.13) + '@journeyapps-platform/micro-errors': 3.0.0 + '@journeyapps-platform/micro-logger': 3.0.0 + '@journeyapps-platform/micro-schema': 6.0.1 + jose: 4.15.5 + lru-cache: 7.18.3 node-fetch: 2.7.0(encoding@0.1.13) transitivePeerDependencies: - - '@octokit/core' - encoding - '@journeyapps-platform/cli-logger@2.14.8': + '@journeyapps-platform/micro-codecs@3.0.1': dependencies: - chalk: 4.1.2 + bson: 6.7.0 + ts-codec: 1.2.2 - '@journeyapps-platform/deploy-cli@4.4.7(@octokit/core@4.2.4(encoding@0.1.13))(@types/node@18.11.11)(encoding@0.1.13)(typescript@5.2.2)': - dependencies: - '@journeyapps-platform/ci-tools': 3.0.7(@octokit/core@4.2.4(encoding@0.1.13))(encoding@0.1.13) - '@journeyapps-platform/cli-logger': 2.14.8 - '@journeyapps-platform/deploy-config': 3.1.0 - '@journeyapps-platform/filesystem': 3.0.1 - '@journeyapps-platform/git': 1.0.6 - '@journeyapps-platform/shell-tools': 3.0.1 - inquirer: 8.2.6 - lodash: 4.17.21 - ts-node: 10.9.2(@types/node@18.11.11)(typescript@5.2.2) - transitivePeerDependencies: - - '@octokit/core' - - '@swc/core' - - '@swc/wasm' - - '@types/node' - - encoding - - typescript - - '@journeyapps-platform/deploy-config@3.1.0': - dependencies: - '@journeyapps-platform/micro-schema': 
5.1.1 - inquirer: 8.2.6 - js-yaml: 4.1.0 - lodash: 4.17.21 - - '@journeyapps-platform/deploy-transformers@1.2.33(@octokit/core@4.2.4(encoding@0.1.13))(@types/node@18.11.11)(encoding@0.1.13)(typescript@5.2.2)': - dependencies: - '@journeyapps-platform/deploy-cli': 4.4.7(@octokit/core@4.2.4(encoding@0.1.13))(@types/node@18.11.11)(encoding@0.1.13)(typescript@5.2.2) - inquirer: 8.2.6 - transitivePeerDependencies: - - '@octokit/core' - - '@swc/core' - - '@swc/wasm' - - '@types/node' - - encoding - - typescript - - '@journeyapps-platform/filesystem@3.0.1': - dependencies: - '@journeyapps-platform/cli-logger': 2.14.8 - fs-extra: 11.2.0 - inquirer: 8.2.6 - open: 8.4.2 - - '@journeyapps-platform/formatter-cli@3.1.9': - dependencies: - prettier: 2.8.8 - - '@journeyapps-platform/git@1.0.6': - dependencies: - '@journeyapps-platform/shell-tools': 3.0.1 - '@journeyapps-platform/workspace': 1.0.3 - is-subdir: 1.2.0 - micromatch: 4.0.7 - - '@journeyapps-platform/lerno@1.0.3': - dependencies: - '@journeyapps-platform/cli-logger': 2.14.8 - '@types/semver': 7.5.8 - fast-glob: 3.3.2 - ink: 3.2.0(react@17.0.2) - js-yaml: 4.1.0 - lodash: 4.17.21 - react: 17.0.2 - semver: 7.6.2 - yargs: 17.7.2 - transitivePeerDependencies: - - '@types/react' - - bufferutil - - utf-8-validate - - '@journeyapps-platform/micro-alerts@4.0.0': - dependencies: - '@journeyapps-platform/micro-async-hooks': 2.0.0 - '@journeyapps-platform/micro-errors': 3.0.0 - '@journeyapps-platform/micro-logger': 3.0.0 - '@journeyapps-platform/micro-tracing': 4.0.0 - '@sentry/node': 7.116.0 - lodash: 4.17.21 - - '@journeyapps-platform/micro-async-hooks@2.0.0': - dependencies: - lodash: 4.17.21 - - '@journeyapps-platform/micro-authorizers@6.0.0(encoding@0.1.13)': - dependencies: - '@journeyapps-platform/micro-errors': 3.0.0 - '@journeyapps-platform/micro-logger': 3.0.0 - '@journeyapps-platform/micro-schema': 6.0.1 - jose: 4.15.5 - lru-cache: 7.18.3 - node-fetch: 2.7.0(encoding@0.1.13) - transitivePeerDependencies: - - encoding - - '@journeyapps-platform/micro-cli@7.2.9(@types/node@18.11.11)(typescript@5.2.2)': - dependencies: - '@journeyapps-platform/cli-logger': 2.14.8 - '@journeyapps-platform/deploy-config': 3.1.0 - '@journeyapps-platform/git': 1.0.6 - '@journeyapps-platform/shell-tools': 3.0.1 - '@pulumi/pulumi': 3.51.1 - fs-extra: 11.2.0 - inquirer: 8.2.6 - js-yaml: 4.1.0 - lodash: 4.17.21 - ts-node: 10.9.2(@types/node@18.11.11)(typescript@5.2.2) - uuid: 9.0.1 - yargs: 17.7.2 - transitivePeerDependencies: - - '@swc/core' - - '@swc/wasm' - - '@types/node' - - supports-color - - typescript - - '@journeyapps-platform/micro-codecs@2.3.0': - dependencies: - bson: 4.7.2 - ts-codec: 1.2.2 - - '@journeyapps-platform/micro-codecs@3.0.1': - dependencies: - bson: 6.7.0 - ts-codec: 1.2.2 - - '@journeyapps-platform/micro-db@3.0.2': + '@journeyapps-platform/micro-db@3.0.2': dependencies: '@journeyapps-platform/micro-utils': 5.0.0 bson: 6.7.0 - '@journeyapps-platform/micro-dev@1.6.7': {} - - '@journeyapps-platform/micro-errors@2.6.0': {} - '@journeyapps-platform/micro-errors@3.0.0': {} '@journeyapps-platform/micro-events@2.0.1(socks@2.8.3)': @@ -5350,16 +4833,6 @@ snapshots: '@opentelemetry/api': 1.3.0 async: 3.2.5 - '@journeyapps-platform/micro-schema@5.1.1': - dependencies: - '@apidevtools/json-schema-ref-parser': 9.1.2 - '@journeyapps-platform/micro-codecs': 2.3.0 - '@journeyapps-platform/micro-errors': 2.6.0 - ajv: 8.14.0 - better-ajv-errors: 1.2.0(ajv@8.14.0) - ts-codec: 1.2.2 - zod: 3.23.8 - '@journeyapps-platform/micro-schema@6.0.1': dependencies: 
'@apidevtools/json-schema-ref-parser': 9.1.2 @@ -5458,22 +4931,8 @@ snapshots: - supports-color - utf-8-validate - '@journeyapps-platform/shell-tools@3.0.1': - dependencies: - spawndamnit: 2.0.0 - '@journeyapps-platform/types-micro-ws@2.0.0': {} - '@journeyapps-platform/workspace@1.0.3': - dependencies: - '@journeyapps-platform/shell-tools': 3.0.1 - '@manypkg/find-root': 2.2.1 - '@manypkg/get-packages': 2.2.1 - '@manypkg/tools': 1.1.0 - detect-indent: 6.1.0 - fs-extra: 11.2.0 - js-yaml: 4.1.0 - '@jridgewell/resolve-uri@3.1.2': {} '@jridgewell/sourcemap-codec@1.4.15': {} @@ -5491,8 +4950,6 @@ snapshots: dependencies: call-bind: 1.0.7 - '@logdna/tail-file@2.2.0': {} - '@manypkg/find-root@1.1.0': dependencies: '@babel/runtime': 7.24.6 @@ -5500,12 +4957,6 @@ snapshots: find-up: 4.1.0 fs-extra: 8.1.0 - '@manypkg/find-root@2.2.1': - dependencies: - '@manypkg/tools': 1.1.0 - find-up: 4.1.0 - fs-extra: 8.1.0 - '@manypkg/get-packages@1.1.3': dependencies: '@babel/runtime': 7.24.6 @@ -5515,18 +4966,6 @@ snapshots: globby: 11.1.0 read-yaml-file: 1.1.0 - '@manypkg/get-packages@2.2.1': - dependencies: - '@manypkg/find-root': 2.2.1 - '@manypkg/tools': 1.1.0 - - '@manypkg/tools@1.1.0': - dependencies: - fs-extra: 8.1.0 - globby: 11.1.0 - jju: 1.4.0 - read-yaml-file: 1.1.0 - '@mongodb-js/saslprep@1.1.7': dependencies: sparse-bitfield: 3.0.3 @@ -5592,105 +5031,16 @@ snapshots: - bluebird - supports-color - '@octokit/auth-token@3.0.4': {} - - '@octokit/core@4.2.4(encoding@0.1.13)': - dependencies: - '@octokit/auth-token': 3.0.4 - '@octokit/graphql': 5.0.6(encoding@0.1.13) - '@octokit/request': 6.2.8(encoding@0.1.13) - '@octokit/request-error': 3.0.3 - '@octokit/types': 9.3.2 - before-after-hook: 2.2.3 - universal-user-agent: 6.0.1 - transitivePeerDependencies: - - encoding - - '@octokit/endpoint@7.0.6': - dependencies: - '@octokit/types': 9.3.2 - is-plain-object: 5.0.0 - universal-user-agent: 6.0.1 - - '@octokit/graphql@5.0.6(encoding@0.1.13)': - dependencies: - '@octokit/request': 6.2.8(encoding@0.1.13) - '@octokit/types': 9.3.2 - universal-user-agent: 6.0.1 - transitivePeerDependencies: - - encoding - - '@octokit/openapi-types@18.1.1': {} - - '@octokit/plugin-paginate-rest@6.1.2(@octokit/core@4.2.4(encoding@0.1.13))': - dependencies: - '@octokit/core': 4.2.4(encoding@0.1.13) - '@octokit/tsconfig': 1.0.2 - '@octokit/types': 9.3.2 - - '@octokit/plugin-request-log@1.0.4(@octokit/core@4.2.4(encoding@0.1.13))': - dependencies: - '@octokit/core': 4.2.4(encoding@0.1.13) - - '@octokit/plugin-rest-endpoint-methods@7.2.3(@octokit/core@4.2.4(encoding@0.1.13))': - dependencies: - '@octokit/core': 4.2.4(encoding@0.1.13) - '@octokit/types': 10.0.0 - - '@octokit/request-error@3.0.3': - dependencies: - '@octokit/types': 9.3.2 - deprecation: 2.3.1 - once: 1.4.0 - - '@octokit/request@6.2.8(encoding@0.1.13)': - dependencies: - '@octokit/endpoint': 7.0.6 - '@octokit/request-error': 3.0.3 - '@octokit/types': 9.3.2 - is-plain-object: 5.0.0 - node-fetch: 2.7.0(encoding@0.1.13) - universal-user-agent: 6.0.1 - transitivePeerDependencies: - - encoding - - '@octokit/rest@19.0.13(encoding@0.1.13)': - dependencies: - '@octokit/core': 4.2.4(encoding@0.1.13) - '@octokit/plugin-paginate-rest': 6.1.2(@octokit/core@4.2.4(encoding@0.1.13)) - '@octokit/plugin-request-log': 1.0.4(@octokit/core@4.2.4(encoding@0.1.13)) - '@octokit/plugin-rest-endpoint-methods': 7.2.3(@octokit/core@4.2.4(encoding@0.1.13)) - transitivePeerDependencies: - - encoding - - '@octokit/tsconfig@1.0.2': {} - - '@octokit/types@10.0.0': - dependencies: - 
'@octokit/openapi-types': 18.1.1 - - '@octokit/types@9.3.2': - dependencies: - '@octokit/openapi-types': 18.1.1 - '@opentelemetry/api-logs@0.51.1': dependencies: '@opentelemetry/api': 1.8.0 - '@opentelemetry/api-metrics@0.32.0': - dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/api@1.3.0': {} '@opentelemetry/api@1.6.0': {} '@opentelemetry/api@1.8.0': {} - '@opentelemetry/context-async-hooks@1.24.1(@opentelemetry/api@1.8.0)': - dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/context-async-hooks@1.8.0(@opentelemetry/api@1.3.0)': dependencies: '@opentelemetry/api': 1.3.0 @@ -5800,14 +5150,6 @@ snapshots: '@opentelemetry/resources': 1.8.0(@opentelemetry/api@1.3.0) '@opentelemetry/sdk-trace-base': 1.8.0(@opentelemetry/api@1.3.0) - '@opentelemetry/exporter-zipkin@1.24.1(@opentelemetry/api@1.8.0)': - dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0) - '@opentelemetry/resources': 1.24.1(@opentelemetry/api@1.8.0) - '@opentelemetry/sdk-trace-base': 1.24.1(@opentelemetry/api@1.8.0) - '@opentelemetry/semantic-conventions': 1.24.1 - '@opentelemetry/exporter-zipkin@1.8.0(@opentelemetry/api@1.3.0)': dependencies: '@opentelemetry/api': 1.3.0 @@ -5816,15 +5158,6 @@ snapshots: '@opentelemetry/sdk-trace-base': 1.8.0(@opentelemetry/api@1.3.0) '@opentelemetry/semantic-conventions': 1.8.0 - '@opentelemetry/instrumentation-grpc@0.32.0(@opentelemetry/api@1.8.0)': - dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/api-metrics': 0.32.0 - '@opentelemetry/instrumentation': 0.32.0(@opentelemetry/api@1.8.0) - '@opentelemetry/semantic-conventions': 1.6.0 - transitivePeerDependencies: - - supports-color - '@opentelemetry/instrumentation-http@0.34.0(@opentelemetry/api@1.3.0)': dependencies: '@opentelemetry/api': 1.3.0 @@ -5843,16 +5176,6 @@ snapshots: transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation@0.32.0(@opentelemetry/api@1.8.0)': - dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/api-metrics': 0.32.0 - require-in-the-middle: 5.2.0 - semver: 7.6.2 - shimmer: 1.2.1 - transitivePeerDependencies: - - supports-color - '@opentelemetry/instrumentation@0.34.0(@opentelemetry/api@1.3.0)': dependencies: '@opentelemetry/api': 1.3.0 @@ -5905,21 +5228,11 @@ snapshots: '@opentelemetry/sdk-metrics': 1.24.1(@opentelemetry/api@1.8.0) '@opentelemetry/sdk-trace-base': 1.24.1(@opentelemetry/api@1.8.0) - '@opentelemetry/propagator-b3@1.24.1(@opentelemetry/api@1.8.0)': - dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0) - '@opentelemetry/propagator-b3@1.8.0(@opentelemetry/api@1.3.0)': dependencies: '@opentelemetry/api': 1.3.0 '@opentelemetry/core': 1.8.0(@opentelemetry/api@1.3.0) - '@opentelemetry/propagator-jaeger@1.24.1(@opentelemetry/api@1.8.0)': - dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0) - '@opentelemetry/propagator-jaeger@1.8.0(@opentelemetry/api@1.3.0)': dependencies: '@opentelemetry/api': 1.3.0 @@ -6016,16 +5329,6 @@ snapshots: '@opentelemetry/resources': 1.8.0(@opentelemetry/api@1.3.0) '@opentelemetry/semantic-conventions': 1.8.0 - '@opentelemetry/sdk-trace-node@1.24.1(@opentelemetry/api@1.8.0)': - dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/context-async-hooks': 1.24.1(@opentelemetry/api@1.8.0) - '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0) - '@opentelemetry/propagator-b3': 1.24.1(@opentelemetry/api@1.8.0) - '@opentelemetry/propagator-jaeger': 
1.24.1(@opentelemetry/api@1.8.0) - '@opentelemetry/sdk-trace-base': 1.24.1(@opentelemetry/api@1.8.0) - semver: 7.6.2 - '@opentelemetry/sdk-trace-node@1.8.0(@opentelemetry/api@1.3.0)': dependencies: '@opentelemetry/api': 1.3.0 @@ -6040,8 +5343,6 @@ snapshots: '@opentelemetry/semantic-conventions@1.24.1': {} - '@opentelemetry/semantic-conventions@1.6.0': {} - '@opentelemetry/semantic-conventions@1.8.0': {} '@pkgjs/parseargs@0.11.0': @@ -6082,36 +5383,6 @@ snapshots: '@protobufjs/utf8@1.1.0': {} - '@pulumi/pulumi@3.51.1': - dependencies: - '@grpc/grpc-js': 1.3.8 - '@logdna/tail-file': 2.2.0 - '@opentelemetry/api': 1.8.0 - '@opentelemetry/exporter-zipkin': 1.24.1(@opentelemetry/api@1.8.0) - '@opentelemetry/instrumentation-grpc': 0.32.0(@opentelemetry/api@1.8.0) - '@opentelemetry/resources': 1.24.1(@opentelemetry/api@1.8.0) - '@opentelemetry/sdk-trace-base': 1.24.1(@opentelemetry/api@1.8.0) - '@opentelemetry/sdk-trace-node': 1.24.1(@opentelemetry/api@1.8.0) - '@opentelemetry/semantic-conventions': 1.24.1 - '@pulumi/query': 0.3.0 - execa: 5.1.1 - google-protobuf: 3.21.2 - ini: 2.0.0 - js-yaml: 3.14.1 - minimist: 1.2.8 - normalize-package-data: 2.5.0 - read-package-tree: 5.3.1 - require-from-string: 2.0.2 - semver: 6.3.1 - source-map-support: 0.5.21 - ts-node: 7.0.1 - typescript: 3.8.3 - upath: 1.2.0 - transitivePeerDependencies: - - supports-color - - '@pulumi/query@0.3.0': {} - '@rollup/rollup-android-arm-eabi@4.18.0': optional: true @@ -6316,6 +5587,8 @@ snapshots: '@types/strip-json-comments@0.0.30': {} + '@types/triple-beam@1.3.5': {} + '@types/uuid@9.0.8': {} '@types/webidl-conversions@7.0.3': {} @@ -6328,8 +5601,6 @@ snapshots: dependencies: '@types/node': 18.11.11 - '@types/yoga-layout@1.9.2': {} - '@vitest/expect@0.34.6': dependencies: '@vitest/spy': 0.34.6 @@ -6469,16 +5740,6 @@ snapshots: es-abstract: 1.23.3 es-shim-unscopables: 1.0.2 - array.prototype.reduce@1.0.7: - dependencies: - call-bind: 1.0.7 - define-properties: 1.2.1 - es-abstract: 1.23.3 - es-array-method-boxes-properly: 1.0.0 - es-errors: 1.3.0 - es-object-atoms: 1.0.0 - is-string: 1.0.7 - arraybuffer.prototype.slice@1.0.3: dependencies: array-buffer-byte-length: 1.0.1 @@ -6492,12 +5753,8 @@ snapshots: arrify@1.0.1: {} - asap@2.0.6: {} - assertion-error@1.1.0: {} - astral-regex@2.0.0: {} - async-mutex@0.5.0: dependencies: tslib: 2.6.2 @@ -6506,8 +5763,6 @@ snapshots: atomic-sleep@1.0.0: {} - auto-bind@4.0.0: {} - available-typed-arrays@1.0.7: dependencies: possible-typed-array-names: 1.0.0 @@ -6521,8 +5776,6 @@ snapshots: base64-js@1.5.1: {} - before-after-hook@2.2.3: {} - better-ajv-errors@1.2.0(ajv@8.14.0): dependencies: '@babel/code-frame': 7.24.6 @@ -6589,10 +5842,6 @@ snapshots: dependencies: wcwidth: 1.0.1 - bson@4.7.2: - dependencies: - buffer: 5.7.1 - bson@6.7.0: {} buffer-from@1.1.2: {} @@ -6731,14 +5980,10 @@ snapshots: chownr@2.0.0: {} - ci-info@2.0.0: {} - ci-info@3.9.0: {} clean-stack@2.2.0: {} - cli-boxes@2.2.1: {} - cli-boxes@3.0.0: {} cli-cursor@3.1.0: @@ -6753,13 +5998,6 @@ snapshots: optionalDependencies: '@colors/colors': 1.5.0 - cli-truncate@2.1.0: - dependencies: - slice-ansi: 3.0.0 - string-width: 4.2.3 - - cli-width@3.0.0: {} - cli-width@4.1.0: {} cliui@6.0.0: @@ -6782,10 +6020,6 @@ snapshots: clone@1.0.4: {} - code-excerpt@3.0.0: - dependencies: - convert-to-spaces: 1.0.2 - color-convert@1.9.3: dependencies: color-name: 1.1.3 @@ -6798,8 +6032,23 @@ snapshots: color-name@1.1.4: {} + color-string@1.9.1: + dependencies: + color-name: 1.1.4 + simple-swizzle: 0.2.2 + color-support@1.1.3: {} + color@3.2.1: 
+ dependencies: + color-convert: 1.9.3 + color-string: 1.9.1 + + colorspace@1.1.4: + dependencies: + color: 3.2.1 + text-hex: 1.0.0 + commander@10.0.1: {} commander@12.1.0: {} @@ -6843,8 +6092,6 @@ snapshots: content-type@1.0.5: {} - convert-to-spaces@1.0.2: {} - cookie-signature@1.0.6: {} cookie@0.6.0: {} @@ -6931,8 +6178,6 @@ snapshots: optionalDependencies: supports-color: 5.5.0 - debuglog@1.0.1: {} - decamelize-keys@1.1.1: dependencies: decamelize: 1.2.0 @@ -6962,8 +6207,6 @@ snapshots: es-errors: 1.3.0 gopd: 1.0.1 - define-lazy-prop@2.0.0: {} - define-properties@1.2.1: dependencies: define-data-property: 1.1.4 @@ -6974,21 +6217,12 @@ snapshots: depd@2.0.0: {} - deprecation@2.3.1: {} - destroy@1.2.0: {} detect-indent@6.1.0: {} - dezalgo@1.0.4: - dependencies: - asap: 2.0.6 - wrappy: 1.0.2 - diff-sequences@29.6.3: {} - diff@3.5.0: {} - diff@4.0.2: {} dir-glob@3.0.1: @@ -7017,6 +6251,8 @@ snapshots: emoji-regex@9.2.2: {} + enabled@2.0.0: {} + encodeurl@1.0.2: {} encoding@0.1.13: @@ -7091,8 +6327,6 @@ snapshots: unbox-primitive: 1.0.2 which-typed-array: 1.1.15 - es-array-method-boxes-properly@1.0.0: {} - es-define-property@1.0.0: dependencies: get-intrinsic: 1.2.4 @@ -7153,8 +6387,6 @@ snapshots: escape-string-regexp@1.0.5: {} - escape-string-regexp@2.0.0: {} - esprima@4.0.1: {} etag@1.8.1: {} @@ -7173,18 +6405,6 @@ snapshots: events@3.3.0: {} - execa@5.1.1: - dependencies: - cross-spawn: 7.0.3 - get-stream: 6.0.1 - human-signals: 2.1.0 - is-stream: 2.0.1 - merge-stream: 2.0.0 - npm-run-path: 4.0.1 - onetime: 5.1.2 - signal-exit: 3.0.7 - strip-final-newline: 2.0.0 - exponential-backoff@3.1.1: {} express@4.19.2: @@ -7290,15 +6510,13 @@ snapshots: dependencies: reusify: 1.0.4 + fecha@4.2.3: {} + fetch-blob@3.2.0: dependencies: node-domexception: 1.0.0 web-streams-polyfill: 3.3.3 - figures@3.2.0: - dependencies: - escape-string-regexp: 1.0.5 - fill-range@7.1.1: dependencies: to-regex-range: 5.0.1 @@ -7336,6 +6554,8 @@ snapshots: micromatch: 4.0.7 pkg-dir: 4.2.0 + fn.name@1.1.0: {} + for-each@0.3.3: dependencies: is-callable: 1.2.7 @@ -7359,12 +6579,6 @@ snapshots: from@0.1.7: {} - fs-extra@11.2.0: - dependencies: - graceful-fs: 4.2.11 - jsonfile: 6.1.0 - universalify: 2.0.1 - fs-extra@7.0.1: dependencies: graceful-fs: 4.2.11 @@ -7483,8 +6697,6 @@ snapshots: globrex@0.1.2: {} - google-protobuf@3.21.2: {} - gopd@1.0.1: dependencies: get-intrinsic: 1.2.4 @@ -7586,8 +6798,6 @@ snapshots: human-id@1.0.2: {} - human-signals@2.1.0: {} - humanize-ms@1.2.1: dependencies: ms: 2.1.3 @@ -7634,54 +6844,6 @@ snapshots: ini@4.1.3: {} - ink@3.2.0(react@17.0.2): - dependencies: - ansi-escapes: 4.3.2 - auto-bind: 4.0.0 - chalk: 4.1.2 - cli-boxes: 2.2.1 - cli-cursor: 3.1.0 - cli-truncate: 2.1.0 - code-excerpt: 3.0.0 - indent-string: 4.0.0 - is-ci: 2.0.0 - lodash: 4.17.21 - patch-console: 1.0.0 - react: 17.0.2 - react-devtools-core: 4.28.5 - react-reconciler: 0.26.2(react@17.0.2) - scheduler: 0.20.2 - signal-exit: 3.0.7 - slice-ansi: 3.0.0 - stack-utils: 2.0.6 - string-width: 4.2.3 - type-fest: 0.12.0 - widest-line: 3.1.0 - wrap-ansi: 6.2.0 - ws: 7.5.9 - yoga-layout-prebuilt: 1.10.0 - transitivePeerDependencies: - - bufferutil - - utf-8-validate - - inquirer@8.2.6: - dependencies: - ansi-escapes: 4.3.2 - chalk: 4.1.2 - cli-cursor: 3.1.0 - cli-width: 3.0.0 - external-editor: 3.1.0 - figures: 3.2.0 - lodash: 4.17.21 - mute-stream: 0.0.8 - ora: 5.4.1 - run-async: 2.4.1 - rxjs: 7.8.1 - string-width: 4.2.3 - strip-ansi: 6.0.1 - through: 2.3.8 - wrap-ansi: 6.2.0 - inquirer@9.2.22: dependencies: '@inquirer/figures': 
1.0.2 @@ -7722,6 +6884,8 @@ snapshots: is-arrayish@0.2.1: {} + is-arrayish@0.3.2: {} + is-bigint@1.0.4: dependencies: has-bigints: 1.0.2 @@ -7737,10 +6901,6 @@ snapshots: is-callable@1.2.7: {} - is-ci@2.0.0: - dependencies: - ci-info: 2.0.0 - is-ci@3.0.1: dependencies: ci-info: 3.9.0 @@ -7757,8 +6917,6 @@ snapshots: dependencies: has-tostringtag: 1.0.2 - is-docker@2.2.1: {} - is-extglob@2.1.1: {} is-fullwidth-code-point@3.0.0: {} @@ -7792,8 +6950,6 @@ snapshots: is-plain-obj@1.1.0: {} - is-plain-object@5.0.0: {} - is-regex@1.1.4: dependencies: call-bind: 1.0.7 @@ -7831,10 +6987,6 @@ snapshots: is-windows@1.0.2: {} - is-wsl@2.2.0: - dependencies: - is-docker: 2.2.1 - is-yarn-global@0.4.1: {} isarray@0.0.1: {} @@ -7903,12 +7055,6 @@ snapshots: optionalDependencies: graceful-fs: 4.2.11 - jsonfile@6.1.0: - dependencies: - universalify: 2.0.1 - optionalDependencies: - graceful-fs: 4.2.11 - jsonlines@0.1.1: {} jsonparse@1.3.1: {} @@ -7925,6 +7071,8 @@ snapshots: kleur@4.1.5: {} + kuler@2.0.0: {} + latest-version@7.0.0: dependencies: package-json: 8.1.1 @@ -7982,14 +7130,19 @@ snapshots: split: 0.2.10 through: 2.3.8 + logform@2.6.0: + dependencies: + '@colors/colors': 1.6.0 + '@types/triple-beam': 1.3.5 + fecha: 4.2.3 + ms: 2.1.3 + safe-stable-stringify: 2.4.3 + triple-beam: 1.4.1 + long@2.4.0: {} long@5.2.3: {} - loose-envify@1.4.0: - dependencies: - js-tokens: 4.0.0 - lossless-json@2.0.11: {} loupe@2.3.7: @@ -8081,8 +7234,6 @@ snapshots: merge-descriptors@1.0.1: {} - merge-stream@2.0.0: {} - merge2@1.4.1: {} methods@1.1.2: {} @@ -8180,10 +7331,6 @@ snapshots: mixme@0.5.10: {} - mkdirp@0.5.6: - dependencies: - minimist: 1.2.8 - mkdirp@1.0.4: {} mlly@1.7.0: @@ -8220,8 +7367,6 @@ snapshots: ms@2.1.3: {} - mute-stream@0.0.8: {} - mute-stream@1.0.0: {} nanoid@3.3.7: {} @@ -8357,8 +7502,6 @@ snapshots: dependencies: semver: 7.6.2 - npm-normalize-package-bin@1.0.1: {} - npm-normalize-package-bin@3.0.1: {} npm-package-arg@10.1.0: @@ -8391,10 +7534,6 @@ snapshots: transitivePeerDependencies: - supports-color - npm-run-path@4.0.1: - dependencies: - path-key: 3.1.1 - npmlog@6.0.2: dependencies: are-we-there-yet: 3.0.1 @@ -8415,16 +7554,6 @@ snapshots: has-symbols: 1.0.3 object-keys: 1.1.1 - object.getownpropertydescriptors@2.1.8: - dependencies: - array.prototype.reduce: 1.0.7 - call-bind: 1.0.7 - define-properties: 1.2.1 - es-abstract: 1.23.3 - es-object-atoms: 1.0.0 - gopd: 1.0.1 - safe-array-concat: 1.1.2 - obliterator@2.0.4: {} on-exit-leak-free@2.1.2: {} @@ -8437,15 +7566,13 @@ snapshots: dependencies: wrappy: 1.0.2 - onetime@5.1.2: + one-time@1.0.0: dependencies: - mimic-fn: 2.1.0 + fn.name: 1.1.0 - open@8.4.2: + onetime@5.1.2: dependencies: - define-lazy-prop: 2.0.0 - is-docker: 2.2.1 - is-wsl: 2.2.0 + mimic-fn: 2.1.0 opentracing@0.14.7: {} @@ -8541,8 +7668,6 @@ snapshots: parseurl@1.3.3: {} - patch-console@1.0.0: {} - path-exists@4.0.0: {} path-is-absolute@1.0.1: {} @@ -8756,40 +7881,13 @@ snapshots: minimist: 1.2.8 strip-json-comments: 2.0.1 - react-devtools-core@4.28.5: - dependencies: - shell-quote: 1.8.1 - ws: 7.5.9 - transitivePeerDependencies: - - bufferutil - - utf-8-validate - react-is@18.3.1: {} - react-reconciler@0.26.2(react@17.0.2): - dependencies: - loose-envify: 1.4.0 - object-assign: 4.1.1 - react: 17.0.2 - scheduler: 0.20.2 - - react@17.0.2: - dependencies: - loose-envify: 1.4.0 - object-assign: 4.1.1 - read-package-json-fast@3.0.2: dependencies: json-parse-even-better-errors: 3.0.2 npm-normalize-package-bin: 3.0.1 - read-package-json@2.1.2: - dependencies: - glob: 7.2.3 - 
json-parse-even-better-errors: 2.3.1 - normalize-package-data: 2.5.0 - npm-normalize-package-bin: 1.0.1 - read-package-json@6.0.4: dependencies: glob: 10.4.1 @@ -8797,12 +7895,6 @@ snapshots: normalize-package-data: 5.0.0 npm-normalize-package-bin: 3.0.1 - read-package-tree@5.3.1: - dependencies: - read-package-json: 2.1.2 - readdir-scoped-modules: 1.1.0 - util-promisify: 2.1.0 - read-pkg-up@7.0.1: dependencies: find-up: 4.1.0 @@ -8854,13 +7946,6 @@ snapshots: process: 0.11.10 string_decoder: 1.3.0 - readdir-scoped-modules@1.1.0: - dependencies: - debuglog: 1.0.1 - dezalgo: 1.0.4 - graceful-fs: 4.2.11 - once: 1.4.0 - readdirp@3.6.0: dependencies: picomatch: 2.3.1 @@ -8974,8 +8059,6 @@ snapshots: dependencies: rsocket-core: 1.0.0-alpha.3 - run-async@2.4.1: {} - run-async@3.0.0: {} run-parallel@1.2.0: @@ -9011,11 +8094,6 @@ snapshots: safer-buffer@2.1.2: {} - scheduler@0.20.2: - dependencies: - loose-envify: 1.4.0 - object-assign: 4.1.1 - secure-json-parse@2.7.0: {} semver-diff@4.0.0: @@ -9026,8 +8104,6 @@ snapshots: semver@5.7.2: {} - semver@6.3.1: {} - semver@7.6.2: {} send@0.18.0: @@ -9118,6 +8194,10 @@ snapshots: transitivePeerDependencies: - supports-color + simple-swizzle@0.2.2: + dependencies: + is-arrayish: 0.3.2 + simple-update-notifier@2.0.0: dependencies: semver: 7.6.2 @@ -9126,12 +8206,6 @@ snapshots: slash@3.0.0: {} - slice-ansi@3.0.0: - dependencies: - ansi-styles: 4.3.0 - astral-regex: 2.0.0 - is-fullwidth-code-point: 3.0.0 - smart-buffer@4.2.0: {} smartwrap@2.0.2: @@ -9220,9 +8294,7 @@ snapshots: dependencies: minipass: 3.3.6 - stack-utils@2.0.6: - dependencies: - escape-string-regexp: 2.0.0 + stack-trace@0.0.10: {} stackback@0.0.2: {} @@ -9293,8 +8365,6 @@ snapshots: strip-bom@3.0.0: {} - strip-final-newline@2.0.0: {} - strip-indent@3.0.0: dependencies: min-indent: 1.0.1 @@ -9332,6 +8402,8 @@ snapshots: term-size@2.2.1: {} + text-hex@1.0.0: {} + thread-stream@2.7.0: dependencies: real-require: 0.2.0 @@ -9379,6 +8451,8 @@ snapshots: trim-newlines@3.0.1: {} + triple-beam@1.4.1: {} + ts-codec@1.2.2: {} ts-node-dev@2.0.0(@types/node@18.11.11)(typescript@5.2.2): @@ -9417,17 +8491,6 @@ snapshots: v8-compile-cache-lib: 3.0.1 yn: 3.1.1 - ts-node@7.0.1: - dependencies: - arrify: 1.0.1 - buffer-from: 1.1.2 - diff: 3.5.0 - make-error: 1.3.6 - minimist: 1.2.8 - mkdirp: 0.5.6 - source-map-support: 0.5.21 - yn: 2.0.0 - tsc-watch@6.2.0(typescript@5.2.2): dependencies: cross-spawn: 7.0.3 @@ -9469,8 +8532,6 @@ snapshots: type-detect@4.0.8: {} - type-fest@0.12.0: {} - type-fest@0.13.1: {} type-fest@0.21.3: {} @@ -9524,8 +8585,6 @@ snapshots: dependencies: is-typedarray: 1.0.0 - typescript@3.8.3: {} - typescript@5.2.2: {} typescript@5.4.5: {} @@ -9561,18 +8620,12 @@ snapshots: dependencies: crypto-random-string: 4.0.0 - universal-user-agent@6.0.1: {} - universalify@0.1.2: {} - universalify@2.0.1: {} - unpipe@1.0.0: {} untildify@4.0.0: {} - upath@1.2.0: {} - update-notifier@6.0.2: dependencies: boxen: 7.1.1 @@ -9596,10 +8649,6 @@ snapshots: util-deprecate@1.0.2: {} - util-promisify@2.1.0: - dependencies: - object.getownpropertydescriptors: 2.1.8 - utils-merge@1.0.1: {} uuid@8.3.2: {} @@ -9754,14 +8803,30 @@ snapshots: dependencies: string-width: 4.2.3 - widest-line@3.1.0: - dependencies: - string-width: 4.2.3 - widest-line@4.0.1: dependencies: string-width: 5.1.2 + winston-transport@4.7.0: + dependencies: + logform: 2.6.0 + readable-stream: 3.6.2 + triple-beam: 1.4.1 + + winston@3.13.0: + dependencies: + '@colors/colors': 1.6.0 + '@dabh/diagnostics': 2.0.3 + async: 3.2.5 + is-stream: 2.0.1 + 
logform: 2.6.0 + one-time: 1.0.0 + readable-stream: 3.6.2 + safe-stable-stringify: 2.4.3 + stack-trace: 0.0.10 + triple-beam: 1.4.1 + winston-transport: 4.7.0 + wrap-ansi@6.2.0: dependencies: ansi-styles: 4.3.0 @@ -9789,8 +8854,6 @@ snapshots: signal-exit: 3.0.7 typedarray-to-buffer: 3.1.5 - ws@7.5.9: {} - ws@8.17.0: {} ws@8.2.3: {} @@ -9854,16 +8917,10 @@ snapshots: y18n: 5.0.8 yargs-parser: 21.1.1 - yn@2.0.0: {} - yn@3.1.1: {} yocto-queue@0.1.0: {} yocto-queue@1.0.0: {} - yoga-layout-prebuilt@1.10.0: - dependencies: - '@types/yoga-layout': 1.9.2 - zod@3.23.8: {} diff --git a/service/package.json b/service/package.json index e6a9dc1bf..fd4f6ec62 100644 --- a/service/package.json +++ b/service/package.json @@ -36,6 +36,7 @@ "pgwire": "github:kagis/pgwire#f1cb95f9a0f42a612bb5a6b67bb2eb793fc5fc87", "ts-codec": "^1.2.2", "uuid": "^9.0.1", + "winston": "^3.13.0", "yaml": "^2.3.2" }, "devDependencies": { diff --git a/service/src/entry.ts b/service/src/entry.ts index a39d572e8..28db06786 100644 --- a/service/src/entry.ts +++ b/service/src/entry.ts @@ -1,7 +1,16 @@ import '@journeyapps-platform/micro/register'; import './util/register-alerting.js'; -import { entry, utils } from '@powersync/service-core'; +import winston from 'winston'; +import { entry, utils, system } from '@powersync/service-core'; +// Configure logging to console +system.logger.configure({ + format: + utils.env.NODE_ENV == 'production' + ? winston.format.combine(winston.format.json()) + : winston.format.combine(winston.format.simple()), + transports: [new winston.transports.Console()] +}); import { startServer } from './runners/server.js'; import { startStreamWorker } from './runners/stream-worker.js'; From 2248ae0d22167ee13174817dd1fea5679c46b775 Mon Sep 17 00:00:00 2001 From: Steven Ontong Date: Thu, 13 Jun 2024 09:33:56 +0200 Subject: [PATCH 06/36] logger formats --- packages/service-core/src/system/Logger.ts | 9 +++++++++ service/src/entry.ts | 7 ++----- service/src/runners/server.ts | 12 ++++++------ service/src/runners/stream-worker.ts | 11 ++++++----- 4 files changed, 23 insertions(+), 16 deletions(-) diff --git a/packages/service-core/src/system/Logger.ts b/packages/service-core/src/system/Logger.ts index 0e8a307e5..2d9d153cb 100644 --- a/packages/service-core/src/system/Logger.ts +++ b/packages/service-core/src/system/Logger.ts @@ -6,3 +6,12 @@ import winston from 'winston'; * core package. */ export const logger = winston.createLogger(); + +export namespace Logger { + export const instance = logger; + export const development_format = winston.format.combine( + winston.format.colorize({ level: true }), + winston.format.simple() + ); + export const production_format = winston.format.combine(winston.format.timestamp(), winston.format.json()); +} diff --git a/service/src/entry.ts b/service/src/entry.ts index 28db06786..597e2c75a 100644 --- a/service/src/entry.ts +++ b/service/src/entry.ts @@ -2,13 +2,10 @@ import '@journeyapps-platform/micro/register'; import './util/register-alerting.js'; import winston from 'winston'; -import { entry, utils, system } from '@powersync/service-core'; +import { entry, utils, system, Logger } from '@powersync/service-core'; // Configure logging to console system.logger.configure({ - format: - utils.env.NODE_ENV == 'production' - ? winston.format.combine(winston.format.json()) - : winston.format.combine(winston.format.simple()), + format: utils.env.NODE_ENV == 'production' ? 
Logger.production_format : Logger.development_format, transports: [new winston.transports.Console()] }); diff --git a/service/src/runners/server.ts b/service/src/runners/server.ts index 2b4b196d3..cfee314fb 100644 --- a/service/src/runners/server.ts +++ b/service/src/runners/server.ts @@ -3,7 +3,7 @@ import fastify from 'fastify'; import cors from '@fastify/cors'; import * as micro from '@journeyapps-platform/micro'; import { RSocketRequestMeta } from '@powersync/service-rsocket-router'; -import { Metrics, routes, utils } from '@powersync/service-core'; +import { Metrics, logger, routes, utils } from '@powersync/service-core'; import { PowerSyncSystem } from '../system/PowerSyncSystem.js'; import { Router, SocketRouter, StreamingRouter } from '../routes/router.js'; @@ -11,7 +11,7 @@ import { Router, SocketRouter, StreamingRouter } from '../routes/router.js'; * Starts an API server */ export async function startServer(runnerConfig: utils.RunnerConfig) { - micro.logger.info('Booting'); + logger.info('Booting'); const config = await utils.loadConfig(runnerConfig); const system = new PowerSyncSystem(config); @@ -69,7 +69,7 @@ export async function startServer(runnerConfig: utils.RunnerConfig) { }; } } catch (ex) { - micro.logger.error(ex); + logger.error(ex); } return { @@ -84,9 +84,9 @@ export async function startServer(runnerConfig: utils.RunnerConfig) { payloadDecoder: async (rawData?: Buffer) => rawData && deserialize(rawData) }); - micro.logger.info('Starting system'); + logger.info('Starting system'); await system.start(); - micro.logger.info('System started'); + logger.info('System started'); Metrics.getInstance().configureApiMetrics(); @@ -96,7 +96,7 @@ export async function startServer(runnerConfig: utils.RunnerConfig) { // This is so that the handler is run before the server's handler, allowing streams to be interrupted on exit system.addTerminationHandler(); - micro.logger.info(`Running on port ${system.config.port}`); + logger.info(`Running on port ${system.config.port}`); await micro.signals.getSystemProbe().ready(); // Enable in development to track memory usage: diff --git a/service/src/runners/stream-worker.ts b/service/src/runners/stream-worker.ts index 58fc935c7..84d2aa3b5 100644 --- a/service/src/runners/stream-worker.ts +++ b/service/src/runners/stream-worker.ts @@ -1,10 +1,11 @@ import * as micro from '@journeyapps-platform/micro'; import { Direction } from '@journeyapps-platform/micro-migrate'; -import { migrations, replication, utils, Metrics } from '@powersync/service-core'; +import { migrations, replication, utils, Metrics, logger } from '@powersync/service-core'; + import { PowerSyncSystem } from '../system/PowerSyncSystem.js'; export async function startStreamWorker(runnerConfig: utils.RunnerConfig) { - micro.logger.info('Booting'); + logger.info('Booting'); const config = await utils.loadConfig(runnerConfig); @@ -18,9 +19,9 @@ export async function startStreamWorker(runnerConfig: utils.RunnerConfig) { const system = new PowerSyncSystem(config); - micro.logger.info('Starting system'); + logger.info('Starting system'); await system.start(); - micro.logger.info('System started'); + logger.info('System started'); Metrics.getInstance().configureReplicationMetrics(system); @@ -31,7 +32,7 @@ export async function startStreamWorker(runnerConfig: utils.RunnerConfig) { // This is so that the handler is run before the server's handler, allowing streams to be interrupted on exit system.addTerminationHandler(); - 
micro.signals.getTerminationHandler()!.handleTerminationSignal(async () => { + system.terminationHandler.handleTerminationSignal(async () => { await mngr.stop(); }); From 7d22ba8066e4d81a7a662f6a5f5eb4e0333edb03 Mon Sep 17 00:00:00 2001 From: Steven Ontong Date: Thu, 13 Jun 2024 09:36:48 +0200 Subject: [PATCH 07/36] added lifecyled system and temrination handlers --- packages/service-core/package.json | 2 + .../src/system/CorePowerSyncSystem.ts | 41 ++++-- .../src/system/LifeCycledSystem.ts | 59 +++++++++ .../src/system/signals/termination-handler.ts | 122 ++++++++++++++++++ .../service-core/src/system/system-index.ts | 3 + .../impl/filesystem-config-collector.ts | 12 +- pnpm-lock.yaml | 11 ++ service/src/system/PowerSyncSystem.ts | 21 +-- 8 files changed, 238 insertions(+), 33 deletions(-) create mode 100644 packages/service-core/src/system/LifeCycledSystem.ts create mode 100644 packages/service-core/src/system/signals/termination-handler.ts diff --git a/packages/service-core/package.json b/packages/service-core/package.json index f7825abc6..24207b7fb 100644 --- a/packages/service-core/package.json +++ b/packages/service-core/package.json @@ -36,6 +36,7 @@ "ipaddr.js": "^2.1.0", "ix": "^5.0.0", "jose": "^4.15.1", + "lodash": "^4.17.21", "lru-cache": "^10.2.2", "mongodb": "^6.7.0", "node-fetch": "^3.3.2", @@ -46,6 +47,7 @@ "yaml": "^2.3.2" }, "devDependencies": { + "@types/lodash": "^4.17.5", "@types/uuid": "^9.0.4", "typescript": "^5.2.2", "vite-tsconfig-paths": "^4.3.2", diff --git a/packages/service-core/src/system/CorePowerSyncSystem.ts b/packages/service-core/src/system/CorePowerSyncSystem.ts index 304d38b5a..ec4b50b53 100644 --- a/packages/service-core/src/system/CorePowerSyncSystem.ts +++ b/packages/service-core/src/system/CorePowerSyncSystem.ts @@ -1,26 +1,51 @@ import * as pgwire from '@powersync/service-jpgwire'; -import * as micro from '@journeyapps-platform/micro'; -import * as auth from '../auth/auth-index.js'; import * as storage from '../storage/storage-index.js'; import * as utils from '../util/util-index.js'; +import { LifeCycledSystem, LifeCycledSystemOptions } from './LifeCycledSystem.js'; +import { logger } from './Logger.js'; -export abstract class CorePowerSyncSystem extends micro.system.MicroSystem { +export abstract class CorePowerSyncSystem extends LifeCycledSystem { abstract storage: storage.BucketStorageFactory; - abstract client_keystore: auth.KeyStore; - abstract dev_client_keystore: auth.KeyStore; abstract pgwire_pool?: pgwire.PgClient; protected stopHandlers: Set<() => void> = new Set(); closed: boolean; - constructor(public config: utils.ResolvedPowerSyncConfig) { - super(); + constructor(public config: utils.ResolvedPowerSyncConfig, options?: LifeCycledSystemOptions) { + super(options); this.closed = false; } - abstract addTerminationHandler(): void; + get client_keystore() { + return this.config.client_keystore; + } + + get dev_client_keystore() { + return this.config.dev_client_keystore; + } + + /** + * Adds a termination handler which will call handlers registered via + * [addStopHandler]. + * This should be called after the server is started and it's termination handler is added. + * This is so that the handler is run before the server's handler, allowing streams to be interrupted on exit + * + * TODO this could be improved once router terminations are handled + */ + addTerminationHandler() { + this.terminationHandler.handleTerminationSignal(async () => { + // Close open streams, so that they don't block the server from closing. 
+ // Note: This does not work well when streaming requests are queued. In that case, the server still doesn't + // close in the 30-second timeout. + this.closed = true; + logger.info(`Closing ${this.stopHandlers.size} streams`); + for (let handler of this.stopHandlers) { + handler(); + } + }); + } addStopHandler(handler: () => void): () => void { if (this.closed) { diff --git a/packages/service-core/src/system/LifeCycledSystem.ts b/packages/service-core/src/system/LifeCycledSystem.ts new file mode 100644 index 000000000..57824b384 --- /dev/null +++ b/packages/service-core/src/system/LifeCycledSystem.ts @@ -0,0 +1,59 @@ +/** + * An interface that can be used to create a stateful System. A System is an entity + * which contains state, generally in the form of connections, that must be stated + * and stopped gracefully along with a services lifecycle. + * + * A System can contain anything but should offer a `start` and `stop` operation + */ + +import { TerminationHandler, createTerminationHandler } from './signals/termination-handler.js'; + +export type LifecycleCallback = (singleton: T) => Promise | void; + +export type PartialLifecycle = { + start?: LifecycleCallback; + stop?: LifecycleCallback; +}; + +export type ComponentLifecycle = PartialLifecycle & { + component: T; +}; +export type LifecycleHandler = () => ComponentLifecycle; + +export type LifeCycledSystemOptions = { + /** + * Optional termination handler. Defaults to a NodeJS process listener handler + * if not provided. + */ + terminationHandler?: TerminationHandler; +}; + +export abstract class LifeCycledSystem { + components: ComponentLifecycle[] = []; + terminationHandler: TerminationHandler; + + constructor(options?: LifeCycledSystemOptions) { + this.terminationHandler = options?.terminationHandler ?? createTerminationHandler(); + this.terminationHandler.handleTerminationSignal(() => this.stop()); + } + + withLifecycle = (component: T, lifecycle: PartialLifecycle): T => { + this.components.push({ + component: component, + ...lifecycle + }); + return component; + }; + + start = async () => { + for (const lifecycle of this.components) { + await lifecycle.start?.(lifecycle.component); + } + }; + + stop = async () => { + for (const lifecycle of this.components.reverse()) { + await lifecycle.stop?.(lifecycle.component); + } + }; +} diff --git a/packages/service-core/src/system/signals/termination-handler.ts b/packages/service-core/src/system/signals/termination-handler.ts new file mode 100644 index 000000000..31e8f26b3 --- /dev/null +++ b/packages/service-core/src/system/signals/termination-handler.ts @@ -0,0 +1,122 @@ +import _ from 'lodash'; +import { logger } from '../Logger.js'; + +export enum Signal { + SIGTERM = 'SIGTERM', + SIGINT = 'SIGINT', + SIGUSR2 = 'SIGUSR2' +} + +export type Handler = (event: Signal) => void | Promise; + +type TerminationHandlerParams = { + /** + * A list of process signals to listen for. + * + * @default ['SIGTERM', 'SIGINT', 'SIGUSR2'] + */ + signals?: Signal[]; + + /** + * The timeout for all handlers to complete. Once this is reached the process will be exited + */ + timeout_ms?: number; +}; + +/** + * Utility function to handle external termination signals. Calls an async handler + * and then kills the application. 
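A minimal usage sketch of the LifeCycledSystem added above, showing how withLifecycle and the termination handler are meant to compose. MongoConnection and its connect/close methods are illustrative placeholders, and the import path is indicative only:

import { system } from '@powersync/service-core'; // indicative import; exported via system-index.ts in this patch

class ExampleSystem extends system.LifeCycledSystem {
  // MongoConnection is a placeholder component; any object with start/stop-style methods works.
  db = this.withLifecycle(new MongoConnection(), {
    start: (db) => db.connect(),
    stop: (db) => db.close()
  });
}

const example = new ExampleSystem();
await example.start(); // start callbacks run in registration order
// SIGTERM/SIGINT now invoke example.stop(), which runs the stop callbacks in reverse order.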
+ */ +export const createTerminationHandler = (params?: TerminationHandlerParams) => { + const { signals = Object.values(Signal), timeout_ms = 30000 } = params || {}; + + const handlers: Handler[] = []; + + let signal_received = false; + const signalHandler = (signal: Signal) => { + if (signal === Signal.SIGINT) { + logger.info('Send ^C again to force exit'); + } + + if (signal_received) { + // The SIGINT signal is sent on ctrl-c - if the user presses ctrl-c twice then we + // hard exit + if (signal === Signal.SIGINT) { + logger.info('Received second ^C. Exiting'); + process.exit(1); + } + return; + } + + signal_received = true; + + new Promise(async (resolve) => { + logger.info('Terminating gracefully ...'); + + for (const handler of handlers) { + try { + await handler(signal); + } catch (err) { + logger.error('Failed to execute termination handler', err); + } + } + + logger.info('Exiting'); + resolve(); + }).then(() => { + process.exit(0); + }); + + setTimeout(() => { + logger.error('Timed out waiting for program to exit. Force exiting'); + process.exit(1); + }, timeout_ms); + }; + + // This debounce is needed as certain executors (like npm, pnpm) seem to send kill signals multiple times. This debounce + // prevents the termination handler from immediately exiting under those circumstances + const debouncedSignalHandler = _.debounce(signalHandler, 1000, { + leading: true, + trailing: false + }); + + for (const signal of signals) { + process.on(signal, () => debouncedSignalHandler(signal)); + } + + return { + /** + * Register a termination handler to be run when a termination signal is received. Calling + * this function will register the termination handler at the start of the list resulting + * in handlers being called in the reverse order to how they were registered. + * + * Use `handleTerminationSignalLast` if you want to register the handler at the end. + */ + handleTerminationSignal: (handler: Handler) => { + handlers.unshift(handler); + }, + + /** + * This is the same as `handleTerminationSignal` except it will register a termination handler + * at the end of the list - resulting in it being run after existing termination handlers. + * + * Use `handleTerminationSignal` if you want to register the handler at the start. 
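A small sketch of the resulting ordering, using the factory defined in this file. stopServer, closeStreams, flushTelemetry and startListener are placeholder functions, not part of this patch:

const termination = createTerminationHandler({ timeout_ms: 10_000 });

termination.handleTerminationSignalLast(() => flushTelemetry());       // appended
termination.handleTerminationSignal(() => stopServer());               // prepended
termination.handleTerminationSignal((signal) => closeStreams(signal)); // prepended later

// On SIGTERM the handlers run as: closeStreams -> stopServer -> flushTelemetry.

// gracefully() starts a component and prepends its cleanup in one call:
const listener = await termination.gracefully(
  () => startListener(),
  (component) => component.close()
);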
+ */ + handleTerminationSignalLast: (handler: Handler) => { + handlers.push(handler); + }, + + gracefully: async ( + exec: () => T | Promise, + handler: (component: T, signal: Signal) => Promise | void + ) => { + const component = await exec(); + handlers.unshift((signal) => { + return handler(component, signal); + }); + return component; + } + }; +}; + +export type TerminationHandler = ReturnType; diff --git a/packages/service-core/src/system/system-index.ts b/packages/service-core/src/system/system-index.ts index 34886eb26..3eb0666fe 100644 --- a/packages/service-core/src/system/system-index.ts +++ b/packages/service-core/src/system/system-index.ts @@ -1,2 +1,5 @@ export * from './CorePowerSyncSystem.js'; +export * from './LifeCycledSystem.js'; export * from './Logger.js'; + +export * from './signals/termination-handler.js'; diff --git a/packages/service-core/src/util/config/collectors/impl/filesystem-config-collector.ts b/packages/service-core/src/util/config/collectors/impl/filesystem-config-collector.ts index c9ce8b233..f7da7e3c4 100644 --- a/packages/service-core/src/util/config/collectors/impl/filesystem-config-collector.ts +++ b/packages/service-core/src/util/config/collectors/impl/filesystem-config-collector.ts @@ -1,4 +1,5 @@ import * as fs from 'fs/promises'; +import * as path from 'path'; import { ConfigCollector, ConfigFileFormat } from '../config-collector.js'; import { RunnerConfig } from '../../types.js'; @@ -15,16 +16,17 @@ export class FileSystemConfigCollector extends ConfigCollector { return null; } + const resolvedPath = path.resolve(process.cwd(), config_path); + // Check if file exists try { - await fs.access(config_path, fs.constants.F_OK); + await fs.access(resolvedPath, fs.constants.F_OK); } catch (ex) { - throw new Error(`Config file path ${config_path} was specified, but the file does not exist.`); + throw new Error(`Config file path ${resolvedPath} was specified, but the file does not exist.`); } - logger.info(`Collecting PowerSync configuration from File: ${config_path}`); - - const content = await fs.readFile(config_path, 'utf-8'); + logger.info(`Collecting PowerSync configuration from File: ${resolvedPath}`); + const content = await fs.readFile(resolvedPath, 'utf-8'); let contentType: ConfigFileFormat | undefined; switch (true) { diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 03cf13e90..10522c181 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -171,6 +171,9 @@ importers: jose: specifier: ^4.15.1 version: 4.15.5 + lodash: + specifier: ^4.17.21 + version: 4.17.21 lru-cache: specifier: ^10.2.2 version: 10.2.2 @@ -196,6 +199,9 @@ importers: specifier: ^2.3.2 version: 2.4.2 devDependencies: + '@types/lodash': + specifier: ^4.17.5 + version: 4.17.5 '@types/uuid': specifier: ^9.0.4 version: 9.0.8 @@ -1259,6 +1265,9 @@ packages: '@types/json-schema@7.0.15': resolution: {integrity: sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==} + '@types/lodash@4.17.5': + resolution: {integrity: sha512-MBIOHVZqVqgfro1euRDWX7OO0fBVUUMrN6Pwm8LQsz8cWhEpihlvR70ENj3f40j58TNxZaWv2ndSkInykNBBJw==} + '@types/mime@1.3.5': resolution: {integrity: sha512-/pyBZWSLD2n0dcHE3hq8s8ZvcETHtEuF+3E7XVt0Ig2nvsVQXdghHVcEkIWjy9A0wKfTn97a/PSDYohKIlnP/w==} @@ -5550,6 +5559,8 @@ snapshots: '@types/json-schema@7.0.15': {} + '@types/lodash@4.17.5': {} + '@types/mime@1.3.5': {} '@types/minimist@1.2.5': {} diff --git a/service/src/system/PowerSyncSystem.ts b/service/src/system/PowerSyncSystem.ts index 4243b4116..93024dca7 100644 --- 
a/service/src/system/PowerSyncSystem.ts +++ b/service/src/system/PowerSyncSystem.ts @@ -1,11 +1,8 @@ -import * as micro from '@journeyapps-platform/micro'; -import { auth, db, system, utils, storage, Metrics } from '@powersync/service-core'; +import { db, system, utils, storage, Metrics } from '@powersync/service-core'; import * as pgwire from '@powersync/service-jpgwire'; export class PowerSyncSystem extends system.CorePowerSyncSystem { storage: storage.BucketStorageFactory; - client_keystore: auth.KeyStore; - dev_client_keystore: auth.KeyStore; pgwire_pool?: pgwire.PgClient; constructor(public config: utils.ResolvedPowerSyncConfig) { @@ -13,9 +10,6 @@ export class PowerSyncSystem extends system.CorePowerSyncSystem { utils.setTags(config.metadata); - this.client_keystore = config.client_keystore; - this.dev_client_keystore = config.dev_client_keystore; - const pgOptions = config.connection; if (pgOptions != null) { const pool = pgwire.connectPgWirePool(pgOptions, { @@ -58,17 +52,4 @@ export class PowerSyncSystem extends system.CorePowerSyncSystem { } }); } - - addTerminationHandler(): void { - micro.signals.getTerminationHandler()!.handleTerminationSignal(async () => { - // Close open streams, so that they don't block the server from closing. - // Note: This does not work well when streaming requests are queued. In that case, the server still doesn't - // close in the 30-second timeout. - this.closed = true; - micro.logger.info(`Closing ${this.stopHandlers.size} streams`); - for (let handler of this.stopHandlers) { - handler(); - } - }); - } } From bd20cec9c6cf17911dc18321b769812b63bf9916 Mon Sep 17 00:00:00 2001 From: Steven Ontong Date: Thu, 13 Jun 2024 10:35:27 +0200 Subject: [PATCH 08/36] added probes --- .../service-core/src/replication/WalStream.ts | 25 +++++----- .../src/replication/WalStreamManager.ts | 9 ++-- .../src/replication/WalStreamRunner.ts | 5 +- packages/service-core/src/runner/teardown.ts | 4 +- .../src/system/LifeCycledSystem.ts | 9 ++++ .../src/system/signals/probes/fs-probes.ts | 47 +++++++++++++++++++ .../system/signals/probes/memory-probes.ts | 36 ++++++++++++++ .../src/system/signals/probes/probes.ts | 15 ++++++ .../service-core/src/system/system-index.ts | 3 ++ pnpm-lock.yaml | 3 ++ service/src/runners/server.ts | 2 +- service/src/runners/stream-worker.ts | 3 +- 12 files changed, 139 insertions(+), 22 deletions(-) create mode 100644 packages/service-core/src/system/signals/probes/fs-probes.ts create mode 100644 packages/service-core/src/system/signals/probes/memory-probes.ts create mode 100644 packages/service-core/src/system/signals/probes/probes.ts diff --git a/packages/service-core/src/replication/WalStream.ts b/packages/service-core/src/replication/WalStream.ts index 15c187959..8646c9127 100644 --- a/packages/service-core/src/replication/WalStream.ts +++ b/packages/service-core/src/replication/WalStream.ts @@ -1,4 +1,3 @@ -import * as fs from 'fs/promises'; import * as pgwire from '@powersync/service-jpgwire'; import * as micro from '@journeyapps-platform/micro'; import { SqliteRow, SqlSyncRules, TablePattern, toSyncRulesRow } from '@powersync/service-sync-rules'; @@ -11,6 +10,7 @@ import { getReplicationIdentityColumns } from './util.js'; import { WalConnection } from './WalConnection.js'; import { Metrics } from '../metrics/Metrics.js'; import { logger } from '../system/Logger.js'; +import { ProbeModule } from '../system/system-index.js'; export const ZERO_LSN = '00000000/00000000'; @@ -20,6 +20,7 @@ export interface WalStreamOptions { factory: 
storage.BucketStorageFactory; storage: storage.SyncRulesBucketStorage; abort_signal: AbortSignal; + probe: ProbeModule; } interface InitResult { @@ -51,12 +52,15 @@ export class WalStream { private startedStreaming = false; + private probe: ProbeModule; + constructor(options: WalStreamOptions) { this.storage = options.storage; this.sync_rules = options.storage.sync_rules; this.group_id = options.storage.group_id; this.slot_name = options.storage.slot_name; this.connections = options.connections; + this.probe = options.probe; this.wal_connection = new WalConnection({ db: this.connections.pool, sync_rules: this.sync_rules }); this.abort_signal = options.abort_signal; @@ -196,7 +200,7 @@ export class WalStream { // Check that replication slot exists for (let i = 120; i >= 0; i--) { - await touch(); + await this.touch(); if (i == 0) { util.captureException(last_error, { @@ -357,7 +361,7 @@ WHERE oid = $1::regclass`, await this.snapshotTable(batch, db, table); await batch.markSnapshotDone([table], lsn); - await touch(); + await this.touch(); } } await batch.commit(lsn); @@ -408,7 +412,7 @@ WHERE oid = $1::regclass`, // pgwire streaming uses reasonable chunk sizes, so we flush at the end // of each chunk. await batch.flush(); - await touch(); + await this.touch(); } await batch.flush(); @@ -561,7 +565,7 @@ WHERE oid = $1::regclass`, let count = 0; for await (const chunk of replicationStream.pgoutputDecode()) { - await touch(); + await this.touch(); if (this.abort_signal.aborted) { break; @@ -614,13 +618,8 @@ WHERE oid = $1::regclass`, replicationStream.ack(lsn); } -} -export async function touch() { - // FIXME: The probe does not actually check the timestamp on this. - // FIXME: We need a timeout of around 5+ minutes if we do start checking the timestamp, - // or reduce PING_INTERVAL. 
- await micro.signals.getSystemProbe().touch(); - // FIXME: The above probe touches the wrong file - await fs.writeFile('.probes/poll', `${Date.now()}`); + async touch() { + return this.probe.touch(); + } } diff --git a/packages/service-core/src/replication/WalStreamManager.ts b/packages/service-core/src/replication/WalStreamManager.ts index ffcb264ab..88279e73d 100644 --- a/packages/service-core/src/replication/WalStreamManager.ts +++ b/packages/service-core/src/replication/WalStreamManager.ts @@ -5,7 +5,6 @@ import * as storage from '../storage/storage-index.js'; import * as util from '../util/util-index.js'; import { DefaultErrorRateLimiter } from './ErrorRateLimiter.js'; -import { touch } from './WalStream.js'; import { WalStreamRunner } from './WalStreamRunner.js'; import { CorePowerSyncSystem } from '../system/CorePowerSyncSystem.js'; import { logger } from '../system/Logger.js'; @@ -76,7 +75,7 @@ export class WalStreamManager { logger.info('No sync rules configured - configure via API'); } while (!this.stopped) { - await touch(); + await this.system.probe.touch(); try { const pool = this.system.pgwire_pool; if (pool) { @@ -159,7 +158,8 @@ export class WalStreamManager { storage: storage, source_db: this.system.config.connection!, lock, - rateLimiter: this.rateLimiter + rateLimiter: this.rateLimiter, + probe: this.system.probe }); newStreams.set(syncRules.id, stream); stream.start(); @@ -200,7 +200,8 @@ export class WalStreamManager { factory: this.storage, storage: storage, source_db: this.system.config.connection!, - lock + lock, + probe: this.system.probe }); await stream.terminate(); } finally { diff --git a/packages/service-core/src/replication/WalStreamRunner.ts b/packages/service-core/src/replication/WalStreamRunner.ts index 9151abed9..fffcfa91e 100644 --- a/packages/service-core/src/replication/WalStreamRunner.ts +++ b/packages/service-core/src/replication/WalStreamRunner.ts @@ -7,6 +7,7 @@ import { ErrorRateLimiter } from './ErrorRateLimiter.js'; import { MissingReplicationSlotError, WalStream } from './WalStream.js'; import { ResolvedConnection } from '../util/config/types.js'; import { logger } from '../system/Logger.js'; +import { ProbeModule } from '../system/system-index.js'; export interface WalStreamRunnerOptions { factory: storage.BucketStorageFactory; @@ -14,6 +15,7 @@ export interface WalStreamRunnerOptions { source_db: ResolvedConnection; lock: storage.ReplicationLock; rateLimiter?: ErrorRateLimiter; + probe: ProbeModule; } export class WalStreamRunner { @@ -92,7 +94,8 @@ export class WalStreamRunner { abort_signal: this.abortController.signal, factory: this.options.factory, storage: this.options.storage, - connections + connections, + probe: this.options.probe }); await stream.replicate(); } catch (e) { diff --git a/packages/service-core/src/runner/teardown.ts b/packages/service-core/src/runner/teardown.ts index 896d1b091..216f3978f 100644 --- a/packages/service-core/src/runner/teardown.ts +++ b/packages/service-core/src/runner/teardown.ts @@ -10,6 +10,7 @@ import * as storage from '../storage/storage-index.js'; import * as utils from '../util/util-index.js'; import * as replication from '../replication/replication-index.js'; import { logger } from '../system/Logger.js'; +import { createFSProbe } from '../system/system-index.js'; /** * Attempt to terminate a single sync rules instance. 
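WalStream, WalStreamRunner and the teardown path now take an injected probe. A minimal stand-in that satisfies the ProbeModule shape introduced by this patch, e.g. for unit tests; the commented constructor call assumes the usual factory/storage/lock fixtures:

import { ProbeModule } from '../system/system-index.js'; // mirrors the import used by WalStreamRunner in this patch

const noopProbe: ProbeModule = {
  poll_timeout: 10_000,
  state: () => ({ ready: true, started: true, touched_at: new Date() }),
  ready: async () => {},
  unready: async () => {},
  touch: async () => {}
};

// e.g. new WalStreamRunner({ factory, storage, source_db, lock, probe: noopProbe })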
@@ -31,7 +32,8 @@ async function terminateReplicator( factory: storageFactory, storage: storage, source_db: connection, - lock + lock, + probe: createFSProbe() }); console.log('terminating', stream.slot_name); await stream.terminate(); diff --git a/packages/service-core/src/system/LifeCycledSystem.ts b/packages/service-core/src/system/LifeCycledSystem.ts index 57824b384..2658ef4cb 100644 --- a/packages/service-core/src/system/LifeCycledSystem.ts +++ b/packages/service-core/src/system/LifeCycledSystem.ts @@ -7,6 +7,7 @@ */ import { TerminationHandler, createTerminationHandler } from './signals/termination-handler.js'; +import { ProbeModule, createFSProbe } from './system-index.js'; export type LifecycleCallback = (singleton: T) => Promise | void; @@ -26,15 +27,23 @@ export type LifeCycledSystemOptions = { * if not provided. */ terminationHandler?: TerminationHandler; + + /** + * Probe handler for system readiness and liveliness state management. + * Defaults to a FileSystem based probe if not provided. + */ + probe?: ProbeModule; }; export abstract class LifeCycledSystem { components: ComponentLifecycle[] = []; terminationHandler: TerminationHandler; + probe: ProbeModule; constructor(options?: LifeCycledSystemOptions) { this.terminationHandler = options?.terminationHandler ?? createTerminationHandler(); this.terminationHandler.handleTerminationSignal(() => this.stop()); + this.probe = options?.probe ?? createFSProbe(); } withLifecycle = (component: T, lifecycle: PartialLifecycle): T => { diff --git a/packages/service-core/src/system/signals/probes/fs-probes.ts b/packages/service-core/src/system/signals/probes/fs-probes.ts new file mode 100644 index 000000000..a0fbd1fd9 --- /dev/null +++ b/packages/service-core/src/system/signals/probes/fs-probes.ts @@ -0,0 +1,47 @@ +import * as fs from 'node:fs/promises'; +import * as path from 'node:path'; + +import { createInMemoryProbe } from './memory-probes.js'; +import { ProbeModule } from './probes.js'; + +export const createFSProbe = (dir: string = path.join(process.cwd(), '.probes')): ProbeModule => { + const memory_probe = createInMemoryProbe(); + + const startup_path = path.join(dir, 'startup'); + const readiness_path = path.join(dir, 'ready'); + /** + * Note this is different to Journey Micro. 
+ * Updated here from `alive` to `poll` as per previous comment: + * FIXME: The above probe touches the wrong file + * */ + const liveness_path = path.join(dir, 'poll'); + + const touchFile = async (path: string) => { + try { + await fs.mkdir(dir, { + recursive: true + }); + await fs.writeFile(path, `${Date.now()}`); + } catch (err) {} + }; + + return { + poll_timeout: memory_probe.poll_timeout, + state: memory_probe.state, + + async ready() { + await Promise.all([touchFile(startup_path), touchFile(readiness_path)]); + await memory_probe.ready(); + }, + async unready() { + try { + await fs.unlink(readiness_path); + } catch (err) {} + await memory_probe.unready(); + }, + async touch() { + await touchFile(liveness_path); + await memory_probe.touch(); + } + }; +}; diff --git a/packages/service-core/src/system/signals/probes/memory-probes.ts b/packages/service-core/src/system/signals/probes/memory-probes.ts new file mode 100644 index 000000000..1b520f4f6 --- /dev/null +++ b/packages/service-core/src/system/signals/probes/memory-probes.ts @@ -0,0 +1,36 @@ +import { ProbeModule, ProbeState } from './probes.js'; + +export type ProbeParams = { + poll_timeout_ms: number; +}; + +export const createInMemoryProbe = (params?: ProbeParams): ProbeModule => { + const state: ProbeState = { + ready: false, + started: false, + touched_at: new Date() + }; + + return { + poll_timeout: params?.poll_timeout_ms ?? 10000, + + state: () => { + return { + ready: state.ready, + started: state.started, + touched_at: state.touched_at + }; + }, + ready: async () => { + state.ready = true; + state.started = true; + state.touched_at = new Date(); + }, + unready: async () => { + state.ready = false; + }, + touch: async () => { + state.touched_at = new Date(); + } + }; +}; diff --git a/packages/service-core/src/system/signals/probes/probes.ts b/packages/service-core/src/system/signals/probes/probes.ts new file mode 100644 index 000000000..649b30323 --- /dev/null +++ b/packages/service-core/src/system/signals/probes/probes.ts @@ -0,0 +1,15 @@ +export type ProbeState = { + ready: boolean; + started: boolean; + touched_at: Date; +}; + +export type ProbeModule = { + poll_timeout: number; + + state(): ProbeState; + + ready(): Promise; + unready(): Promise; + touch(): Promise; +}; diff --git a/packages/service-core/src/system/system-index.ts b/packages/service-core/src/system/system-index.ts index 3eb0666fe..26422472f 100644 --- a/packages/service-core/src/system/system-index.ts +++ b/packages/service-core/src/system/system-index.ts @@ -3,3 +3,6 @@ export * from './LifeCycledSystem.js'; export * from './Logger.js'; export * from './signals/termination-handler.js'; +export * from './signals/probes/fs-probes.js'; +export * from './signals/probes/memory-probes.js'; +export * from './signals/probes/probes.js'; diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 10522c181..13e525f12 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -86,6 +86,9 @@ importers: uuid: specifier: ^9.0.1 version: 9.0.1 + winston: + specifier: ^3.13.0 + version: 3.13.0 ws: specifier: ^8.17.0 version: 8.17.0 diff --git a/service/src/runners/server.ts b/service/src/runners/server.ts index cfee314fb..13c3a0ffd 100644 --- a/service/src/runners/server.ts +++ b/service/src/runners/server.ts @@ -97,7 +97,7 @@ export async function startServer(runnerConfig: utils.RunnerConfig) { system.addTerminationHandler(); logger.info(`Running on port ${system.config.port}`); - await micro.signals.getSystemProbe().ready(); + await system.probe.ready(); // Enable in development 
to track memory usage: // trackMemoryUsage(); diff --git a/service/src/runners/stream-worker.ts b/service/src/runners/stream-worker.ts index 84d2aa3b5..08d476fc6 100644 --- a/service/src/runners/stream-worker.ts +++ b/service/src/runners/stream-worker.ts @@ -1,4 +1,3 @@ -import * as micro from '@journeyapps-platform/micro'; import { Direction } from '@journeyapps-platform/micro-migrate'; import { migrations, replication, utils, Metrics, logger } from '@powersync/service-core'; @@ -36,5 +35,5 @@ export async function startStreamWorker(runnerConfig: utils.RunnerConfig) { await mngr.stop(); }); - await micro.signals.getSystemProbe().ready(); + await system.probe.ready(); } From f39360ebe4b69e00bfb4b77922e00770c8191e7b Mon Sep 17 00:00:00 2001 From: Steven Ontong Date: Thu, 13 Jun 2024 11:36:28 +0200 Subject: [PATCH 09/36] move framework logic to framework package --- packages/rsocket-router/package.json | 2 +- .../src/router/ReactiveSocketRouter.ts | 6 ++-- .../transport/WebsocketDuplexConnection.ts | 8 +++--- packages/rsocket-router/tsconfig.json | 6 +++- packages/service-core/package.json | 1 + packages/service-core/src/api/diagnostics.ts | 2 +- packages/service-core/src/auth/KeyStore.ts | 2 +- packages/service-core/src/entry/cli-entry.ts | 2 +- packages/service-core/src/index.ts | 1 - packages/service-core/src/metrics/Metrics.ts | 2 +- .../service-core/src/replication/WalStream.ts | 7 +++-- .../src/replication/WalStreamManager.ts | 2 +- .../src/replication/WalStreamRunner.ts | 3 +- .../service-core/src/routes/socket-route.ts | 2 +- .../service-core/src/routes/sync-stream.ts | 2 +- packages/service-core/src/runner/teardown.ts | 3 +- .../src/storage/MongoBucketStorage.ts | 2 +- .../src/storage/mongo/MongoBucketBatch.ts | 2 +- .../src/storage/mongo/MongoSyncRulesLock.ts | 2 +- .../src/storage/mongo/PersistedBatch.ts | 2 +- packages/service-core/src/sync/sync.ts | 2 +- .../src/system/CorePowerSyncSystem.ts | 3 +- .../service-core/src/system/system-index.ts | 7 ----- .../impl/filesystem-config-collector.ts | 2 +- .../util/config/compound-config-collector.ts | 2 +- .../service-core/src/util/memory-tracking.ts | 2 +- .../service-core/src/util/pgwire_utils.ts | 2 +- packages/service-core/src/util/utils.ts | 2 +- packages/service-core/tsconfig.json | 3 ++ packages/service-framework/package.json | 27 ++++++++++++++++++ .../src/errors/framework-errors.ts | 0 packages/service-framework/src/index.ts | 8 ++++++ .../src/logger}/Logger.ts | 0 .../src}/signals/probes/fs-probes.ts | 0 .../src}/signals/probes/memory-probes.ts | 0 .../src}/signals/probes/probes.ts | 0 .../src}/signals/termination-handler.ts | 2 +- .../src/system/LifeCycledSystem.ts | 5 ++-- packages/service-framework/tsconfig.json | 10 +++++++ pnpm-lock.yaml | 28 +++++++++++++++---- service/package.json | 1 + service/src/runners/server.ts | 3 +- service/src/runners/stream-worker.ts | 3 +- service/tsconfig.json | 3 ++ tsconfig.json | 3 ++ 45 files changed, 123 insertions(+), 54 deletions(-) create mode 100644 packages/service-framework/package.json create mode 100644 packages/service-framework/src/errors/framework-errors.ts create mode 100644 packages/service-framework/src/index.ts rename packages/{service-core/src/system => service-framework/src/logger}/Logger.ts (100%) rename packages/{service-core/src/system => service-framework/src}/signals/probes/fs-probes.ts (100%) rename packages/{service-core/src/system => service-framework/src}/signals/probes/memory-probes.ts (100%) rename packages/{service-core/src/system => 
service-framework/src}/signals/probes/probes.ts (100%) rename packages/{service-core/src/system => service-framework/src}/signals/termination-handler.ts (98%) rename packages/{service-core => service-framework}/src/system/LifeCycledSystem.ts (89%) create mode 100644 packages/service-framework/tsconfig.json diff --git a/packages/rsocket-router/package.json b/packages/rsocket-router/package.json index 1c740e3ec..79aa5f46c 100644 --- a/packages/rsocket-router/package.json +++ b/packages/rsocket-router/package.json @@ -18,7 +18,7 @@ "test": "vitest" }, "dependencies": { - "@journeyapps-platform/micro": "^17.0.1", + "@powersync/service-framework": "workspace:*", "rsocket-core": "1.0.0-alpha.3", "ts-codec": "^1.2.2", "uuid": "^9.0.1", diff --git a/packages/rsocket-router/src/router/ReactiveSocketRouter.ts b/packages/rsocket-router/src/router/ReactiveSocketRouter.ts index 59bc79fdb..30f566017 100644 --- a/packages/rsocket-router/src/router/ReactiveSocketRouter.ts +++ b/packages/rsocket-router/src/router/ReactiveSocketRouter.ts @@ -3,7 +3,6 @@ * to expose reactive websocket stream in an interface similar to * other journey micro routers. */ -import * as micro from '@journeyapps-platform/micro'; import * as http from 'http'; import { Payload, RSocketServer } from 'rsocket-core'; import * as ws from 'ws'; @@ -17,6 +16,7 @@ import { SocketResponder } from './types.js'; import { WebsocketServerTransport } from './transport/WebSocketServerTransport.js'; +import { logger } from '@powersync/service-framework'; export class ReactiveSocketRouter { constructor(protected options?: ReactiveSocketRouterOptions) {} @@ -69,7 +69,7 @@ export class ReactiveSocketRouter { const observer = new SocketRouterObserver(); handleReactiveStream(context, { payload, initialN, responder }, observer, params).catch((ex) => { - micro.logger.error(ex); + logger.error(ex); responder.onError(ex); responder.onComplete(); }); @@ -154,7 +154,7 @@ export async function handleReactiveStream( initialN }); } catch (ex) { - micro.logger.error(ex); + logger.error(ex); responder.onError(ex); responder.onComplete(); } diff --git a/packages/rsocket-router/src/router/transport/WebsocketDuplexConnection.ts b/packages/rsocket-router/src/router/transport/WebsocketDuplexConnection.ts index 665789e12..9044b9cf4 100644 --- a/packages/rsocket-router/src/router/transport/WebsocketDuplexConnection.ts +++ b/packages/rsocket-router/src/router/transport/WebsocketDuplexConnection.ts @@ -15,7 +15,7 @@ * limitations under the License. */ -import * as micro from '@journeyapps-platform/micro'; +import { logger } from '@powersync/service-framework'; import { Closeable, Deferred, @@ -93,7 +93,7 @@ export class WebsocketDuplexConnection extends Deferred implements DuplexConnect }; private handleError = (e: WebSocket.ErrorEvent): void => { - micro.logger.error(`Error in WebSocket duplex connection: ${e}`); + logger.error(`Error in WebSocket duplex connection: ${e}`); this.close(e.error); }; @@ -123,7 +123,7 @@ export class WebsocketDuplexConnection extends Deferred implements DuplexConnect throw new Error(`Unable to deserialize frame`); } } catch (ex) { - micro.logger.info(`Received error deserializing initial frame buffer. Skipping connection request.`, ex); + logger.info(`Received error deserializing initial frame buffer. 
Skipping connection request.`, ex); // The initial frame should always be parsable return socket.end(); } @@ -137,7 +137,7 @@ export class WebsocketDuplexConnection extends Deferred implements DuplexConnect await connectionAcceptor(frame, connection); socket.resume(); } catch (error) { - micro.logger.info(`Error accepting connection:`, error); + logger.info(`Error accepting connection:`, error); connection.close(error); } }); diff --git a/packages/rsocket-router/tsconfig.json b/packages/rsocket-router/tsconfig.json index d37ee074d..9d9458e4d 100644 --- a/packages/rsocket-router/tsconfig.json +++ b/packages/rsocket-router/tsconfig.json @@ -6,5 +6,9 @@ "rootDir": "src" }, "include": ["src"], - "references": [] + "references": [ + { + "path": "../service-framework" + } + ] } diff --git a/packages/service-core/package.json b/packages/service-core/package.json index 24207b7fb..c0dbd41f3 100644 --- a/packages/service-core/package.json +++ b/packages/service-core/package.json @@ -25,6 +25,7 @@ "@opentelemetry/sdk-metrics": "1.24.1", "@powersync/service-jpgwire": "workspace:*", "@powersync/service-jsonbig": "workspace:*", + "@powersync/service-framework": "workspace:*", "@powersync/service-rsocket-router": "workspace:*", "@powersync/service-sync-rules": "workspace:*", "@powersync/service-types": "workspace:*", diff --git a/packages/service-core/src/api/diagnostics.ts b/packages/service-core/src/api/diagnostics.ts index 125259397..08febe8f2 100644 --- a/packages/service-core/src/api/diagnostics.ts +++ b/packages/service-core/src/api/diagnostics.ts @@ -7,7 +7,7 @@ import * as storage from '../storage/storage-index.js'; import * as util from '../util/util-index.js'; import { CorePowerSyncSystem } from '../system/CorePowerSyncSystem.js'; -import { logger } from '../system/Logger.js'; +import { logger } from '@powersync/service-framework'; export async function getConnectionStatus(system: CorePowerSyncSystem): Promise { if (system.pgwire_pool == null) { diff --git a/packages/service-core/src/auth/KeyStore.ts b/packages/service-core/src/auth/KeyStore.ts index fdd214117..d0da68141 100644 --- a/packages/service-core/src/auth/KeyStore.ts +++ b/packages/service-core/src/auth/KeyStore.ts @@ -3,7 +3,7 @@ import secs from '../util/secs.js'; import { KeyOptions, KeySpec, SUPPORTED_ALGORITHMS } from './KeySpec.js'; import { KeyCollector } from './KeyCollector.js'; import { JwtPayload } from './JwtPayload.js'; -import { logger } from '../system/Logger.js'; +import { logger } from '@powersync/service-framework'; /** * KeyStore to get keys and verify tokens. 
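After this move, consumers obtain the shared winston logger from @powersync/service-framework and attach their own transports. A sketch of the pattern, with formats mirroring the development and production formats from the earlier logger-formats patch; the plain process.env check stands in for the utils.env.NODE_ENV lookup used in entry.ts:

import winston from 'winston';
import { logger } from '@powersync/service-framework';

// The framework exposes a bare winston.createLogger() instance; callers attach a format
// and transports before use, as service/src/entry.ts does.
logger.configure({
  format:
    process.env.NODE_ENV === 'production'
      ? winston.format.combine(winston.format.timestamp(), winston.format.json())
      : winston.format.combine(winston.format.colorize({ level: true }), winston.format.simple()),
  transports: [new winston.transports.Console()]
});

logger.info('Booting');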
diff --git a/packages/service-core/src/entry/cli-entry.ts b/packages/service-core/src/entry/cli-entry.ts index ee216baae..2df3f2b68 100644 --- a/packages/service-core/src/entry/cli-entry.ts +++ b/packages/service-core/src/entry/cli-entry.ts @@ -4,7 +4,7 @@ import * as utils from '../util/util-index.js'; import { registerMigrationAction } from './commands/migrate-action.js'; import { registerTearDownAction } from './commands/teardown-action.js'; import { registerStartAction } from './entry-index.js'; -import { logger } from '../system/Logger.js'; +import { logger } from '@powersync/service-framework'; /** * Generates a Commander program which serves as the entry point diff --git a/packages/service-core/src/index.ts b/packages/service-core/src/index.ts index 492512a9b..6cf074cbf 100644 --- a/packages/service-core/src/index.ts +++ b/packages/service-core/src/index.ts @@ -31,7 +31,6 @@ export * from './sync/sync-index.js'; export * as sync from './sync/sync-index.js'; export * from './system/CorePowerSyncSystem.js'; -export * from './system/Logger.js'; export * as system from './system/system-index.js'; export * from './util/util-index.js'; diff --git a/packages/service-core/src/metrics/Metrics.ts b/packages/service-core/src/metrics/Metrics.ts index 0698e1de4..3f59b8442 100644 --- a/packages/service-core/src/metrics/Metrics.ts +++ b/packages/service-core/src/metrics/Metrics.ts @@ -7,7 +7,7 @@ import * as util from '../util/util-index.js'; import * as storage from '../storage/storage-index.js'; import { CorePowerSyncSystem } from '../system/CorePowerSyncSystem.js'; import { Resource } from '@opentelemetry/resources'; -import { logger } from '../system/Logger.js'; +import { logger } from '@powersync/service-framework'; export interface MetricsOptions { disable_telemetry_sharing: boolean; diff --git a/packages/service-core/src/replication/WalStream.ts b/packages/service-core/src/replication/WalStream.ts index 8646c9127..11cf7d3b3 100644 --- a/packages/service-core/src/replication/WalStream.ts +++ b/packages/service-core/src/replication/WalStream.ts @@ -9,14 +9,12 @@ import { getPgOutputRelation, getRelId, PgRelation } from './PgRelation.js'; import { getReplicationIdentityColumns } from './util.js'; import { WalConnection } from './WalConnection.js'; import { Metrics } from '../metrics/Metrics.js'; -import { logger } from '../system/Logger.js'; -import { ProbeModule } from '../system/system-index.js'; +import { logger, ProbeModule } from '@powersync/service-framework'; export const ZERO_LSN = '00000000/00000000'; export interface WalStreamOptions { connections: util.PgManager; - factory: storage.BucketStorageFactory; storage: storage.SyncRulesBucketStorage; abort_signal: AbortSignal; @@ -620,6 +618,9 @@ WHERE oid = $1::regclass`, } async touch() { + // FIXME: The hosted Kubernetes probe does not actually check the timestamp on this. + // FIXME: We need a timeout of around 5+ minutes in Kubernetes if we do start checking the timestamp, + // or reduce PING_INTERVAL here. 
return this.probe.touch(); } } diff --git a/packages/service-core/src/replication/WalStreamManager.ts b/packages/service-core/src/replication/WalStreamManager.ts index 88279e73d..aabab7d28 100644 --- a/packages/service-core/src/replication/WalStreamManager.ts +++ b/packages/service-core/src/replication/WalStreamManager.ts @@ -7,7 +7,7 @@ import * as util from '../util/util-index.js'; import { DefaultErrorRateLimiter } from './ErrorRateLimiter.js'; import { WalStreamRunner } from './WalStreamRunner.js'; import { CorePowerSyncSystem } from '../system/CorePowerSyncSystem.js'; -import { logger } from '../system/Logger.js'; +import { logger } from '@powersync/service-framework'; // 5 minutes const PING_INTERVAL = 1_000_000_000n * 300n; diff --git a/packages/service-core/src/replication/WalStreamRunner.ts b/packages/service-core/src/replication/WalStreamRunner.ts index fffcfa91e..a35e38d9c 100644 --- a/packages/service-core/src/replication/WalStreamRunner.ts +++ b/packages/service-core/src/replication/WalStreamRunner.ts @@ -6,8 +6,7 @@ import * as util from '../util/util-index.js'; import { ErrorRateLimiter } from './ErrorRateLimiter.js'; import { MissingReplicationSlotError, WalStream } from './WalStream.js'; import { ResolvedConnection } from '../util/config/types.js'; -import { logger } from '../system/Logger.js'; -import { ProbeModule } from '../system/system-index.js'; +import { logger, ProbeModule } from '@powersync/service-framework'; export interface WalStreamRunnerOptions { factory: storage.BucketStorageFactory; diff --git a/packages/service-core/src/routes/socket-route.ts b/packages/service-core/src/routes/socket-route.ts index cb4ac2077..06d20d5d7 100644 --- a/packages/service-core/src/routes/socket-route.ts +++ b/packages/service-core/src/routes/socket-route.ts @@ -7,7 +7,7 @@ import { streamResponse } from '../sync/sync.js'; import { SyncRoutes } from './sync-stream.js'; import { SocketRouteGenerator } from './router-socket.js'; import { Metrics } from '../metrics/Metrics.js'; -import { logger } from '../system/Logger.js'; +import { logger } from '@powersync/service-framework'; export const sync_stream_reactive: SocketRouteGenerator = (router) => router.reactiveStream(SyncRoutes.STREAM, { diff --git a/packages/service-core/src/routes/sync-stream.ts b/packages/service-core/src/routes/sync-stream.ts index 91aaa3d5d..742b71ee2 100644 --- a/packages/service-core/src/routes/sync-stream.ts +++ b/packages/service-core/src/routes/sync-stream.ts @@ -8,7 +8,7 @@ import * as util from '../util/util-index.js'; import { authUser } from './auth.js'; import { RouteGenerator } from './router.js'; import { Metrics } from '../metrics/Metrics.js'; -import { logger } from '../system/Logger.js'; +import { logger } from '@powersync/service-framework'; export enum SyncRoutes { STREAM = '/sync/stream' diff --git a/packages/service-core/src/runner/teardown.ts b/packages/service-core/src/runner/teardown.ts index 216f3978f..496039dcf 100644 --- a/packages/service-core/src/runner/teardown.ts +++ b/packages/service-core/src/runner/teardown.ts @@ -9,8 +9,7 @@ import * as db from '../db/db-index.js'; import * as storage from '../storage/storage-index.js'; import * as utils from '../util/util-index.js'; import * as replication from '../replication/replication-index.js'; -import { logger } from '../system/Logger.js'; -import { createFSProbe } from '../system/system-index.js'; +import { logger, createFSProbe } from '@powersync/service-framework'; /** * Attempt to terminate a single sync rules instance. 
diff --git a/packages/service-core/src/storage/MongoBucketStorage.ts b/packages/service-core/src/storage/MongoBucketStorage.ts index 94c742198..526dd8ed9 100644 --- a/packages/service-core/src/storage/MongoBucketStorage.ts +++ b/packages/service-core/src/storage/MongoBucketStorage.ts @@ -24,7 +24,7 @@ import { SyncRuleDocument, SyncRuleState } from './mongo/models.js'; import { generateSlotName } from './mongo/util.js'; import { locks } from '@journeyapps-platform/micro'; import { v4 as uuid } from 'uuid'; -import { logger } from '../system/Logger.js'; +import { logger } from '@powersync/service-framework'; export interface MongoBucketStorageOptions extends PowerSyncMongoOptions {} diff --git a/packages/service-core/src/storage/mongo/MongoBucketBatch.ts b/packages/service-core/src/storage/mongo/MongoBucketBatch.ts index 07ea7ab59..cd9701eec 100644 --- a/packages/service-core/src/storage/mongo/MongoBucketBatch.ts +++ b/packages/service-core/src/storage/mongo/MongoBucketBatch.ts @@ -13,7 +13,7 @@ import { MongoIdSequence } from './MongoIdSequence.js'; import { cacheKey, OperationBatch, RecordOperation } from './OperationBatch.js'; import { PersistedBatch } from './PersistedBatch.js'; import { BSON_DESERIALIZE_OPTIONS, idPrefixFilter, serializeLookup } from './util.js'; -import { logger } from '../../system/Logger.js'; +import { logger } from '@powersync/service-framework'; /** * 15MB diff --git a/packages/service-core/src/storage/mongo/MongoSyncRulesLock.ts b/packages/service-core/src/storage/mongo/MongoSyncRulesLock.ts index c407d25d3..902e19229 100644 --- a/packages/service-core/src/storage/mongo/MongoSyncRulesLock.ts +++ b/packages/service-core/src/storage/mongo/MongoSyncRulesLock.ts @@ -2,7 +2,7 @@ import crypto from 'crypto'; import { PersistedSyncRulesContent, ReplicationLock } from '../BucketStorage.js'; import { PowerSyncMongo } from './db.js'; -import { logger } from '../../system/Logger.js'; +import { logger } from '@powersync/service-framework'; /** * Manages a lock on a sync rules document, so that only one process diff --git a/packages/service-core/src/storage/mongo/PersistedBatch.ts b/packages/service-core/src/storage/mongo/PersistedBatch.ts index cbbe04543..f3b1f4fec 100644 --- a/packages/service-core/src/storage/mongo/PersistedBatch.ts +++ b/packages/service-core/src/storage/mongo/PersistedBatch.ts @@ -16,7 +16,7 @@ import { SourceKey } from './models.js'; import { serializeLookup } from './util.js'; -import { logger } from '../../system/Logger.js'; +import { logger } from '@powersync/service-framework'; /** * Maximum size of operations we write in a single transaction. diff --git a/packages/service-core/src/sync/sync.ts b/packages/service-core/src/sync/sync.ts index 55eaf5855..afe6d9fbe 100644 --- a/packages/service-core/src/sync/sync.ts +++ b/packages/service-core/src/sync/sync.ts @@ -10,7 +10,7 @@ import * as util from '../util/util-index.js'; import { mergeAsyncIterables } from './merge.js'; import { TokenStreamOptions, tokenStream } from './util.js'; import { Metrics } from '../metrics/Metrics.js'; -import { logger } from '../system/Logger.js'; +import { logger } from '@powersync/service-framework'; /** * Maximum number of connections actively fetching data. 
diff --git a/packages/service-core/src/system/CorePowerSyncSystem.ts b/packages/service-core/src/system/CorePowerSyncSystem.ts index ec4b50b53..71fca4542 100644 --- a/packages/service-core/src/system/CorePowerSyncSystem.ts +++ b/packages/service-core/src/system/CorePowerSyncSystem.ts @@ -1,9 +1,8 @@ import * as pgwire from '@powersync/service-jpgwire'; +import { LifeCycledSystem, LifeCycledSystemOptions, logger } from '@powersync/service-framework'; import * as storage from '../storage/storage-index.js'; import * as utils from '../util/util-index.js'; -import { LifeCycledSystem, LifeCycledSystemOptions } from './LifeCycledSystem.js'; -import { logger } from './Logger.js'; export abstract class CorePowerSyncSystem extends LifeCycledSystem { abstract storage: storage.BucketStorageFactory; diff --git a/packages/service-core/src/system/system-index.ts b/packages/service-core/src/system/system-index.ts index 26422472f..f40b2a60e 100644 --- a/packages/service-core/src/system/system-index.ts +++ b/packages/service-core/src/system/system-index.ts @@ -1,8 +1 @@ export * from './CorePowerSyncSystem.js'; -export * from './LifeCycledSystem.js'; -export * from './Logger.js'; - -export * from './signals/termination-handler.js'; -export * from './signals/probes/fs-probes.js'; -export * from './signals/probes/memory-probes.js'; -export * from './signals/probes/probes.js'; diff --git a/packages/service-core/src/util/config/collectors/impl/filesystem-config-collector.ts b/packages/service-core/src/util/config/collectors/impl/filesystem-config-collector.ts index f7da7e3c4..ee2d815a8 100644 --- a/packages/service-core/src/util/config/collectors/impl/filesystem-config-collector.ts +++ b/packages/service-core/src/util/config/collectors/impl/filesystem-config-collector.ts @@ -1,9 +1,9 @@ import * as fs from 'fs/promises'; import * as path from 'path'; +import { logger } from '@powersync/service-framework'; import { ConfigCollector, ConfigFileFormat } from '../config-collector.js'; import { RunnerConfig } from '../../types.js'; -import { logger } from '../../../../system/Logger.js'; export class FileSystemConfigCollector extends ConfigCollector { get name(): string { diff --git a/packages/service-core/src/util/config/compound-config-collector.ts b/packages/service-core/src/util/config/compound-config-collector.ts index 61a2dcb4d..8a8009974 100644 --- a/packages/service-core/src/util/config/compound-config-collector.ts +++ b/packages/service-core/src/util/config/compound-config-collector.ts @@ -9,7 +9,7 @@ import { Base64SyncRulesCollector } from './sync-rules/impl/base64-sync-rules-co import { InlineSyncRulesCollector } from './sync-rules/impl/inline-sync-rules-collector.js'; import { FileSystemSyncRulesCollector } from './sync-rules/impl/filesystem-sync-rules-collector.js'; import { FallbackConfigCollector } from './collectors/impl/fallback-config-collector.js'; -import { logger } from '../../system/Logger.js'; +import { logger } from '@powersync/service-framework'; const POWERSYNC_DEV_KID = 'powersync-dev'; diff --git a/packages/service-core/src/util/memory-tracking.ts b/packages/service-core/src/util/memory-tracking.ts index 4e1e6d05c..d7c096ddf 100644 --- a/packages/service-core/src/util/memory-tracking.ts +++ b/packages/service-core/src/util/memory-tracking.ts @@ -1,4 +1,4 @@ -import { logger } from '../system/Logger.js'; +import { logger } from '@powersync/service-framework'; /** * Track and log memory usage. 
diff --git a/packages/service-core/src/util/pgwire_utils.ts b/packages/service-core/src/util/pgwire_utils.ts index 80becc70a..9de64892a 100644 --- a/packages/service-core/src/util/pgwire_utils.ts +++ b/packages/service-core/src/util/pgwire_utils.ts @@ -6,7 +6,7 @@ import * as pgwire from '@powersync/service-jpgwire'; import { SqliteJsonValue, SqliteRow, ToastableSqliteRow, toSyncRulesRow } from '@powersync/service-sync-rules'; import * as replication from '../replication/replication-index.js'; -import { logger } from '../system/Logger.js'; +import { logger } from '@powersync/service-framework'; /** * pgwire message -> SQLite row. diff --git a/packages/service-core/src/util/utils.ts b/packages/service-core/src/util/utils.ts index 725c50d56..ff1d98ea1 100644 --- a/packages/service-core/src/util/utils.ts +++ b/packages/service-core/src/util/utils.ts @@ -5,7 +5,7 @@ import { pgwireRows } from '@powersync/service-jpgwire'; import * as storage from '../storage/storage-index.js'; import { BucketChecksum, OpId } from './protocol-types.js'; import { retriedQuery } from './pgwire_utils.js'; -import { logger } from '../system/Logger.js'; +import { logger } from '@powersync/service-framework'; export function hashData(type: string, id: string, data: string): number { const hash = crypto.createHash('sha256'); diff --git a/packages/service-core/tsconfig.json b/packages/service-core/tsconfig.json index 4504307ab..16e5ad0df 100644 --- a/packages/service-core/tsconfig.json +++ b/packages/service-core/tsconfig.json @@ -23,6 +23,9 @@ }, { "path": "../sync-rules" + }, + { + "path": "../service-framework" } ] } diff --git a/packages/service-framework/package.json b/packages/service-framework/package.json new file mode 100644 index 000000000..33b2fe617 --- /dev/null +++ b/packages/service-framework/package.json @@ -0,0 +1,27 @@ +{ + "name": "@powersync/service-framework", + "repository": "https://github.com/powersync-ja/powersync-service", + "version": "0.0.1", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "license": "FSL-1.1-Apache-2.0", + "publishConfig": { + "access": "restricted" + }, + "files": [ + "dist/**/*" + ], + "scripts": { + "clean": "rm -r ./dist && tsc -b --clean", + "build": "tsc -b", + "test": "vitest" + }, + "keywords": [], + "dependencies": { + "lodash": "^4.17.21", + "winston": "^3.13.0" + }, + "devDependencies": { + "@types/lodash": "^4.17.5" + } +} diff --git a/packages/service-framework/src/errors/framework-errors.ts b/packages/service-framework/src/errors/framework-errors.ts new file mode 100644 index 000000000..e69de29bb diff --git a/packages/service-framework/src/index.ts b/packages/service-framework/src/index.ts new file mode 100644 index 000000000..181c12826 --- /dev/null +++ b/packages/service-framework/src/index.ts @@ -0,0 +1,8 @@ +export * from './logger/Logger'; + +export * from './signals/probes/fs-probes'; +export * from './signals/probes/memory-probes'; +export * from './signals/probes/probes'; +export * from './signals/termination-handler'; + +export * from './system/LifeCycledSystem'; diff --git a/packages/service-core/src/system/Logger.ts b/packages/service-framework/src/logger/Logger.ts similarity index 100% rename from packages/service-core/src/system/Logger.ts rename to packages/service-framework/src/logger/Logger.ts diff --git a/packages/service-core/src/system/signals/probes/fs-probes.ts b/packages/service-framework/src/signals/probes/fs-probes.ts similarity index 100% rename from packages/service-core/src/system/signals/probes/fs-probes.ts rename to 
packages/service-framework/src/signals/probes/fs-probes.ts diff --git a/packages/service-core/src/system/signals/probes/memory-probes.ts b/packages/service-framework/src/signals/probes/memory-probes.ts similarity index 100% rename from packages/service-core/src/system/signals/probes/memory-probes.ts rename to packages/service-framework/src/signals/probes/memory-probes.ts diff --git a/packages/service-core/src/system/signals/probes/probes.ts b/packages/service-framework/src/signals/probes/probes.ts similarity index 100% rename from packages/service-core/src/system/signals/probes/probes.ts rename to packages/service-framework/src/signals/probes/probes.ts diff --git a/packages/service-core/src/system/signals/termination-handler.ts b/packages/service-framework/src/signals/termination-handler.ts similarity index 98% rename from packages/service-core/src/system/signals/termination-handler.ts rename to packages/service-framework/src/signals/termination-handler.ts index 31e8f26b3..92170acde 100644 --- a/packages/service-core/src/system/signals/termination-handler.ts +++ b/packages/service-framework/src/signals/termination-handler.ts @@ -1,5 +1,5 @@ import _ from 'lodash'; -import { logger } from '../Logger.js'; +import { logger } from '../logger/Logger'; export enum Signal { SIGTERM = 'SIGTERM', diff --git a/packages/service-core/src/system/LifeCycledSystem.ts b/packages/service-framework/src/system/LifeCycledSystem.ts similarity index 89% rename from packages/service-core/src/system/LifeCycledSystem.ts rename to packages/service-framework/src/system/LifeCycledSystem.ts index 2658ef4cb..8736a4c2b 100644 --- a/packages/service-core/src/system/LifeCycledSystem.ts +++ b/packages/service-framework/src/system/LifeCycledSystem.ts @@ -6,8 +6,9 @@ * A System can contain anything but should offer a `start` and `stop` operation */ -import { TerminationHandler, createTerminationHandler } from './signals/termination-handler.js'; -import { ProbeModule, createFSProbe } from './system-index.js'; +import { createFSProbe } from '../signals/probes/fs-probes'; +import { ProbeModule } from '../signals/probes/probes'; +import { TerminationHandler, createTerminationHandler } from '../signals/termination-handler'; export type LifecycleCallback = (singleton: T) => Promise | void; diff --git a/packages/service-framework/tsconfig.json b/packages/service-framework/tsconfig.json new file mode 100644 index 000000000..d37ee074d --- /dev/null +++ b/packages/service-framework/tsconfig.json @@ -0,0 +1,10 @@ +{ + "extends": "../../tsconfig.base.json", + "compilerOptions": { + "outDir": "dist", + "declarationDir": "dist", + "rootDir": "src" + }, + "include": ["src"], + "references": [] +} diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 13e525f12..fda0fe6df 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -74,9 +74,9 @@ importers: packages/rsocket-router: dependencies: - '@journeyapps-platform/micro': - specifier: ^17.0.1 - version: 17.0.1(encoding@0.1.13)(socks@2.8.3) + '@powersync/service-framework': + specifier: workspace:* + version: link:../service-framework rsocket-core: specifier: 1.0.0-alpha.3 version: 1.0.0-alpha.3 @@ -86,9 +86,6 @@ importers: uuid: specifier: ^9.0.1 version: 9.0.1 - winston: - specifier: ^3.13.0 - version: 3.13.0 ws: specifier: ^8.17.0 version: 8.17.0 @@ -135,6 +132,9 @@ importers: '@opentelemetry/sdk-metrics': specifier: 1.24.1 version: 1.24.1(@opentelemetry/api@1.8.0) + '@powersync/service-framework': + specifier: workspace:* + version: link:../service-framework '@powersync/service-jpgwire': 
specifier: workspace:* version: link:../jpgwire @@ -218,6 +218,19 @@ importers: specifier: ^0.34.6 version: 0.34.6 + packages/service-framework: + dependencies: + lodash: + specifier: ^4.17.21 + version: 4.17.21 + winston: + specifier: ^3.13.0 + version: 3.13.0 + devDependencies: + '@types/lodash': + specifier: ^4.17.5 + version: 4.17.5 + packages/sync-rules: dependencies: '@powersync/service-jsonbig': @@ -272,6 +285,9 @@ importers: '@powersync/service-core': specifier: workspace:* version: link:../packages/service-core + '@powersync/service-framework': + specifier: workspace:* + version: link:../packages/service-framework '@powersync/service-jpgwire': specifier: workspace:* version: link:../packages/jpgwire diff --git a/service/package.json b/service/package.json index fd4f6ec62..59acbb256 100644 --- a/service/package.json +++ b/service/package.json @@ -17,6 +17,7 @@ "@opentelemetry/exporter-prometheus": "^0.43.0", "@opentelemetry/sdk-metrics": "^1.17.0", "@powersync/service-core": "workspace:*", + "@powersync/service-framework": "workspace:*", "@powersync/service-jpgwire": "workspace:*", "@powersync/service-jsonbig": "workspace:*", "@powersync/service-rsocket-router": "workspace:*", diff --git a/service/src/runners/server.ts b/service/src/runners/server.ts index 13c3a0ffd..f77617a32 100644 --- a/service/src/runners/server.ts +++ b/service/src/runners/server.ts @@ -3,7 +3,8 @@ import fastify from 'fastify'; import cors from '@fastify/cors'; import * as micro from '@journeyapps-platform/micro'; import { RSocketRequestMeta } from '@powersync/service-rsocket-router'; -import { Metrics, logger, routes, utils } from '@powersync/service-core'; +import { Metrics, routes, utils } from '@powersync/service-core'; +import { logger } from '@powersync/service-framework'; import { PowerSyncSystem } from '../system/PowerSyncSystem.js'; import { Router, SocketRouter, StreamingRouter } from '../routes/router.js'; diff --git a/service/src/runners/stream-worker.ts b/service/src/runners/stream-worker.ts index 08d476fc6..6bb23d488 100644 --- a/service/src/runners/stream-worker.ts +++ b/service/src/runners/stream-worker.ts @@ -1,5 +1,6 @@ import { Direction } from '@journeyapps-platform/micro-migrate'; -import { migrations, replication, utils, Metrics, logger } from '@powersync/service-core'; +import { migrations, replication, utils, Metrics } from '@powersync/service-core'; +import { logger } from '@powersync/service-framework'; import { PowerSyncSystem } from '../system/PowerSyncSystem.js'; diff --git a/service/tsconfig.json b/service/tsconfig.json index 7efe0ecef..636c76cca 100644 --- a/service/tsconfig.json +++ b/service/tsconfig.json @@ -21,6 +21,9 @@ { "path": "../packages/service-core" }, + { + "path": "../packages/service-framework" + }, { "path": "../packages/sync-rules" }, diff --git a/tsconfig.json b/tsconfig.json index a8bef0c63..60a401f3d 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -16,6 +16,9 @@ { "path": "./packages/sync-rules" }, + { + "path": "./packages/service-framework" + }, { "path": "./packages/types" }, From 65dcd3b72ff998e77105e45e2cc941b1a2e2dce6 Mon Sep 17 00:00:00 2001 From: Steven Ontong Date: Thu, 13 Jun 2024 13:31:56 +0200 Subject: [PATCH 10/36] move framework definitions seperately from implementation --- .gitignore | 1 + packages/rsocket-router/package.json | 1 + .../src/router/ReactiveSocketRouter.ts | 16 +- .../transport/WebSocketServerTransport.ts | 4 +- .../transport/WebsocketDuplexConnection.ts | 8 +- .../service-core/src/replication/WalStream.ts | 6 +- 
packages/service-core/src/routes/admin.ts | 3 +- packages/service-core/src/routes/dev.ts | 9 +- .../service-core/src/routes/socket-route.ts | 7 +- .../service-core/src/routes/sync-rules.ts | 9 +- .../service-core/src/routes/sync-stream.ts | 8 +- .../src/storage/mongo/MongoBucketBatch.ts | 24 +-- .../service-core/test/src/slow_tests.test.ts | 7 +- .../service-core/test/src/wal_stream_utils.ts | 4 +- packages/service-framework/package.json | 4 +- .../src/errors/errors-index.ts | 2 + .../src/errors/framework-errors.ts | 152 ++++++++++++++++++ .../service-framework/src/errors/utils.ts | 20 +++ packages/service-framework/src/index.ts | 12 +- .../src/logger/logger-index.ts | 1 + .../src/signals/signals-index.ts | 4 + .../src/system/system-index.ts | 1 + .../test/__snapshots__/errors.test.ts.snap | 10 ++ .../src/__snapshots__/errors.test.ts.snap | 10 ++ .../service-framework/test/src/errors.test.ts | 49 ++++++ packages/service-framework/test/tsconfig.json | 16 ++ pnpm-lock.yaml | 6 + service/src/entry.ts | 8 +- service/src/runners/server.ts | 14 +- 29 files changed, 353 insertions(+), 63 deletions(-) create mode 100644 packages/service-framework/src/errors/errors-index.ts create mode 100644 packages/service-framework/src/errors/utils.ts create mode 100644 packages/service-framework/src/logger/logger-index.ts create mode 100644 packages/service-framework/src/signals/signals-index.ts create mode 100644 packages/service-framework/src/system/system-index.ts create mode 100644 packages/service-framework/test/__snapshots__/errors.test.ts.snap create mode 100644 packages/service-framework/test/src/__snapshots__/errors.test.ts.snap create mode 100644 packages/service-framework/test/src/errors.test.ts create mode 100644 packages/service-framework/test/tsconfig.json diff --git a/.gitignore b/.gitignore index 63c24cd3d..9be3d7003 100644 --- a/.gitignore +++ b/.gitignore @@ -15,6 +15,7 @@ npm-error.log .pnpm-debug.log .local-dev .probes +powersync.yaml packages/*/manifest.json diff --git a/packages/rsocket-router/package.json b/packages/rsocket-router/package.json index 79aa5f46c..3ddc9400f 100644 --- a/packages/rsocket-router/package.json +++ b/packages/rsocket-router/package.json @@ -18,6 +18,7 @@ "test": "vitest" }, "dependencies": { + "@journeyapps-platform/micro": "^17.0.1", "@powersync/service-framework": "workspace:*", "rsocket-core": "1.0.0-alpha.3", "ts-codec": "^1.2.2", diff --git a/packages/rsocket-router/src/router/ReactiveSocketRouter.ts b/packages/rsocket-router/src/router/ReactiveSocketRouter.ts index 30f566017..f6ac6f6d3 100644 --- a/packages/rsocket-router/src/router/ReactiveSocketRouter.ts +++ b/packages/rsocket-router/src/router/ReactiveSocketRouter.ts @@ -16,7 +16,7 @@ import { SocketResponder } from './types.js'; import { WebsocketServerTransport } from './transport/WebSocketServerTransport.js'; -import { logger } from '@powersync/service-framework'; +import * as framework from '@powersync/service-framework'; export class ReactiveSocketRouter { constructor(protected options?: ReactiveSocketRouterOptions) {} @@ -58,7 +58,7 @@ export class ReactiveSocketRouter { // Throwing an exception in this context will be returned to the client side request if (!payload.metadata) { // Meta data is required for endpoint handler path matching - throw new micro.errors.AuthorizationError('No context meta data provided'); + throw new framework.errors.AuthorizationError('No context meta data provided'); } const context = await params.contextProvider(payload.metadata!); @@ -69,7 +69,7 @@ export class 
ReactiveSocketRouter { const observer = new SocketRouterObserver(); handleReactiveStream(context, { payload, initialN, responder }, observer, params).catch((ex) => { - logger.error(ex); + framework.logger.error(ex); responder.onError(ex); responder.onComplete(); }); @@ -115,7 +115,7 @@ export async function handleReactiveStream( }; if (!metadata) { - return exitWithError(new micro.errors.ValidationError('Metadata is not provided')); + return exitWithError(new framework.errors.ValidationError('Metadata is not provided')); } const meta = await params.metaDecoder(metadata); @@ -125,7 +125,7 @@ export async function handleReactiveStream( const route = params.endpoints.find((e) => e.path == path && e.type == RS_ENDPOINT_TYPE.STREAM); if (!route) { - return exitWithError(new micro.errors.ResourceNotFound('route', `No route for ${path} is configured`)); + return exitWithError(new framework.errors.ResourceNotFound('route', `No route for ${path} is configured`)); } const { handler, authorize, validator, decoder = params.payloadDecoder } = route; @@ -134,14 +134,14 @@ export async function handleReactiveStream( if (validator) { const isValid = validator.validate(requestPayload); if (!isValid.valid) { - return exitWithError(new micro.errors.ValidationError(isValid.errors)); + return exitWithError(new framework.errors.ValidationError(isValid.errors)); } } if (authorize) { const isAuthorized = await authorize({ params: requestPayload, context, observer, responder }); if (!isAuthorized.authorized) { - return exitWithError(new micro.errors.AuthorizationError(isAuthorized.errors)); + return exitWithError(new framework.errors.AuthorizationError(isAuthorized.errors)); } } @@ -154,7 +154,7 @@ export async function handleReactiveStream( initialN }); } catch (ex) { - logger.error(ex); + framework.logger.error(ex); responder.onError(ex); responder.onComplete(); } diff --git a/packages/rsocket-router/src/router/transport/WebSocketServerTransport.ts b/packages/rsocket-router/src/router/transport/WebSocketServerTransport.ts index 40e6338c7..a1bc5cf1b 100644 --- a/packages/rsocket-router/src/router/transport/WebSocketServerTransport.ts +++ b/packages/rsocket-router/src/router/transport/WebSocketServerTransport.ts @@ -28,7 +28,7 @@ import { } from 'rsocket-core'; import * as WebSocket from 'ws'; import { WebsocketDuplexConnection } from './WebsocketDuplexConnection.js'; -import * as micro from '@journeyapps-platform/micro'; +import * as framework from '@powersync/service-framework'; export type SocketFactory = (options: SocketOptions) => WebSocket.WebSocketServer; @@ -76,7 +76,7 @@ export class WebsocketServerTransport implements ServerTransport { const duplex = WebSocket.createWebSocketStream(websocket); WebsocketDuplexConnection.create(duplex, connectionAcceptor, multiplexerDemultiplexerFactory, websocket); } catch (ex) { - micro.logger.error(`Could not create duplex connection`, ex); + framework.logger.error(`Could not create duplex connection`, ex); if (websocket.readyState == websocket.OPEN) { websocket.close(); } diff --git a/packages/rsocket-router/src/router/transport/WebsocketDuplexConnection.ts b/packages/rsocket-router/src/router/transport/WebsocketDuplexConnection.ts index 9044b9cf4..287e4bbc6 100644 --- a/packages/rsocket-router/src/router/transport/WebsocketDuplexConnection.ts +++ b/packages/rsocket-router/src/router/transport/WebsocketDuplexConnection.ts @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -import { logger } from '@powersync/service-framework'; +import * as framework from '@powersync/service-framework'; import { Closeable, Deferred, @@ -93,7 +93,7 @@ export class WebsocketDuplexConnection extends Deferred implements DuplexConnect }; private handleError = (e: WebSocket.ErrorEvent): void => { - logger.error(`Error in WebSocket duplex connection: ${e}`); + framework.logger.error(`Error in WebSocket duplex connection: ${e}`); this.close(e.error); }; @@ -123,7 +123,7 @@ export class WebsocketDuplexConnection extends Deferred implements DuplexConnect throw new Error(`Unable to deserialize frame`); } } catch (ex) { - logger.info(`Received error deserializing initial frame buffer. Skipping connection request.`, ex); + framework.logger.info(`Received error deserializing initial frame buffer. Skipping connection request.`, ex); // The initial frame should always be parsable return socket.end(); } @@ -137,7 +137,7 @@ export class WebsocketDuplexConnection extends Deferred implements DuplexConnect await connectionAcceptor(frame, connection); socket.resume(); } catch (error) { - logger.info(`Error accepting connection:`, error); + framework.logger.info(`Error accepting connection:`, error); connection.close(error); } }); diff --git a/packages/service-core/src/replication/WalStream.ts b/packages/service-core/src/replication/WalStream.ts index 11cf7d3b3..1ea0f1bd1 100644 --- a/packages/service-core/src/replication/WalStream.ts +++ b/packages/service-core/src/replication/WalStream.ts @@ -1,5 +1,5 @@ import * as pgwire from '@powersync/service-jpgwire'; -import * as micro from '@journeyapps-platform/micro'; +import * as framework from '@powersync/service-framework'; import { SqliteRow, SqlSyncRules, TablePattern, toSyncRulesRow } from '@powersync/service-sync-rules'; import * as storage from '../storage/storage-index.js'; @@ -202,7 +202,7 @@ export class WalStream { if (i == 0) { util.captureException(last_error, { - level: micro.errors.ErrorSeverity.ERROR, + level: framework.errors.ErrorSeverity.ERROR, metadata: { replication_slot: slotName } @@ -243,7 +243,7 @@ export class WalStream { /publication.*does not exist/.test(e.message) ) { util.captureException(e, { - level: micro.errors.ErrorSeverity.WARNING, + level: framework.errors.ErrorSeverity.WARNING, metadata: { try_index: i, replication_slot: slotName diff --git a/packages/service-core/src/routes/admin.ts b/packages/service-core/src/routes/admin.ts index c12065d6b..27ceeaa83 100644 --- a/packages/service-core/src/routes/admin.ts +++ b/packages/service-core/src/routes/admin.ts @@ -1,4 +1,5 @@ import * as micro from '@journeyapps-platform/micro'; +import * as framework from '@powersync/service-framework'; import { SqlSyncRules, SqliteValue, StaticSchema, isJsonValue, toSyncRulesValue } from '@powersync/service-sync-rules'; import { internal_routes } from '@powersync/service-types'; @@ -142,7 +143,7 @@ export const reprocess: RouteGenerator = (router) => const active = await storage.getActiveSyncRules(); if (active == null) { - throw new micro.errors.JourneyError({ + throw new framework.errors.JourneyError({ status: 422, code: 'NO_SYNC_RULES', description: 'No active sync rules' diff --git a/packages/service-core/src/routes/dev.ts b/packages/service-core/src/routes/dev.ts index 6967462e9..58cc14eef 100644 --- a/packages/service-core/src/routes/dev.ts +++ b/packages/service-core/src/routes/dev.ts @@ -1,5 +1,6 @@ import * as t from 'ts-codec'; import * as micro from '@journeyapps-platform/micro'; +import * as framework from 
'@powersync/service-framework'; import * as pgwire from '@powersync/service-jpgwire'; import * as util from '../util/util-index.js'; @@ -20,14 +21,14 @@ export const auth: RouteGenerator = (router) => const config = payload.context.system.config; if (config.dev.demo_auth == false || config.dev.demo_password == null) { - throw new micro.errors.AuthorizationError(['Demo auth disabled']); + throw new framework.errors.AuthorizationError(['Demo auth disabled']); } if (password == config.dev.demo_password) { const token = await issueLegacyDevToken(payload.request, user, payload.context.system.config); return { token, user_id: user, endpoint: endpoint(payload.request) }; } else { - throw new micro.errors.AuthorizationError(['Authentication failed']); + throw new framework.errors.AuthorizationError(['Authentication failed']); } } }); @@ -40,14 +41,14 @@ export const auth2: RouteGenerator = (router) => const config = payload.context.system.config; if (config.dev.demo_auth == false || config.dev.demo_password == null) { - throw new micro.errors.AuthorizationError(['Demo auth disabled']); + throw new framework.errors.AuthorizationError(['Demo auth disabled']); } if (password == config.dev.demo_password) { const token = await issueDevToken(payload.request, user, payload.context.system.config); return { token, user_id: user }; } else { - throw new micro.errors.AuthorizationError(['Authentication failed']); + throw new framework.errors.AuthorizationError(['Authentication failed']); } } }); diff --git a/packages/service-core/src/routes/socket-route.ts b/packages/service-core/src/routes/socket-route.ts index 06d20d5d7..f84813b80 100644 --- a/packages/service-core/src/routes/socket-route.ts +++ b/packages/service-core/src/routes/socket-route.ts @@ -1,6 +1,7 @@ import { serialize } from 'bson'; import { SyncParameters, normalizeTokenParameters } from '@powersync/service-sync-rules'; import * as micro from '@journeyapps-platform/micro'; +import * as framework from '@powersync/service-framework'; import * as util from '../util/util-index.js'; import { streamResponse } from '../sync/sync.js'; @@ -23,7 +24,7 @@ export const sync_stream_reactive: SocketRouteGenerator = (router) => if (system.closed) { responder.onError( - new micro.errors.JourneyError({ + new framework.errors.JourneyError({ status: 503, code: 'SERVICE_UNAVAILABLE', description: 'Service temporarily unavailable' @@ -41,7 +42,7 @@ export const sync_stream_reactive: SocketRouteGenerator = (router) => const cp = await storage.getActiveCheckpoint(); if (!cp.hasSyncRules()) { responder.onError( - new micro.errors.JourneyError({ + new framework.errors.JourneyError({ status: 500, code: 'NO_SYNC_RULES', description: 'No sync rules available' @@ -119,7 +120,7 @@ export const sync_stream_reactive: SocketRouteGenerator = (router) => } catch (ex) { // Convert to our standard form before responding. // This ensures the error can be serialized. 
- const error = new micro.errors.InternalServerError(ex); + const error = new framework.errors.InternalServerError(ex); logger.error('Sync stream error', error); responder.onError(error); } finally { diff --git a/packages/service-core/src/routes/sync-rules.ts b/packages/service-core/src/routes/sync-rules.ts index ede9d2457..47c19c96c 100644 --- a/packages/service-core/src/routes/sync-rules.ts +++ b/packages/service-core/src/routes/sync-rules.ts @@ -1,6 +1,7 @@ import * as t from 'ts-codec'; import { FastifyPluginAsync, FastifyReply } from 'fastify'; import * as micro from '@journeyapps-platform/micro'; +import * as framework from '@powersync/service-framework'; import * as pgwire from '@powersync/service-jpgwire'; import { SqlSyncRules, SyncRulesErrors } from '@powersync/service-sync-rules'; @@ -29,7 +30,7 @@ export const deploySyncRules: RouteGenerator = (router) => handler: async (payload) => { if (payload.context.system.config.sync_rules.present) { // If sync rules are configured via the config, disable deploy via the API. - throw new micro.errors.JourneyError({ + throw new framework.errors.JourneyError({ status: 422, code: 'API_DISABLED', description: 'Sync rules API disabled', @@ -41,7 +42,7 @@ export const deploySyncRules: RouteGenerator = (router) => try { SqlSyncRules.fromYaml(payload.params.content); } catch (e) { - throw new micro.errors.JourneyError({ + throw new framework.errors.JourneyError({ status: 422, code: 'INVALID_SYNC_RULES', description: 'Sync rules parsing failed', @@ -85,7 +86,7 @@ export const currentSyncRules: RouteGenerator = (router) => const storage = payload.context.system.storage; const sync_rules = await storage.getActiveSyncRulesContent(); if (!sync_rules) { - throw new micro.errors.JourneyError({ + throw new framework.errors.JourneyError({ status: 422, code: 'NO_SYNC_RULES', description: 'No active sync rules' @@ -127,7 +128,7 @@ export const reprocessSyncRules: RouteGenerator = (router) => const storage = payload.context.system.storage; const sync_rules = await storage.getActiveSyncRules(); if (sync_rules == null) { - throw new micro.errors.JourneyError({ + throw new framework.errors.JourneyError({ status: 422, code: 'NO_SYNC_RULES', description: 'No active sync rules' diff --git a/packages/service-core/src/routes/sync-stream.ts b/packages/service-core/src/routes/sync-stream.ts index 742b71ee2..3114f9f8b 100644 --- a/packages/service-core/src/routes/sync-stream.ts +++ b/packages/service-core/src/routes/sync-stream.ts @@ -8,7 +8,7 @@ import * as util from '../util/util-index.js'; import { authUser } from './auth.js'; import { RouteGenerator } from './router.js'; import { Metrics } from '../metrics/Metrics.js'; -import { logger } from '@powersync/service-framework'; +import * as framework from '@powersync/service-framework'; export enum SyncRoutes { STREAM = '/sync/stream' @@ -23,7 +23,7 @@ export const syncStreamed: RouteGenerator = (router) => const system = payload.context.system; if (system.closed) { - throw new micro.errors.JourneyError({ + throw new framework.errors.JourneyError({ status: 503, code: 'SERVICE_UNAVAILABLE', description: 'Service temporarily unavailable' @@ -37,7 +37,7 @@ export const syncStreamed: RouteGenerator = (router) => // Sanity check before we start the stream const cp = await storage.getActiveCheckpoint(); if (!cp.hasSyncRules()) { - throw new micro.errors.JourneyError({ + throw new framework.errors.JourneyError({ status: 500, code: 'NO_SYNC_RULES', description: 'No sync rules available' @@ -79,7 +79,7 @@ export const 
syncStreamed: RouteGenerator = (router) => controller.abort(); // Note: This appears as a 200 response in the logs. if (error.message != 'Shutting down system') { - logger.error('Streaming sync request failed', error); + framework.logger.error('Streaming sync request failed', error); } }); await res.send(stream); diff --git a/packages/service-core/src/storage/mongo/MongoBucketBatch.ts b/packages/service-core/src/storage/mongo/MongoBucketBatch.ts index cd9701eec..6d5334844 100644 --- a/packages/service-core/src/storage/mongo/MongoBucketBatch.ts +++ b/packages/service-core/src/storage/mongo/MongoBucketBatch.ts @@ -3,6 +3,7 @@ import { SqliteRow, SqlSyncRules } from '@powersync/service-sync-rules'; import * as bson from 'bson'; import * as mongo from 'mongodb'; +import * as framework from '@powersync/service-framework'; import * as util from '../../util/util-index.js'; import * as replication from '../../replication/replication-index.js'; import { BucketStorageBatch, FlushedResult, mergeToast, SaveOptions } from '../BucketStorage.js'; @@ -13,7 +14,6 @@ import { MongoIdSequence } from './MongoIdSequence.js'; import { cacheKey, OperationBatch, RecordOperation } from './OperationBatch.js'; import { PersistedBatch } from './PersistedBatch.js'; import { BSON_DESERIALIZE_OPTIONS, idPrefixFilter, serializeLookup } from './util.js'; -import { logger } from '@powersync/service-framework'; /** * 15MB @@ -282,7 +282,7 @@ export class MongoBucketBatch implements BucketStorageBatch { micro.alerts.captureMessage( `Data too big on ${record.sourceTable.qualifiedName}.${record.after?.id}: ${e.message}`, { - level: micro.errors.ErrorSeverity.WARNING, + level: framework.errors.ErrorSeverity.WARNING, metadata: { replication_slot: this.slot_name, table: record.sourceTable.qualifiedName @@ -339,14 +339,14 @@ export class MongoBucketBatch implements BucketStorageBatch { micro.alerts.captureMessage( `Failed to evaluate data query on ${record.sourceTable.qualifiedName}.${record.after?.id}: ${error.error}`, { - level: micro.errors.ErrorSeverity.WARNING, + level: framework.errors.ErrorSeverity.WARNING, metadata: { replication_slot: this.slot_name, table: record.sourceTable.qualifiedName } } ); - logger.error( + framework.logger.error( `Failed to evaluate data query on ${record.sourceTable.qualifiedName}.${record.after?.id}: ${error.error}` ); } @@ -379,14 +379,14 @@ export class MongoBucketBatch implements BucketStorageBatch { micro.alerts.captureMessage( `Failed to evaluate parameter query on ${record.sourceTable.qualifiedName}.${record.after?.id}: ${error.error}`, { - level: micro.errors.ErrorSeverity.WARNING, + level: framework.errors.ErrorSeverity.WARNING, metadata: { replication_slot: this.slot_name, table: record.sourceTable.qualifiedName } } ); - logger.error( + framework.logger.error( `Failed to evaluate parameter query on ${record.sourceTable.qualifiedName}.${after.id}: ${error.error}` ); } @@ -440,7 +440,7 @@ export class MongoBucketBatch implements BucketStorageBatch { if (e instanceof mongo.MongoError && e.hasErrorLabel('TransientTransactionError')) { // Likely write conflict caused by concurrent write stream replicating } else { - logger.warn('Transaction error', e as Error); + framework.logger.warn('Transaction error', e as Error); } await new Promise((resolve) => setTimeout(resolve, Math.random() * 50)); throw e; @@ -465,7 +465,7 @@ export class MongoBucketBatch implements BucketStorageBatch { await this.withTransaction(async () => { flushTry += 1; if (flushTry % 10 == 0) { - 
logger.info(`${this.slot_name} ${description} - try ${flushTry}`); + framework.logger.info(`${this.slot_name} ${description} - try ${flushTry}`); } if (flushTry > 20 && Date.now() > lastTry) { throw new Error('Max transaction tries exceeded'); @@ -530,11 +530,13 @@ export class MongoBucketBatch implements BucketStorageBatch { if (this.last_checkpoint_lsn != null && lsn <= this.last_checkpoint_lsn) { // When re-applying transactions, don't create a new checkpoint until // we are past the last transaction. - logger.info(`Re-applied transaction ${lsn} - skipping checkpoint`); + framework.logger.info(`Re-applied transaction ${lsn} - skipping checkpoint`); return false; } if (lsn < this.no_checkpoint_before_lsn) { - logger.info(`Waiting until ${this.no_checkpoint_before_lsn} before creating checkpoint, currently at ${lsn}`); + framework.logger.info( + `Waiting until ${this.no_checkpoint_before_lsn} before creating checkpoint, currently at ${lsn}` + ); return false; } @@ -598,7 +600,7 @@ export class MongoBucketBatch implements BucketStorageBatch { } async save(record: SaveOptions): Promise { - logger.debug(`Saving ${record.tag}:${record.before?.id}/${record.after?.id}`); + framework.logger.debug(`Saving ${record.tag}:${record.before?.id}/${record.after?.id}`); this.batch ??= new OperationBatch(); this.batch.push(new RecordOperation(record)); diff --git a/packages/service-core/test/src/slow_tests.test.ts b/packages/service-core/test/src/slow_tests.test.ts index fd8fc7728..ccc32abad 100644 --- a/packages/service-core/test/src/slow_tests.test.ts +++ b/packages/service-core/test/src/slow_tests.test.ts @@ -10,6 +10,7 @@ import * as pgwire from '@powersync/service-jpgwire'; import { SqliteRow } from '@powersync/service-sync-rules'; import { MongoBucketStorage } from '../../src/storage/MongoBucketStorage.js'; import { PgManager } from '../../src/util/PgManager.js'; +import { createInMemoryProbe } from '@powersync/service-framework'; describe('slow tests - mongodb', function () { // These are slow, inconsistent tests. 
@@ -71,7 +72,8 @@ bucket_definitions: abort_signal: abortController.signal, connections, storage: storage, - factory: f + factory: f, + probe: createInMemoryProbe() }; walStream = new WalStream(options); @@ -186,7 +188,8 @@ bucket_definitions: abort_signal: abortController.signal, connections, storage: storage, - factory: f + factory: f, + probe: createInMemoryProbe() }; walStream = new WalStream(options); diff --git a/packages/service-core/test/src/wal_stream_utils.ts b/packages/service-core/test/src/wal_stream_utils.ts index 92ad02580..b32be9c4e 100644 --- a/packages/service-core/test/src/wal_stream_utils.ts +++ b/packages/service-core/test/src/wal_stream_utils.ts @@ -6,6 +6,7 @@ import { getClientCheckpoint } from '../../src/util/utils.js'; import { TEST_CONNECTION_OPTIONS, clearTestDb } from './util.js'; import { PgManager } from '../../src/util/PgManager.js'; import { JSONBig } from '@powersync/service-jsonbig'; +import { createInMemoryProbe } from '@powersync/service-framework'; /** * Tests operating on the wal stream need to configure the stream and manage asynchronous @@ -69,7 +70,8 @@ export class WalStreamTestContext { storage: this.storage, factory: this.factory, connections: this.connections, - abort_signal: this.abortController.signal + abort_signal: this.abortController.signal, + probe: createInMemoryProbe() }; this._walStream = new WalStream(options); return this._walStream!; diff --git a/packages/service-framework/package.json b/packages/service-framework/package.json index 33b2fe617..3aedfdb77 100644 --- a/packages/service-framework/package.json +++ b/packages/service-framework/package.json @@ -14,6 +14,7 @@ "scripts": { "clean": "rm -r ./dist && tsc -b --clean", "build": "tsc -b", + "build:tests": "tsc -b test/tsconfig.json", "test": "vitest" }, "keywords": [], @@ -22,6 +23,7 @@ "winston": "^3.13.0" }, "devDependencies": { - "@types/lodash": "^4.17.5" + "@types/lodash": "^4.17.5", + "vitest": "^0.34.6" } } diff --git a/packages/service-framework/src/errors/errors-index.ts b/packages/service-framework/src/errors/errors-index.ts new file mode 100644 index 000000000..12d153d5c --- /dev/null +++ b/packages/service-framework/src/errors/errors-index.ts @@ -0,0 +1,2 @@ +export * from './framework-errors'; +export * from './utils'; diff --git a/packages/service-framework/src/errors/framework-errors.ts b/packages/service-framework/src/errors/framework-errors.ts index e69de29bb..cbde26db0 100644 --- a/packages/service-framework/src/errors/framework-errors.ts +++ b/packages/service-framework/src/errors/framework-errors.ts @@ -0,0 +1,152 @@ +/** + * This error class is designed to give consumers of Journey Micro + * a consistent way of "throwing" errors. Specifically, these errors + * will give clients to Journey Micro implementations two things: + * + * 1. An consistent, static error code by which to easily classify errors + * 2. An error message intended for humans + * + * Errors will usually assume that there is some client side error and default to 400 for + * a rest-like response. This can be changed however to more accurately, in restful terms, + * indicate what went wrong. 
+ * + */ + +export enum ErrorSeverity { + INFO = 'info', + WARNING = 'warning', + ERROR = 'error' +} + +export type ErrorData = { + name?: string; + + code: string; + description: string; + + severity?: ErrorSeverity; + details?: string; + status?: number; + stack?: string; + + origin?: string; + + trace_id?: string; +}; + +// Maybe this could be renamed to ServiceError or something similar +export class JourneyError extends Error { + is_journey_error = true; + + errorData: ErrorData; + + constructor(data: ErrorData) { + super(`[${data.code}] ${data.description}\n ${data.details}`); + + this.errorData = data; + if (data.stack) { + this.stack = data.stack; + } + + this.name = data.name || this.constructor.name; + this.errorData.name = this.name; + } + + toString() { + return this.stack; + } + + toJSON(): ErrorData { + if (process.env.NODE_ENV !== 'production') { + return this.errorData; + } + return { + name: this.errorData.name, + code: this.errorData.code, + status: this.errorData.status, + description: this.errorData.description, + details: this.errorData.details, + trace_id: this.errorData.trace_id, + severity: this.errorData.severity, + origin: this.errorData.origin + }; + } + + setTraceId(id: string) { + this.errorData.trace_id = id; + } +} + +export class ValidationError extends JourneyError { + static CODE = 'VALIDATION_ERROR'; + constructor(errors: any) { + super({ + code: ValidationError.CODE, + status: 400, + description: 'Validation failed', + details: JSON.stringify(errors) + }); + } +} + +export class AuthorizationError extends JourneyError { + static CODE = 'AUTHORIZATION'; + constructor(errors: any) { + super({ + code: AuthorizationError.CODE, + status: 401, + description: 'Authorization failed', + details: errors + }); + } +} + +export class InternalServerError extends JourneyError { + static CODE = 'INTERNAL_SERVER_ERROR'; + constructor(err: Error) { + super({ + code: InternalServerError.CODE, + severity: ErrorSeverity.ERROR, + status: 500, + description: 'Something went wrong', + details: err.message, + stack: process.env.NODE_ENV !== 'production' ? err.stack : undefined + }); + } +} + +export class ResourceNotFound extends JourneyError { + static CODE = 'RESOURCE_NOT_FOUND'; + + /** + * @deprecated Use the (resource, id) constructor instead. + * @param id + */ + constructor(id: string); + constructor(resource: string, id: string); + + constructor(resource: string, id?: string) { + const combinedId = id ? 
`${resource}/${id}` : resource; + super({ + code: ResourceNotFound.CODE, + status: 404, + description: 'The requested resource does not exist on this server', + details: `The resource ${combinedId} does not exist on this server`, + severity: ErrorSeverity.INFO + }); + } +} + +export class ResourceConflict extends JourneyError { + static CODE = 'RESOURCE_CONFLICT'; + + constructor(details: string) { + super({ + code: ResourceConflict.CODE, + status: 409, + description: 'The specified resource already exists on this server', + details: details, + severity: ErrorSeverity.INFO + }); + } +} diff --git a/packages/service-framework/src/errors/utils.ts b/packages/service-framework/src/errors/utils.ts new file mode 100644 index 000000000..b9737811d --- /dev/null +++ b/packages/service-framework/src/errors/utils.ts @@ -0,0 +1,20 @@ +import { ErrorData, JourneyError } from './framework-errors'; + +export const isJourneyError = (err: any): err is JourneyError => { + const matches = err instanceof JourneyError || err.is_journey_error; + return !!matches; +}; + +export const getErrorData = (err: Error | any): ErrorData | undefined => { + if (!isJourneyError(err)) { + return; + } + return err.toJSON(); +}; + +export const matchesErrorCode = (err: Error | any, code: string) => { + if (isJourneyError(err)) { + return err.errorData.code === code; + } + return false; +}; diff --git a/packages/service-framework/src/index.ts b/packages/service-framework/src/index.ts index 181c12826..796e92c5a 100644 --- a/packages/service-framework/src/index.ts +++ b/packages/service-framework/src/index.ts @@ -1,8 +1,10 @@ -export * from './logger/Logger'; +export * from './errors/errors-index'; +export * as errors from './errors/errors-index'; -export * from './signals/probes/fs-probes'; -export * from './signals/probes/memory-probes'; -export * from './signals/probes/probes'; -export * from './signals/termination-handler'; +export * from './logger/Logger'; export * from './system/LifeCycledSystem'; +export * as system from './system/system-index'; + +export * from './signals/signals-index'; +export * as signals from './signals/signals-index'; diff --git a/packages/service-framework/src/logger/logger-index.ts b/packages/service-framework/src/logger/logger-index.ts new file mode 100644 index 000000000..58309e335 --- /dev/null +++ b/packages/service-framework/src/logger/logger-index.ts @@ -0,0 +1 @@ +export * from './Logger'; diff --git a/packages/service-framework/src/signals/signals-index.ts b/packages/service-framework/src/signals/signals-index.ts new file mode 100644 index 000000000..8ae0a96af --- /dev/null +++ b/packages/service-framework/src/signals/signals-index.ts @@ -0,0 +1,4 @@ +export * from './probes/fs-probes'; +export * from './probes/memory-probes'; +export * from './probes/probes'; +export * from './termination-handler'; diff --git a/packages/service-framework/src/system/system-index.ts b/packages/service-framework/src/system/system-index.ts new file mode 100644 index 000000000..bbd8eca75 --- /dev/null +++ b/packages/service-framework/src/system/system-index.ts @@ -0,0 +1 @@ +export * from './LifeCycledSystem'; diff --git a/packages/service-framework/test/__snapshots__/errors.test.ts.snap b/packages/service-framework/test/__snapshots__/errors.test.ts.snap new file mode 100644 index 000000000..93bda2842 --- /dev/null +++ b/packages/service-framework/test/__snapshots__/errors.test.ts.snap @@ -0,0 +1,10 @@ +// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html + +exports[`errors > utilities should properly 
match a journey error 1`] = ` +{ + "code": "CUSTOM_JOURNEY_ERROR", + "description": "This is a custom error", + "details": "this is some more detailed information", + "name": "CustomJourneyError", +} +`; diff --git a/packages/service-framework/test/src/__snapshots__/errors.test.ts.snap b/packages/service-framework/test/src/__snapshots__/errors.test.ts.snap new file mode 100644 index 000000000..93bda2842 --- /dev/null +++ b/packages/service-framework/test/src/__snapshots__/errors.test.ts.snap @@ -0,0 +1,10 @@ +// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html + +exports[`errors > utilities should properly match a journey error 1`] = ` +{ + "code": "CUSTOM_JOURNEY_ERROR", + "description": "This is a custom error", + "details": "this is some more detailed information", + "name": "CustomJourneyError", +} +`; diff --git a/packages/service-framework/test/src/errors.test.ts b/packages/service-framework/test/src/errors.test.ts new file mode 100644 index 000000000..200247281 --- /dev/null +++ b/packages/service-framework/test/src/errors.test.ts @@ -0,0 +1,49 @@ +import { describe, test, expect } from 'vitest'; + +import * as micro_errors from '../../src/errors/errors-index'; + +class CustomJourneyError extends micro_errors.JourneyError { + constructor() { + super({ + code: 'CUSTOM_JOURNEY_ERROR', + description: 'This is a custom error', + details: 'this is some more detailed information' + }); + } +} + +describe('errors', () => { + test('it should respond to instanceof checks', () => { + const error = new CustomJourneyError(); + + expect(error instanceof Error).toBe(true); + expect(error instanceof micro_errors.JourneyError).toBe(true); + expect(error.name).toBe('CustomJourneyError'); + }); + + test('it should serialize properly', () => { + const error = new CustomJourneyError(); + + // The error stack will contain host specific path information. 
We only care about the header + // anyway and that the stack is shown - indicated by the initial `at` text + const initial = `CustomJourneyError: [CUSTOM_JOURNEY_ERROR] This is a custom error + this is some more detailed information + at`; + + expect(`${error}`.startsWith(initial)).toBe(true); + }); + + test('utilities should properly match a journey error', () => { + const standard_error = new Error('non-journey error'); + const error = new CustomJourneyError(); + + expect(micro_errors.isJourneyError(standard_error)).toBe(false); + expect(micro_errors.isJourneyError(error)).toBe(true); + + expect(micro_errors.matchesErrorCode(error, 'CUSTOM_JOURNEY_ERROR')).toBe(true); + expect(micro_errors.matchesErrorCode(standard_error, 'CUSTOM_JOURNEY_ERROR')).toBe(false); + + expect(micro_errors.getErrorData(error)).toMatchSnapshot(); + expect(micro_errors.getErrorData(standard_error)).toBe(undefined); + }); +}); diff --git a/packages/service-framework/test/tsconfig.json b/packages/service-framework/test/tsconfig.json new file mode 100644 index 000000000..6f06c7eda --- /dev/null +++ b/packages/service-framework/test/tsconfig.json @@ -0,0 +1,16 @@ +{ + "extends": "../../../tsconfig.base.json", + "compilerOptions": { + "rootDir": "src", + "noEmit": true, + "esModuleInterop": true, + "skipLibCheck": true, + "sourceMap": true + }, + "include": ["src"], + "references": [ + { + "path": "../" + } + ] +} diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index fda0fe6df..00dde34ca 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -74,6 +74,9 @@ importers: packages/rsocket-router: dependencies: + '@journeyapps-platform/micro': + specifier: ^17.0.1 + version: 17.0.1(encoding@0.1.13)(socks@2.8.3) '@powersync/service-framework': specifier: workspace:* version: link:../service-framework @@ -230,6 +233,9 @@ importers: '@types/lodash': specifier: ^4.17.5 version: 4.17.5 + vitest: + specifier: ^0.34.6 + version: 0.34.6 packages/sync-rules: dependencies: diff --git a/service/src/entry.ts b/service/src/entry.ts index 597e2c75a..fcba2d7a3 100644 --- a/service/src/entry.ts +++ b/service/src/entry.ts @@ -2,10 +2,12 @@ import '@journeyapps-platform/micro/register'; import './util/register-alerting.js'; import winston from 'winston'; -import { entry, utils, system, Logger } from '@powersync/service-core'; +import { entry, utils } from '@powersync/service-core'; +import * as framework from '@powersync/service-framework'; + // Configure logging to console -system.logger.configure({ - format: utils.env.NODE_ENV == 'production' ? Logger.production_format : Logger.development_format, +framework.logger.configure({ + format: utils.env.NODE_ENV == 'production' ? 
framework.Logger.production_format : framework.Logger.development_format, transports: [new winston.transports.Console()] }); diff --git a/service/src/runners/server.ts b/service/src/runners/server.ts index f77617a32..f0c4ff89b 100644 --- a/service/src/runners/server.ts +++ b/service/src/runners/server.ts @@ -2,9 +2,9 @@ import { deserialize } from 'bson'; import fastify from 'fastify'; import cors from '@fastify/cors'; import * as micro from '@journeyapps-platform/micro'; +import * as framework from '@powersync/service-framework'; import { RSocketRequestMeta } from '@powersync/service-rsocket-router'; import { Metrics, routes, utils } from '@powersync/service-core'; -import { logger } from '@powersync/service-framework'; import { PowerSyncSystem } from '../system/PowerSyncSystem.js'; import { Router, SocketRouter, StreamingRouter } from '../routes/router.js'; @@ -12,7 +12,7 @@ import { Router, SocketRouter, StreamingRouter } from '../routes/router.js'; * Starts an API server */ export async function startServer(runnerConfig: utils.RunnerConfig) { - logger.info('Booting'); + framework.logger.info('Booting'); const config = await utils.loadConfig(runnerConfig); const system = new PowerSyncSystem(config); @@ -55,7 +55,7 @@ export async function startServer(runnerConfig: utils.RunnerConfig) { const { token } = routes.RSocketContextMeta.decode(deserialize(data) as any); if (!token) { - throw new micro.errors.ValidationError('No token provided in context'); + throw new framework.errors.ValidationError('No token provided in context'); } try { @@ -70,7 +70,7 @@ export async function startServer(runnerConfig: utils.RunnerConfig) { }; } } catch (ex) { - logger.error(ex); + framework.logger.error(ex); } return { @@ -85,9 +85,9 @@ export async function startServer(runnerConfig: utils.RunnerConfig) { payloadDecoder: async (rawData?: Buffer) => rawData && deserialize(rawData) }); - logger.info('Starting system'); + framework.logger.info('Starting system'); await system.start(); - logger.info('System started'); + framework.logger.info('System started'); Metrics.getInstance().configureApiMetrics(); @@ -97,7 +97,7 @@ export async function startServer(runnerConfig: utils.RunnerConfig) { // This is so that the handler is run before the server's handler, allowing streams to be interrupted on exit system.addTerminationHandler(); - logger.info(`Running on port ${system.config.port}`); + framework.logger.info(`Running on port ${system.config.port}`); await system.probe.ready(); // Enable in development to track memory usage: From 795aaf43306f4154c9d597e5ca687a8828aad24d Mon Sep 17 00:00:00 2001 From: Steven Ontong Date: Thu, 13 Jun 2024 13:57:12 +0200 Subject: [PATCH 11/36] add schema validation --- packages/service-core/src/routes/admin.ts | 15 ++- .../service-core/src/routes/checkpointing.ts | 7 +- packages/service-core/src/routes/dev.ts | 9 +- .../service-core/src/routes/socket-route.ts | 3 +- .../service-core/src/routes/sync-rules.ts | 6 +- .../service-core/src/routes/sync-stream.ts | 3 +- .../config/collectors/config-collector.ts | 8 +- packages/service-framework/package.json | 4 + .../src/codec/codec-index.ts | 2 + .../service-framework/src/codec/codecs.ts | 124 +++++++++++++++++ .../service-framework/src/codec/parsers.ts | 60 +++++++++ packages/service-framework/src/index.ts | 10 +- .../src/schema/definitions.ts | 20 +++ .../src/schema/json-schema/keywords.ts | 27 ++++ .../src/schema/json-schema/parser.ts | 127 ++++++++++++++++++ .../src/schema/schema-index.ts | 6 + 
.../service-framework/src/schema/utils.ts | 48 +++++++ .../src/schema/validators/schema-validator.ts | 83 ++++++++++++ .../schema/validators/ts-codec-validator.ts | 28 ++++ pnpm-lock.yaml | 12 ++ 20 files changed, 574 insertions(+), 28 deletions(-) create mode 100644 packages/service-framework/src/codec/codec-index.ts create mode 100644 packages/service-framework/src/codec/codecs.ts create mode 100644 packages/service-framework/src/codec/parsers.ts create mode 100644 packages/service-framework/src/schema/definitions.ts create mode 100644 packages/service-framework/src/schema/json-schema/keywords.ts create mode 100644 packages/service-framework/src/schema/json-schema/parser.ts create mode 100644 packages/service-framework/src/schema/schema-index.ts create mode 100644 packages/service-framework/src/schema/utils.ts create mode 100644 packages/service-framework/src/schema/validators/schema-validator.ts create mode 100644 packages/service-framework/src/schema/validators/ts-codec-validator.ts diff --git a/packages/service-core/src/routes/admin.ts b/packages/service-core/src/routes/admin.ts index 27ceeaa83..0465011d7 100644 --- a/packages/service-core/src/routes/admin.ts +++ b/packages/service-core/src/routes/admin.ts @@ -1,4 +1,3 @@ -import * as micro from '@journeyapps-platform/micro'; import * as framework from '@powersync/service-framework'; import { SqlSyncRules, SqliteValue, StaticSchema, isJsonValue, toSyncRulesValue } from '@powersync/service-sync-rules'; import { internal_routes } from '@powersync/service-types'; @@ -13,7 +12,9 @@ import { authApi } from './auth.js'; const demoCredentials: RouteGenerator = (router) => router.post('/api/admin/v1/demo-credentials', { authorize: authApi, - validator: micro.schema.createTsCodecValidator(internal_routes.DemoCredentialsRequest, { allowAdditional: true }), + validator: framework.schema.createTsCodecValidator(internal_routes.DemoCredentialsRequest, { + allowAdditional: true + }), handler: async (payload) => { const connection = payload.context.system.config.connection; if (connection == null || !connection.demo_database) { @@ -32,7 +33,7 @@ const demoCredentials: RouteGenerator = (router) => export const executeSql: RouteGenerator = (router) => router.post('/api/admin/v1/execute-sql', { authorize: authApi, - validator: micro.schema.createTsCodecValidator(internal_routes.ExecuteSqlRequest, { allowAdditional: true }), + validator: framework.schema.createTsCodecValidator(internal_routes.ExecuteSqlRequest, { allowAdditional: true }), handler: async (payload) => { const connection = payload.context.system.config.connection; if (connection == null || !connection.debug_api) { @@ -81,7 +82,7 @@ export const executeSql: RouteGenerator = (router) => export const diagnostics: RouteGenerator = (router) => router.post('/api/admin/v1/diagnostics', { authorize: authApi, - validator: micro.schema.createTsCodecValidator(internal_routes.DiagnosticsRequest, { allowAdditional: true }), + validator: framework.schema.createTsCodecValidator(internal_routes.DiagnosticsRequest, { allowAdditional: true }), handler: async (payload) => { const include_content = payload.params.sync_rules_content ?? 
false; const system = payload.context.system; @@ -120,7 +121,7 @@ export const diagnostics: RouteGenerator = (router) => export const getSchema: RouteGenerator = (router) => router.post('/api/admin/v1/schema', { authorize: authApi, - validator: micro.schema.createTsCodecValidator(internal_routes.GetSchemaRequest, { allowAdditional: true }), + validator: framework.schema.createTsCodecValidator(internal_routes.GetSchemaRequest, { allowAdditional: true }), handler: async (payload) => { const system = payload.context.system; @@ -131,7 +132,7 @@ export const getSchema: RouteGenerator = (router) => export const reprocess: RouteGenerator = (router) => router.post('/api/admin/v1/reprocess', { authorize: authApi, - validator: micro.schema.createTsCodecValidator(internal_routes.ReprocessRequest, { allowAdditional: true }), + validator: framework.schema.createTsCodecValidator(internal_routes.ReprocessRequest, { allowAdditional: true }), handler: async (payload) => { const system = payload.context.system; @@ -169,7 +170,7 @@ export const reprocess: RouteGenerator = (router) => export const validate: RouteGenerator = (router) => router.post('/api/admin/v1/validate', { authorize: authApi, - validator: micro.schema.createTsCodecValidator(internal_routes.ValidateRequest, { allowAdditional: true }), + validator: framework.schema.createTsCodecValidator(internal_routes.ValidateRequest, { allowAdditional: true }), handler: async (payload) => { const system = payload.context.system; diff --git a/packages/service-core/src/routes/checkpointing.ts b/packages/service-core/src/routes/checkpointing.ts index f4bbf3118..d0b819acd 100644 --- a/packages/service-core/src/routes/checkpointing.ts +++ b/packages/service-core/src/routes/checkpointing.ts @@ -1,8 +1,7 @@ import * as t from 'ts-codec'; -import * as micro from '@journeyapps-platform/micro'; +import * as framework from '@powersync/service-framework'; import * as util from '../util/util-index.js'; - import { authUser } from './auth.js'; import { RouteGenerator } from './router.js'; @@ -11,7 +10,7 @@ const WriteCheckpointRequest = t.object({}); export const writeCheckpoint: RouteGenerator = (router) => router.get('/write-checkpoint.json', { authorize: authUser, - validator: micro.schema.createTsCodecValidator(WriteCheckpointRequest, { allowAdditional: true }), + validator: framework.schema.createTsCodecValidator(WriteCheckpointRequest, { allowAdditional: true }), handler: async (payload) => { const system = payload.context.system; const storage = system.storage; @@ -26,7 +25,7 @@ export const writeCheckpoint: RouteGenerator = (router) => export const writeCheckpoint2: RouteGenerator = (router) => router.get('/write-checkpoint2.json', { authorize: authUser, - validator: micro.schema.createTsCodecValidator(WriteCheckpointRequest, { allowAdditional: true }), + validator: framework.schema.createTsCodecValidator(WriteCheckpointRequest, { allowAdditional: true }), handler: async (payload) => { const { user_id, system } = payload.context; const storage = system.storage; diff --git a/packages/service-core/src/routes/dev.ts b/packages/service-core/src/routes/dev.ts index 58cc14eef..11a800ee4 100644 --- a/packages/service-core/src/routes/dev.ts +++ b/packages/service-core/src/routes/dev.ts @@ -1,5 +1,4 @@ import * as t from 'ts-codec'; -import * as micro from '@journeyapps-platform/micro'; import * as framework from '@powersync/service-framework'; import * as pgwire from '@powersync/service-jpgwire'; @@ -15,7 +14,7 @@ const AuthParams = t.object({ // For legacy web client 
only. Remove soon. export const auth: RouteGenerator = (router) => router.post('/auth.json', { - validator: micro.schema.createTsCodecValidator(AuthParams, { allowAdditional: true }), + validator: framework.schema.createTsCodecValidator(AuthParams, { allowAdditional: true }), handler: async (payload) => { const { user, password } = payload.params; const config = payload.context.system.config; @@ -35,7 +34,7 @@ export const auth: RouteGenerator = (router) => export const auth2: RouteGenerator = (router) => router.post('/dev/auth.json', { - validator: micro.schema.createTsCodecValidator(AuthParams, { allowAdditional: true }), + validator: framework.schema.createTsCodecValidator(AuthParams, { allowAdditional: true }), handler: async (payload) => { const { user, password } = payload.params; const config = payload.context.system.config; @@ -57,7 +56,7 @@ const TokenParams = t.object({}); export const token: RouteGenerator = (router) => router.post('/dev/token.json', { - validator: micro.schema.createTsCodecValidator(TokenParams, { allowAdditional: true }), + validator: framework.schema.createTsCodecValidator(TokenParams, { allowAdditional: true }), authorize: authDevUser, handler: async (payload) => { const { user_id } = payload.context; @@ -86,7 +85,7 @@ const CrudRequest = t.object({ }); export const crud: RouteGenerator = (router) => router.post('/crud.json', { - validator: micro.schema.createTsCodecValidator(CrudRequest, { allowAdditional: true }), + validator: framework.schema.createTsCodecValidator(CrudRequest, { allowAdditional: true }), authorize: authUser, handler: async (payload) => { diff --git a/packages/service-core/src/routes/socket-route.ts b/packages/service-core/src/routes/socket-route.ts index f84813b80..0f905c854 100644 --- a/packages/service-core/src/routes/socket-route.ts +++ b/packages/service-core/src/routes/socket-route.ts @@ -1,6 +1,5 @@ import { serialize } from 'bson'; import { SyncParameters, normalizeTokenParameters } from '@powersync/service-sync-rules'; -import * as micro from '@journeyapps-platform/micro'; import * as framework from '@powersync/service-framework'; import * as util from '../util/util-index.js'; @@ -18,7 +17,7 @@ export const sync_stream_reactive: SocketRouteGenerator = (router) => errors: ['Authentication required'].concat(context.token_errors ?? []) }; }, - validator: micro.schema.createTsCodecValidator(util.StreamingSyncRequest, { allowAdditional: true }), + validator: framework.schema.createTsCodecValidator(util.StreamingSyncRequest, { allowAdditional: true }), handler: async ({ context, params, responder, observer, initialN }) => { const { system } = context; diff --git a/packages/service-core/src/routes/sync-rules.ts b/packages/service-core/src/routes/sync-rules.ts index 47c19c96c..631a73e00 100644 --- a/packages/service-core/src/routes/sync-rules.ts +++ b/packages/service-core/src/routes/sync-rules.ts @@ -26,7 +26,7 @@ export const deploySyncRules: RouteGenerator = (router) => authorize: authApi, parse: true, plugins: [yamlPlugin], - validator: micro.schema.createTsCodecValidator(DeploySyncRulesRequest, { allowAdditional: true }), + validator: framework.schema.createTsCodecValidator(DeploySyncRulesRequest, { allowAdditional: true }), handler: async (payload) => { if (payload.context.system.config.sync_rules.present) { // If sync rules are configured via the config, disable deploy via the API. 
@@ -69,7 +69,7 @@ export const validateSyncRules: RouteGenerator = (router) => authorize: authApi, parse: true, plugins: [yamlPlugin], - validator: micro.schema.createTsCodecValidator(ValidateSyncRulesRequest, { allowAdditional: true }), + validator: framework.schema.createTsCodecValidator(ValidateSyncRulesRequest, { allowAdditional: true }), handler: async (payload) => { const content = payload.params.content; @@ -123,7 +123,7 @@ const ReprocessSyncRulesRequest = t.object({}); export const reprocessSyncRules: RouteGenerator = (router) => router.post('/api/sync-rules/v1/reprocess', { authorize: authApi, - validator: micro.schema.createTsCodecValidator(ReprocessSyncRulesRequest), + validator: framework.schema.createTsCodecValidator(ReprocessSyncRulesRequest), handler: async (payload) => { const storage = payload.context.system.storage; const sync_rules = await storage.getActiveSyncRules(); diff --git a/packages/service-core/src/routes/sync-stream.ts b/packages/service-core/src/routes/sync-stream.ts index 3114f9f8b..b6e727c96 100644 --- a/packages/service-core/src/routes/sync-stream.ts +++ b/packages/service-core/src/routes/sync-stream.ts @@ -1,5 +1,4 @@ import { Readable } from 'stream'; -import * as micro from '@journeyapps-platform/micro'; import { SyncParameters, normalizeTokenParameters } from '@powersync/service-sync-rules'; import * as sync from '../sync/sync-index.js'; @@ -17,7 +16,7 @@ export enum SyncRoutes { export const syncStreamed: RouteGenerator = (router) => router.post(SyncRoutes.STREAM, { authorize: authUser, - validator: micro.schema.createTsCodecValidator(util.StreamingSyncRequest, { allowAdditional: true }), + validator: framework.schema.createTsCodecValidator(util.StreamingSyncRequest, { allowAdditional: true }), handler: async (payload) => { const userId = payload.context.user_id!; const system = payload.context.system; diff --git a/packages/service-core/src/util/config/collectors/config-collector.ts b/packages/service-core/src/util/config/collectors/config-collector.ts index 2097e0eb2..dc2068dc8 100644 --- a/packages/service-core/src/util/config/collectors/config-collector.ts +++ b/packages/service-core/src/util/config/collectors/config-collector.ts @@ -1,8 +1,10 @@ import * as t from 'ts-codec'; +import * as yaml from 'yaml'; + import { configFile } from '@powersync/service-types'; -import * as micro from '@journeyapps-platform/micro'; +import * as framework from '@powersync/service-framework'; + import { RunnerConfig } from '../types.js'; -import * as yaml from 'yaml'; export enum ConfigFileFormat { YAML = 'yaml', @@ -22,7 +24,7 @@ export enum ConfigFileFormat { const YAML_ENV_PREFIX = 'PS_'; // ts-codec itself doesn't give great validation errors, so we use json schema for that -const configSchemaValidator = micro.schema +const configSchemaValidator = framework.schema .parseJSONSchema( t.generateJSONSchema(configFile.powerSyncConfig, { allowAdditional: true, parsers: [configFile.portParser] }) ) diff --git a/packages/service-framework/package.json b/packages/service-framework/package.json index 3aedfdb77..ff432fba4 100644 --- a/packages/service-framework/package.json +++ b/packages/service-framework/package.json @@ -19,7 +19,11 @@ }, "keywords": [], "dependencies": { + "ajv": "^8.12.0", + "better-ajv-errors": "^1.2.0", + "bson": "^6.6.0", "lodash": "^4.17.21", + "ts-codec": "^1.2.2", "winston": "^3.13.0" }, "devDependencies": { diff --git a/packages/service-framework/src/codec/codec-index.ts b/packages/service-framework/src/codec/codec-index.ts new file mode 100644 
index 000000000..d044f08c2 --- /dev/null +++ b/packages/service-framework/src/codec/codec-index.ts @@ -0,0 +1,2 @@ +export * from './codecs'; +export * from './parsers'; diff --git a/packages/service-framework/src/codec/codecs.ts b/packages/service-framework/src/codec/codecs.ts new file mode 100644 index 000000000..5c7acc920 --- /dev/null +++ b/packages/service-framework/src/codec/codecs.ts @@ -0,0 +1,124 @@ +import * as t from 'ts-codec'; +import * as bson from 'bson'; + +export const buffer = t.codec( + 'Buffer', + (buffer) => { + if (!Buffer.isBuffer(buffer)) { + throw new t.TransformError([`Expected buffer but got ${typeof buffer}`]); + } + return buffer.toString('base64'); + }, + (buffer) => Buffer.from(buffer, 'base64') +); + +export const date = t.codec( + 'Date', + (date) => { + if (!(date instanceof Date)) { + throw new t.TransformError([`Expected Date but got ${typeof date}`]); + } + return date.toISOString(); + }, + (date) => { + const parsed = new Date(date); + if (isNaN(parsed.getTime())) { + throw new t.TransformError([`Invalid date`]); + } + return parsed; + } +); + +const assertObjectId = (value: any) => { + if (!bson.ObjectId.isValid(value)) { + throw new t.TransformError([`Expected an ObjectId but got ${typeof value}`]); + } +}; +export const ObjectId = t.codec( + 'ObjectId', + (id) => { + assertObjectId(id); + return id.toHexString(); + }, + (id) => { + assertObjectId(id); + return new bson.ObjectId(id); + } +); + +const assertObjectWithField = (field: string, data: any) => { + if (typeof data !== 'object') { + throw new t.TransformError([`Expected an object but got ${typeof data}`]); + } + if (!(field in data)) { + throw new t.TransformError([`Expected ${field} to be a member of object`]); + } +}; +export const ResourceId = t.codec<{ _id: bson.ObjectId }, { id: string }>( + 'ResourceId', + (data) => { + assertObjectWithField('_id', data); + return { + id: ObjectId.encode(data._id) + }; + }, + (data) => { + assertObjectWithField('id', data); + return { + _id: ObjectId.decode(data.id) + }; + } +); + +export const Timestamps = t.object({ + created_at: date, + updated_at: date +}); + +export const Resource = ResourceId.and(Timestamps); + +export const QueryFilter = t.object({ + exists: t.boolean +}); + +export const makeQueryFilter = (type: t.AnyCodec) => { + return type.or(t.array(type)).or(QueryFilter).optional(); +}; + +export const FilterProperties = ( + type: T +): t.Codec< + { [K in keyof t.Encoded]?: t.Ix[K] | t.Ix[K][] | t.Ix }, + { [K in keyof t.Encoded]?: t.Ox[K] | t.Ox[K][] | t.Ox }, + 'FilterProperties' +> => { + let codecs = new Map(); + + const addCodecs = (codec: t.ObjectCodec) => { + if (codec.props?.shape) { + Object.keys(codec.props.shape).forEach((k) => { + codecs.set(k, codec.props.shape[k]); + }); + } + }; + + if (type._tag === t.CodecType.Object) { + addCodecs(type); + } else if (type._tag === t.CodecType.Intersection) { + type.props.codecs.forEach((c: t.AnyCodec) => { + addCodecs(c); + }); + } + + t.object({ + test: t.string + }); + + // @ts-ignore + return t.object( + Array.from(codecs.keys()).reduce((prev: any, cur) => { + prev[cur] = makeQueryFilter(codecs.get(cur)!); + return prev; + }, {}) + ); +}; diff --git a/packages/service-framework/src/codec/parsers.ts b/packages/service-framework/src/codec/parsers.ts new file mode 100644 index 000000000..038636ebe --- /dev/null +++ b/packages/service-framework/src/codec/parsers.ts @@ -0,0 +1,60 @@ +import * as codecs from './codecs'; +import * as t from 'ts-codec'; + +export const ObjectIdParser = 
t.createParser(codecs.ObjectId._tag, (_, { target }) => {
+  switch (target) {
+    case t.TransformTarget.Encoded: {
+      return { type: 'string' };
+    }
+    case t.TransformTarget.Decoded: {
+      return { bsonType: 'ObjectId' };
+    }
+  }
+});
+
+export const ResourceIdParser = t.createParser(codecs.ResourceId._tag, (_, { target }) => {
+  switch (target) {
+    case t.TransformTarget.Encoded: {
+      return {
+        type: 'object',
+        properties: {
+          id: { type: 'string' }
+        },
+        required: ['id']
+      };
+    }
+    case t.TransformTarget.Decoded: {
+      return {
+        type: 'object',
+        properties: {
+          _id: { bsonType: 'ObjectId' }
+        },
+        required: ['_id']
+      };
+    }
+  }
+});
+
+export const DateParser = t.createParser(codecs.date._tag, (_, { target }) => {
+  switch (target) {
+    case t.TransformTarget.Encoded: {
+      return { type: 'string' };
+    }
+    case t.TransformTarget.Decoded: {
+      return { nodeType: 'date' };
+    }
+  }
+});
+
+export const BufferParser = t.createParser(codecs.buffer._tag, (_, { target }) => {
+  switch (target) {
+    case t.TransformTarget.Encoded: {
+      return { type: 'string' };
+    }
+    case t.TransformTarget.Decoded: {
+      return { nodeType: 'buffer' };
+    }
+  }
+});
+
+export const parsers = [ObjectIdParser, ResourceIdParser, DateParser, BufferParser];
diff --git a/packages/service-framework/src/index.ts b/packages/service-framework/src/index.ts
index 796e92c5a..08a66bdb1 100644
--- a/packages/service-framework/src/index.ts
+++ b/packages/service-framework/src/index.ts
@@ -1,10 +1,16 @@
+export * from './codec/codec-index';
+export * as codecs from './codec/codec-index';
+
 export * from './errors/errors-index';
 export * as errors from './errors/errors-index';
 
 export * from './logger/Logger';
 
-export * from './system/LifeCycledSystem';
-export * as system from './system/system-index';
+export * from './schema/schema-index';
+export * as schema from './schema/schema-index';
 
 export * from './signals/signals-index';
 export * as signals from './signals/signals-index';
+
+export * from './system/LifeCycledSystem';
+export * as system from './system/system-index';
diff --git a/packages/service-framework/src/schema/definitions.ts b/packages/service-framework/src/schema/definitions.ts
new file mode 100644
index 000000000..e293df334
--- /dev/null
+++ b/packages/service-framework/src/schema/definitions.ts
@@ -0,0 +1,20 @@
+export type JSONSchema = {
+  definitions?: Record<string, any>;
+  [key: string]: any;
+};
+
+export type IValidationRight = {
+  valid: true;
+};
+
+export type ValidationLeft<T = any> = {
+  valid: false;
+  errors: T;
+};
+
+export type ValidationResponse<T = any> = ValidationLeft<T> | IValidationRight;
+
+export type MicroValidator<T = any> = {
+  validate: (data: T) => ValidationResponse;
+  toJSONSchema?: () => JSONSchema;
+};
diff --git a/packages/service-framework/src/schema/json-schema/keywords.ts b/packages/service-framework/src/schema/json-schema/keywords.ts
new file mode 100644
index 000000000..06ee104a1
--- /dev/null
+++ b/packages/service-framework/src/schema/json-schema/keywords.ts
@@ -0,0 +1,27 @@
+import * as ajv from 'ajv';
+
+export const BufferNodeType: ajv.KeywordDefinition = {
+  keyword: 'nodeType',
+  metaSchema: {
+    type: 'string',
+    enum: ['buffer', 'date']
+  },
+  error: {
+    message: ({ schemaCode }) => {
+      return ajv.str`should be a ${schemaCode}`;
+    }
+  },
+  code(context) {
+    switch (context.schema) {
+      case 'buffer': {
+        return context.fail(ajv._`!Buffer.isBuffer(${context.data})`);
+      }
+      case 'date': {
+        return context.fail(ajv._`!(${context.data} instanceof Date)`);
+      }
+      default: {
+        context.fail(ajv._`true`);
+      }
+    }
+  }
+};
diff --git a/packages/service-framework/src/schema/json-schema/parser.ts b/packages/service-framework/src/schema/json-schema/parser.ts
new file mode 100644
index 000000000..38f4593f5
--- /dev/null
+++ b/packages/service-framework/src/schema/json-schema/parser.ts
@@ -0,0 +1,127 @@
+import * as schema_validator from '../validators/schema-validator';
+import * as defs from '../definitions';
+
+/**
+ * Recursively walk a given schema resolving a list of refs that are actively used in some way by the
+ * root schema. This information can then later be used to prune unused definitions.
+ *
+ * This only works for top-level references to 'definitions' as this is intended to be used in
+ * conjunction with tools that generate schemas in deterministic ways. For a more general
+ * implementation one should make use of `$RefParser`.
+ *
+ * We don't use this here as it resolves to a Promise, which we want to avoid for this tool
+ */
+export const findUsedRefs = (schema: any, definitions = schema.definitions, cache: string[] = []): string[] => {
+  if (Array.isArray(schema)) {
+    return schema
+      .map((subschema) => {
+        return findUsedRefs(subschema, definitions, cache);
+      })
+      .flat();
+  }
+
+  if (typeof schema === 'object') {
+    return Object.keys(schema).reduce((used: string[], key) => {
+      const value = schema[key];
+      if (key === '$ref') {
+        if (cache.includes(value)) {
+          return used;
+        }
+        cache.push(value);
+        const subschema = definitions[value.replace('#/definitions/', '')];
+        used.push(value, ...findUsedRefs(subschema, definitions, cache));
+        return used;
+      }
+      if (key === 'definitions') {
+        return used;
+      }
+      return used.concat(findUsedRefs(value, definitions, cache));
+    }, []);
+  }
+
+  return [];
+};
+
+/**
+ * Prune a given subschema definitions map by comparing keys against a given collection of
+ * definition keys that are referenced in some way, either directly or indirectly, by the
+ * root schema
+ */
+export const pruneDefinitions = (definitions: Record<string, any>, refs: string[]) => {
+  return Object.keys(definitions).reduce((pruned: Record<string, any>, key) => {
+    if (refs.includes(`#/definitions/${key}`)) {
+      pruned[key] = definitions[key];
+    }
+    return pruned;
+  }, {});
+};
+
+export type CompilerFunction = () => defs.JSONSchema;
+export type ValidatorFunction = (
+  params?: schema_validator.CreateSchemaValidatorParams
+) => schema_validator.SchemaValidator;
+
+export type Compilers = {
+  compile: CompilerFunction;
+  validator: ValidatorFunction;
+};
+export type WithCompilers<T> = Compilers & {
+  [P in keyof T]: T[P] extends Record<string, any> ? WithCompilers<T[P]> : T[P];
+};
+
+/**
+ * Given a JSON Schema containing a `definitions` entry, return a Proxy representation of the same
+ * schema which responds to `compile` and `validator` arbitrarily deep.
+ *
+ * Calling compile on a sub-schema will 'inject' the root schema `definitions` mapping and remove
+ * the Proxy wrapping.
+ *
+ * Calling `validator` on a sub-schema will `compile` and then create a SchemaValidator from the
+ * resulting schema
+ */
+export const parseJSONSchema = <T extends defs.JSONSchema>(
+  schema: T,
+  definitions = schema.definitions
+): WithCompilers<T> => {
+  return new Proxy(schema, {
+    get(target: any, prop) {
+      const compile: CompilerFunction = () => {
+        const schema = {
+          definitions: definitions,
+          ...target
+        };
+
+        if (!schema.definitions) {
+          return schema;
+        }
+
+        const used = findUsedRefs(schema);
+        return {
+          ...schema,
+          definitions: pruneDefinitions(schema.definitions, used)
+        };
+      };
+      const validator: ValidatorFunction = (options) => {
+        return schema_validator.createSchemaValidator(compile(), options);
+      };
+
+      if (prop === 'compile') {
+        return compile;
+      }
+      if (prop === 'validator') {
+        return validator;
+      }
+
+      const subschema = target[prop];
+
+      if (Array.isArray(subschema)) {
+        return subschema;
+      }
+      if (typeof subschema !== 'object') {
+        return subschema;
+      }
+
+      return parseJSONSchema(subschema, definitions);
+    }
+  });
+};
diff --git a/packages/service-framework/src/schema/schema-index.ts b/packages/service-framework/src/schema/schema-index.ts
new file mode 100644
index 000000000..6dceedbbd
--- /dev/null
+++ b/packages/service-framework/src/schema/schema-index.ts
@@ -0,0 +1,6 @@
+export * from './definitions';
+
+export * from './json-schema/keywords';
+export * from './json-schema/parser';
+export * from './validators/schema-validator';
+export * from './validators/ts-codec-validator';
diff --git a/packages/service-framework/src/schema/utils.ts b/packages/service-framework/src/schema/utils.ts
new file mode 100644
index 000000000..af96afe32
--- /dev/null
+++ b/packages/service-framework/src/schema/utils.ts
@@ -0,0 +1,48 @@
+import * as errors from '../errors/errors-index';
+import * as defs from './definitions';
+
+export type Schema = {
+  additionalProperties?: boolean | Schema;
+  [key: string]: any;
+};
+
+/**
+ * Utility function to strip out `additionalProperties` fields from a given JSON-Schema. This can be used
+ * to make a schema less strict which may be necessary for certain use-cases
+ */
+export const allowAdditionalProperties = <T extends Schema>(schema: T): T => {
+  return Object.keys(schema).reduce((next_schema: any, key) => {
+    const value = schema[key];
+
+    if (key === 'additionalProperties' && typeof value === 'boolean') {
+      return next_schema;
+    }
+
+    if (Array.isArray(value)) {
+      next_schema[key] = value.map((value) => {
+        if (typeof value !== 'object') {
+          return value;
+        }
+        return allowAdditionalProperties(value);
+      });
+    } else if (typeof value === 'object') {
+      next_schema[key] = allowAdditionalProperties(value);
+    } else {
+      next_schema[key] = value;
+    }
+
+    return next_schema;
+  }, {});
+};
+
+/**
+ * A small utility for validating some data using a MicroValidator.
Will return the valid data (typed correctly) or throw + * a validation error + */ +export const validateData = (event: any, validator: defs.MicroValidator): T => { + const result = validator.validate(event); + if (!result.valid) { + throw new errors.ValidationError(result.errors); + } + return event; +}; diff --git a/packages/service-framework/src/schema/validators/schema-validator.ts b/packages/service-framework/src/schema/validators/schema-validator.ts new file mode 100644 index 000000000..4ffb603bd --- /dev/null +++ b/packages/service-framework/src/schema/validators/schema-validator.ts @@ -0,0 +1,83 @@ +// @ts-ignore +import AjvErrorFormatter from 'better-ajv-errors'; +import AJV, * as ajv from 'ajv'; + +import * as defs from '../definitions'; +import * as utils from '../utils'; +import * as keywords from '../json-schema/keywords'; + +export class SchemaValidatorError extends Error { + constructor(message: string) { + super(message); + + this.name = this.constructor.name; + } +} + +export type SchemaValidator = defs.MicroValidator; + +export type CreateSchemaValidatorParams = { + ajv?: ajv.Options; + + /** + * Allow making the given schema loosely typed to allow accepting additional properties. This + * is useful in certain scenarios such as accepting kafka events that are going through a + * migration and having additional properties set + */ + allowAdditional?: boolean; + + fail_fast?: boolean; +}; + +/** + * Create a validator from a given JSON-Schema schema object. This makes uses of AJV internally + * to compile a validation function + */ +export const createSchemaValidator = ( + schema: defs.JSONSchema, + params: CreateSchemaValidatorParams = {} +): SchemaValidator => { + try { + const ajv = new AJV({ + allErrors: !(params.fail_fast ?? false), + keywords: [keywords.BufferNodeType], + ...(params.ajv || {}) + }); + + let processed_schema = schema; + if (params.allowAdditional) { + processed_schema = utils.allowAdditionalProperties(processed_schema); + } + + const validator = ajv.compile(processed_schema); + + return { + toJSONSchema: () => { + return schema; + }, + + validate: (data) => { + const valid = validator(data); + + if (!valid) { + const errors = AjvErrorFormatter(processed_schema, data, validator.errors || [], { + format: 'js' + })?.map((error: any) => error.error); + + return { + valid: false, + errors: errors || [] + }; + } + + return { + valid: true + }; + } + }; + } catch (err) { + // Here we re-throw the error because the original error thrown by AJV has a deep stack that + // obfuscates the location of the error in application code + throw new SchemaValidatorError(err.message); + } +}; diff --git a/packages/service-framework/src/schema/validators/ts-codec-validator.ts b/packages/service-framework/src/schema/validators/ts-codec-validator.ts new file mode 100644 index 000000000..bfca98340 --- /dev/null +++ b/packages/service-framework/src/schema/validators/ts-codec-validator.ts @@ -0,0 +1,28 @@ +import * as t from 'ts-codec'; + +import * as schema_validator from './schema-validator'; +import * as defs from '../definitions'; +import * as codecs from '../../codec/codec-index'; + +export type TsCodecValidator< + C extends t.AnyCodec, + T extends t.TransformTarget = t.TransformTarget.Encoded +> = T extends t.TransformTarget.Encoded ? 
defs.MicroValidator> : defs.MicroValidator>; + +type ValidatorOptions = Partial> & { + target?: T; +}; + +/** + * Create a validator from a given ts-codec codec + */ +export const createTsCodecValidator = ( + codec: C, + options?: ValidatorOptions +): TsCodecValidator => { + const schema = t.generateJSONSchema(codec, { + ...(options || {}), + parsers: [...(options?.parsers ?? []), ...codecs.parsers] + }); + return schema_validator.createSchemaValidator(schema); +}; diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 00dde34ca..e40a3bcc8 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -223,9 +223,21 @@ importers: packages/service-framework: dependencies: + ajv: + specifier: ^8.12.0 + version: 8.14.0 + better-ajv-errors: + specifier: ^1.2.0 + version: 1.2.0(ajv@8.14.0) + bson: + specifier: ^6.6.0 + version: 6.7.0 lodash: specifier: ^4.17.21 version: 4.17.21 + ts-codec: + specifier: ^1.2.2 + version: 1.2.2 winston: specifier: ^3.13.0 version: 3.13.0 From 2c5efd4fc2cec84f91c99a4ddce3aae1ecc79cbb Mon Sep 17 00:00:00 2001 From: Steven Ontong Date: Thu, 13 Jun 2024 14:11:10 +0200 Subject: [PATCH 12/36] add schema utils helpers --- packages/service-core/src/util/env.ts | 2 +- packages/service-core/test/src/env.ts | 12 +- packages/service-framework/package.json | 4 +- packages/service-framework/src/index.ts | 3 + .../src/utils/environment-variables.ts | 69 +++++++ .../src/utils/utils-index.ts | 1 + .../src/__snapshots__/parser.test.ts.snap | 73 +++++++ .../schema-validation.test.ts.snap | 66 +++++++ .../ts-codec-validation.test.ts.snap | 17 ++ .../test/src/fixtures/schema.ts | 49 +++++ .../schema/__snapshots__/parser.test.ts.snap | 73 +++++++ .../schema-validation.test.ts.snap | 66 +++++++ .../ts-codec-validation.test.ts.snap | 17 ++ .../test/src/schema/parser.test.ts | 84 ++++++++ .../test/src/schema/schema-validation.test.ts | 180 ++++++++++++++++++ .../src/schema/ts-codec-validation.test.ts | 103 ++++++++++ pnpm-lock.yaml | 6 + 17 files changed, 817 insertions(+), 8 deletions(-) create mode 100644 packages/service-framework/src/utils/environment-variables.ts create mode 100644 packages/service-framework/src/utils/utils-index.ts create mode 100644 packages/service-framework/test/src/__snapshots__/parser.test.ts.snap create mode 100644 packages/service-framework/test/src/__snapshots__/schema-validation.test.ts.snap create mode 100644 packages/service-framework/test/src/__snapshots__/ts-codec-validation.test.ts.snap create mode 100644 packages/service-framework/test/src/fixtures/schema.ts create mode 100644 packages/service-framework/test/src/schema/__snapshots__/parser.test.ts.snap create mode 100644 packages/service-framework/test/src/schema/__snapshots__/schema-validation.test.ts.snap create mode 100644 packages/service-framework/test/src/schema/__snapshots__/ts-codec-validation.test.ts.snap create mode 100644 packages/service-framework/test/src/schema/parser.test.ts create mode 100644 packages/service-framework/test/src/schema/schema-validation.test.ts create mode 100644 packages/service-framework/test/src/schema/ts-codec-validation.test.ts diff --git a/packages/service-core/src/util/env.ts b/packages/service-core/src/util/env.ts index 1794adf2b..6d14cd94f 100644 --- a/packages/service-core/src/util/env.ts +++ b/packages/service-core/src/util/env.ts @@ -1,4 +1,4 @@ -import { utils } from '@journeyapps-platform/micro'; +import { utils } from '@powersync/service-framework'; import { ServiceRunner } from './config/types.js'; diff --git a/packages/service-core/test/src/env.ts 
b/packages/service-core/test/src/env.ts index e37fbcef8..f5867fc33 100644 --- a/packages/service-core/test/src/env.ts +++ b/packages/service-core/test/src/env.ts @@ -1,8 +1,8 @@ -import * as micro from '@journeyapps-platform/micro'; +import * as framework from '@powersync/service-framework'; -export const env = micro.utils.collectEnvironmentVariables({ - MONGO_TEST_URL: micro.utils.type.string.default('mongodb://localhost:27017/powersync_test'), - PG_TEST_URL: micro.utils.type.string.default('postgres://postgres:postgres@localhost:5432/powersync_test'), - CI: micro.utils.type.boolean.default('false'), - SLOW_TESTS: micro.utils.type.boolean.default('false') +export const env = framework.utils.collectEnvironmentVariables({ + MONGO_TEST_URL: framework.utils.type.string.default('mongodb://localhost:27017/powersync_test'), + PG_TEST_URL: framework.utils.type.string.default('postgres://postgres:postgres@localhost:5432/powersync_test'), + CI: framework.utils.type.boolean.default('false'), + SLOW_TESTS: framework.utils.type.boolean.default('false') }); diff --git a/packages/service-framework/package.json b/packages/service-framework/package.json index ff432fba4..a1874bdd3 100644 --- a/packages/service-framework/package.json +++ b/packages/service-framework/package.json @@ -22,9 +22,11 @@ "ajv": "^8.12.0", "better-ajv-errors": "^1.2.0", "bson": "^6.6.0", + "dotenv": "^16.4.5", "lodash": "^4.17.21", "ts-codec": "^1.2.2", - "winston": "^3.13.0" + "winston": "^3.13.0", + "zod": "^3.23.8" }, "devDependencies": { "@types/lodash": "^4.17.5", diff --git a/packages/service-framework/src/index.ts b/packages/service-framework/src/index.ts index 08a66bdb1..e5f1550fc 100644 --- a/packages/service-framework/src/index.ts +++ b/packages/service-framework/src/index.ts @@ -14,3 +14,6 @@ export * as signals from './signals/signals-index'; export * from './system/LifeCycledSystem'; export * as system from './system/system-index'; + +export * from './utils/utils-index'; +export * as utils from './utils/utils-index'; diff --git a/packages/service-framework/src/utils/environment-variables.ts b/packages/service-framework/src/utils/environment-variables.ts new file mode 100644 index 000000000..733bcf4c7 --- /dev/null +++ b/packages/service-framework/src/utils/environment-variables.ts @@ -0,0 +1,69 @@ +import * as dotenv from 'dotenv'; +import * as t from 'zod'; + +const string = t.string(); +const number = t + .string() + .refine((value) => !isNaN(parseInt(value))) + .transform((value) => parseInt(value)); + +const convertToBoolean = (value: string) => { + switch (value) { + case '1': + case 'true': { + return true; + } + case '0': + case 'false': { + return false; + } + } +}; +const boolean = t + .string() + .refine((value) => ['0', '1', 'true', 'false'].includes(value)) + .transform(convertToBoolean); + +const list = t.string().transform((value) => value.split(',')); + +export const DefaultSchema = t.object({ + DEV_MODE_DO_NOT_ENABLE_IN_PRODUCTION_OR_YOU_WILL_BE_FIRED: boolean.default('false') +}); +export type DefaultSchema = t.infer; + +export const collectEnvironmentVariablesFromSchema = ( + schema: T, + override?: Record +): t.infer => { + let env; + if (override) { + env = override; + } else { + dotenv.config(); + env = process.env; + } + + const result = schema.safeParse(env); + + if (!result.success) { + console.log(JSON.stringify(result.error.format(), null, 2)); + throw new Error('Invalid or missing environment variables'); + } + + if (result.data.DEV_MODE_DO_NOT_ENABLE_IN_PRODUCTION_OR_YOU_WILL_BE_FIRED) { + 
console.log('DEV MODE. DO NOT ENABLE IN PRODUCTION'); + } + + return result.data; +}; + +export const collectEnvironmentVariables = (schema: T, override?: Record) => { + return collectEnvironmentVariablesFromSchema(t.object(schema).and(DefaultSchema), override); +}; + +export const type = { + string, + number, + boolean, + list +}; diff --git a/packages/service-framework/src/utils/utils-index.ts b/packages/service-framework/src/utils/utils-index.ts new file mode 100644 index 000000000..29115f7c7 --- /dev/null +++ b/packages/service-framework/src/utils/utils-index.ts @@ -0,0 +1 @@ +export * from './environment-variables'; diff --git a/packages/service-framework/test/src/__snapshots__/parser.test.ts.snap b/packages/service-framework/test/src/__snapshots__/parser.test.ts.snap new file mode 100644 index 000000000..52d9475f9 --- /dev/null +++ b/packages/service-framework/test/src/__snapshots__/parser.test.ts.snap @@ -0,0 +1,73 @@ +// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html + +exports[`schema-tools > it should correctly prune unused definitions 1`] = ` +{ + "definitions": { + "b": { + "properties": { + "prop": { + "type": "string", + }, + }, + "required": [ + "prop", + ], + "type": "object", + }, + "b1": { + "properties": { + "prop": { + "$ref": "#/definitions/b", + }, + }, + "required": [ + "prop", + ], + "type": "object", + }, + "c": { + "properties": { + "prop": { + "$ref": "#/definitions/c", + }, + }, + "required": [ + "prop", + ], + "type": "object", + }, + }, + "properties": { + "a": { + "properties": { + "a": { + "$ref": "#/definitions/b1", + }, + "b": { + "enum": [ + "A", + ], + }, + }, + "type": "object", + }, + "b": { + "oneOf": [ + { + "additionalProperties": false, + "properties": { + "a": { + "$ref": "#/definitions/c", + }, + }, + "required": [ + "a", + ], + "type": "object", + }, + ], + }, + }, + "type": "object", +} +`; diff --git a/packages/service-framework/test/src/__snapshots__/schema-validation.test.ts.snap b/packages/service-framework/test/src/__snapshots__/schema-validation.test.ts.snap new file mode 100644 index 000000000..5aa01b1fd --- /dev/null +++ b/packages/service-framework/test/src/__snapshots__/schema-validation.test.ts.snap @@ -0,0 +1,66 @@ +// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html + +exports[`json-schema-validation > fails validation for json-schema 1`] = ` +{ + "errors": [ + "/name/b must be equal to one of the allowed values: A", + ], + "valid": false, +} +`; + +exports[`json-schema-validation > fails validation for json-schema 2`] = ` +{ + "errors": [ + "/name must have required property 'a'", + "/b: oneOf must match exactly one schema in oneOf", + "/b/a: type must be number", + ], + "valid": false, +} +`; + +exports[`json-schema-validation > fails validation for json-schema due to additional properties 1`] = ` +{ + "errors": [ + "/name Property c is not expected to be here", + ], + "valid": false, +} +`; + +exports[`json-schema-validation > it correctly validates node types 1`] = ` +{ + "errors": [ + "/a: nodeType should be a buffer", + ], + "valid": false, +} +`; + +exports[`json-schema-validation > it should correctly validate subschemas 1`] = ` +{ + "errors": [ + "/b: type must be string", + ], + "valid": false, +} +`; + +exports[`json-schema-validation > passes json-schema validation with additional properties when allowed 1`] = ` +{ + "valid": true, +} +`; + +exports[`json-schema-validation > passes validation for json-schema 1`] = ` +{ + "valid": true, +} +`; + +exports[`json-schema-validation > passes validation 
with refs 1`] = ` +{ + "valid": true, +} +`; diff --git a/packages/service-framework/test/src/__snapshots__/ts-codec-validation.test.ts.snap b/packages/service-framework/test/src/__snapshots__/ts-codec-validation.test.ts.snap new file mode 100644 index 000000000..9b262886f --- /dev/null +++ b/packages/service-framework/test/src/__snapshots__/ts-codec-validation.test.ts.snap @@ -0,0 +1,17 @@ +// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html + +exports[`ts-codec validation > fails validation for runtime codec 1`] = ` +{ + "errors": [ + " must have required property 'surname'", + "/name: type must be string", + "/other/b: const must be equal to constant", + "/tuple/0: type must be string", + "/or: type must be number", + "/or: type must be string", + "/or: anyOf must match a schema in anyOf", + "/complex must have required property 'c'", + ], + "valid": false, +} +`; diff --git a/packages/service-framework/test/src/fixtures/schema.ts b/packages/service-framework/test/src/fixtures/schema.ts new file mode 100644 index 000000000..3a64ec742 --- /dev/null +++ b/packages/service-framework/test/src/fixtures/schema.ts @@ -0,0 +1,49 @@ +export default { + definitions: { + c: { + type: 'object', + properties: { + prop: { + type: 'string' + } + }, + required: ['prop'] + } + }, + + type: 'object', + properties: { + name: { + type: 'object', + properties: { + a: { + type: 'string' + }, + b: { + enum: ['A'] + } + }, + required: ['a'], + additionalProperties: false + }, + b: { + oneOf: [ + { + type: 'object', + properties: { + a: { + type: 'number' + } + }, + required: ['a'], + additionalProperties: false + } + ] + }, + d: { + $ref: '#/definitions/c' + } + }, + required: ['name', 'b'], + additionalProperties: false +}; diff --git a/packages/service-framework/test/src/schema/__snapshots__/parser.test.ts.snap b/packages/service-framework/test/src/schema/__snapshots__/parser.test.ts.snap new file mode 100644 index 000000000..52d9475f9 --- /dev/null +++ b/packages/service-framework/test/src/schema/__snapshots__/parser.test.ts.snap @@ -0,0 +1,73 @@ +// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html + +exports[`schema-tools > it should correctly prune unused definitions 1`] = ` +{ + "definitions": { + "b": { + "properties": { + "prop": { + "type": "string", + }, + }, + "required": [ + "prop", + ], + "type": "object", + }, + "b1": { + "properties": { + "prop": { + "$ref": "#/definitions/b", + }, + }, + "required": [ + "prop", + ], + "type": "object", + }, + "c": { + "properties": { + "prop": { + "$ref": "#/definitions/c", + }, + }, + "required": [ + "prop", + ], + "type": "object", + }, + }, + "properties": { + "a": { + "properties": { + "a": { + "$ref": "#/definitions/b1", + }, + "b": { + "enum": [ + "A", + ], + }, + }, + "type": "object", + }, + "b": { + "oneOf": [ + { + "additionalProperties": false, + "properties": { + "a": { + "$ref": "#/definitions/c", + }, + }, + "required": [ + "a", + ], + "type": "object", + }, + ], + }, + }, + "type": "object", +} +`; diff --git a/packages/service-framework/test/src/schema/__snapshots__/schema-validation.test.ts.snap b/packages/service-framework/test/src/schema/__snapshots__/schema-validation.test.ts.snap new file mode 100644 index 000000000..5aa01b1fd --- /dev/null +++ b/packages/service-framework/test/src/schema/__snapshots__/schema-validation.test.ts.snap @@ -0,0 +1,66 @@ +// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html + +exports[`json-schema-validation > fails validation for json-schema 1`] = ` +{ + "errors": [ + "/name/b must 
be equal to one of the allowed values: A", + ], + "valid": false, +} +`; + +exports[`json-schema-validation > fails validation for json-schema 2`] = ` +{ + "errors": [ + "/name must have required property 'a'", + "/b: oneOf must match exactly one schema in oneOf", + "/b/a: type must be number", + ], + "valid": false, +} +`; + +exports[`json-schema-validation > fails validation for json-schema due to additional properties 1`] = ` +{ + "errors": [ + "/name Property c is not expected to be here", + ], + "valid": false, +} +`; + +exports[`json-schema-validation > it correctly validates node types 1`] = ` +{ + "errors": [ + "/a: nodeType should be a buffer", + ], + "valid": false, +} +`; + +exports[`json-schema-validation > it should correctly validate subschemas 1`] = ` +{ + "errors": [ + "/b: type must be string", + ], + "valid": false, +} +`; + +exports[`json-schema-validation > passes json-schema validation with additional properties when allowed 1`] = ` +{ + "valid": true, +} +`; + +exports[`json-schema-validation > passes validation for json-schema 1`] = ` +{ + "valid": true, +} +`; + +exports[`json-schema-validation > passes validation with refs 1`] = ` +{ + "valid": true, +} +`; diff --git a/packages/service-framework/test/src/schema/__snapshots__/ts-codec-validation.test.ts.snap b/packages/service-framework/test/src/schema/__snapshots__/ts-codec-validation.test.ts.snap new file mode 100644 index 000000000..9b262886f --- /dev/null +++ b/packages/service-framework/test/src/schema/__snapshots__/ts-codec-validation.test.ts.snap @@ -0,0 +1,17 @@ +// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html + +exports[`ts-codec validation > fails validation for runtime codec 1`] = ` +{ + "errors": [ + " must have required property 'surname'", + "/name: type must be string", + "/other/b: const must be equal to constant", + "/tuple/0: type must be string", + "/or: type must be number", + "/or: type must be string", + "/or: anyOf must match a schema in anyOf", + "/complex must have required property 'c'", + ], + "valid": false, +} +`; diff --git a/packages/service-framework/test/src/schema/parser.test.ts b/packages/service-framework/test/src/schema/parser.test.ts new file mode 100644 index 000000000..a0161aa32 --- /dev/null +++ b/packages/service-framework/test/src/schema/parser.test.ts @@ -0,0 +1,84 @@ +import { describe, test, expect } from 'vitest'; + +import * as framework_schema from '../../../src/schema/schema-index'; + +describe('schema-tools', () => { + test('it should correctly prune unused definitions', () => { + const schema = framework_schema.parseJSONSchema({ + definitions: { + // unused, should be stripped out + a: { + type: 'object', + properties: { + prop: { + type: 'string' + } + }, + required: ['prop'] + }, + + // extended reference, should be included after walking the full schema + b: { + type: 'object', + properties: { + prop: { + type: 'string' + } + }, + required: ['prop'] + }, + b1: { + type: 'object', + properties: { + prop: { + $ref: '#/definitions/b' + } + }, + required: ['prop'] + }, + + // circular reference, should not result in the walker getting stuck + c: { + type: 'object', + properties: { + prop: { + $ref: '#/definitions/c' + } + }, + required: ['prop'] + } + }, + + type: 'object', + properties: { + a: { + type: 'object', + properties: { + a: { + $ref: '#/definitions/b1' + }, + b: { + enum: ['A'] + } + } + }, + b: { + oneOf: [ + { + type: 'object', + properties: { + a: { + $ref: '#/definitions/c' + } + }, + required: ['a'], + additionalProperties: false + } + 
] + } + } + }); + + expect(schema.compile()).toMatchSnapshot(); + }); +}); diff --git a/packages/service-framework/test/src/schema/schema-validation.test.ts b/packages/service-framework/test/src/schema/schema-validation.test.ts new file mode 100644 index 000000000..13e900c1f --- /dev/null +++ b/packages/service-framework/test/src/schema/schema-validation.test.ts @@ -0,0 +1,180 @@ +import { describe, test, it, expect } from 'vitest'; + +import base_schema from '../fixtures/schema'; +import * as framework_schema from '../../../src/schema/schema-index'; + +const base_validator = framework_schema.createSchemaValidator(base_schema); + +describe('json-schema-validation', () => { + test('passes validation for json-schema', () => { + const result = base_validator.validate({ + name: { + a: '1', + b: 'A' + }, + b: { + a: 2 + } + }); + + expect(result).toMatchSnapshot(); + }); + + test('fails validation for json-schema', () => { + const result1 = base_validator.validate({ + name: { + a: '1', + b: 'B' + }, + b: { + a: 2 + } + }); + + expect(result1).toMatchSnapshot(); + + const result2 = base_validator.validate({ + name: {}, + b: { + a: '' + } + }); + + expect(result2).toMatchSnapshot(); + }); + + test('passes validation with refs', () => { + const result = base_validator.validate({ + name: { + a: '1', + b: 'A' + }, + b: { + a: 2 + }, + d: { + prop: 'abc' + } + }); + + expect(result).toMatchSnapshot(); + }); + + test('fails validation for json-schema due to additional properties', () => { + const result = base_validator.validate({ + name: { + a: '1', + b: 'A', + c: 'additional property' + }, + b: { + a: 2 + } + }); + + expect(result).toMatchSnapshot(); + }); + + test('passes json-schema validation with additional properties when allowed', () => { + const validator = framework_schema.createSchemaValidator(base_schema, { + allowAdditional: true + }); + + const result = validator.validate({ + name: { + a: '1', + b: 'A', + c: 'additional property' + }, + b: { + a: 2 + } + }); + + expect(result).toMatchSnapshot(); + }); + + const subschema = framework_schema.parseJSONSchema({ + definitions: { + a: { + type: 'string' + }, + b: { + type: 'object', + properties: { + a: { type: 'string' }, + b: { $ref: '#/definitions/a' } + }, + required: ['b'] + } + } + }); + + test('it should correctly validate subschemas', () => { + const validator = subschema.definitions.b.validator(); + + const res1 = validator.validate({ + a: 'a', + b: 1 + }); + expect(res1).toMatchSnapshot(); + + const res2 = validator.validate({ + a: 'a', + b: 'b' + }); + + expect(res2.valid).toBe(true); + }); + + test('it correctly validates node types', () => { + const validator = framework_schema.createSchemaValidator({ + type: 'object', + properties: { + a: { + nodeType: 'buffer' + }, + b: { + nodeType: 'date' + } + }, + required: ['a'] + }); + + const res = validator.validate({ + a: Buffer.from('123'), + b: new Date() + }); + expect(res.valid).toBe(true); + + const res2 = validator.validate({ + a: '123' + }); + expect(res2.valid).toBe(false); + expect(res2).toMatchSnapshot(); + }); + + it('should fail to compile invalid node types', () => { + try { + framework_schema.createSchemaValidator({ + type: 'object', + properties: { + a: { + nodeType: 'Buffer' + }, + b: { + nodeType: 'Date' + }, + c: { + nodeType: 'unknown' + } + }, + required: ['a', 'b', 'c'] + }); + } catch (err) { + expect(err).toBeInstanceOf(framework_schema.SchemaValidatorError); + } + + expect.assertions(1); + }); +}); diff --git 
a/packages/service-framework/test/src/schema/ts-codec-validation.test.ts b/packages/service-framework/test/src/schema/ts-codec-validation.test.ts new file mode 100644 index 000000000..6a817650c --- /dev/null +++ b/packages/service-framework/test/src/schema/ts-codec-validation.test.ts @@ -0,0 +1,103 @@ +import * as t from 'ts-codec'; +import { describe, test, expect } from 'vitest'; + +import * as framework_schema from '../../../src/schema/schema-index'; + +describe('ts-codec validation', () => { + enum Values { + A = 'A', + B = 'B' + } + + const codec = t.object({ + name: t.string, + surname: t.string, + other: t.object({ + a: t.array(t.string), + b: t.literal('optional').optional() + }), + tuple: t.tuple([t.string, t.number]), + or: t.number.or(t.string), + enum: t.Enum(Values), + + complex: t + .object({ + a: t.string + }) + .and( + t.object({ + b: t.number + }) + ) + .and( + t.object({ + c: t + .object({ + a: t.string + }) + .and( + t + .object({ + b: t.boolean + }) + .or( + t.object({ + c: t.number + }) + ) + ) + }) + ) + }); + + test('passes validation for codec', () => { + const validator = framework_schema.createTsCodecValidator(codec); + + const result = validator.validate({ + name: 'a', + surname: 'b', + other: { + a: ['nice'], + b: 'optional' + }, + tuple: ['string', 1], + or: 1, + enum: Values.A, + + complex: { + a: '', + b: 1, + c: { + a: '', + b: true + } + } + }); + + expect(result.valid).toBe(true); + }); + + test('fails validation for runtime codec', () => { + const validator = framework_schema.createTsCodecValidator(codec); + + const result = validator.validate({ + // @ts-ignore + name: 1, + other: { + a: ['nice'], + // @ts-ignore + b: 'op' + }, + // @ts-ignore + tuple: [1, 1], + // @ts-ignore + enum: 'c', + // @ts-ignore + or: [], + // @ts-ignore + complex: {} + }); + + expect(result).toMatchSnapshot(); + }); +}); diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index e40a3bcc8..b06bc6627 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -232,6 +232,9 @@ importers: bson: specifier: ^6.6.0 version: 6.7.0 + dotenv: + specifier: ^16.4.5 + version: 16.4.5 lodash: specifier: ^4.17.21 version: 4.17.21 @@ -241,6 +244,9 @@ importers: winston: specifier: ^3.13.0 version: 3.13.0 + zod: + specifier: ^3.23.8 + version: 3.23.8 devDependencies: '@types/lodash': specifier: ^4.17.5 From fd37b4b4613e328db54dddb6f316ab99028e21cd Mon Sep 17 00:00:00 2001 From: Steven Ontong Date: Thu, 13 Jun 2024 15:57:37 +0200 Subject: [PATCH 13/36] alerting and error reporting --- .../service-core/src/replication/WalStream.ts | 7 +- .../src/replication/WalStreamManager.ts | 8 +- .../src/replication/WalStreamRunner.ts | 10 +- packages/service-core/src/runner/teardown.ts | 26 +- .../src/storage/MongoBucketStorage.ts | 8 +- .../src/storage/mongo/MongoBucketBatch.ts | 10 +- .../storage/mongo/MongoSyncBucketStorage.ts | 7 +- .../src/system/CorePowerSyncSystem.ts | 19 +- packages/service-core/src/util/alerting.ts | 8 - .../service-core/test/src/slow_tests.test.ts | 8 +- packages/service-core/test/src/util.ts | 3 +- .../service-core/test/src/wal_stream_utils.ts | 5 +- .../src/alerts/alerts-index.ts | 2 + .../src/alerts/definitions.ts | 17 + .../src/alerts/no-op-reporter.ts | 6 + packages/service-framework/src/index.ts | 3 + pnpm-lock.yaml | 724 ++++++++++++++++++ service/package.json | 2 + service/src/entry.ts | 3 - service/src/system/PowerSyncSystem.ts | 7 +- service/src/util/alerting.ts | 66 ++ service/src/util/register-alerting.ts | 41 - 22 files changed, 899 insertions(+), 91 deletions(-) create mode 100644 
packages/service-framework/src/alerts/alerts-index.ts create mode 100644 packages/service-framework/src/alerts/definitions.ts create mode 100644 packages/service-framework/src/alerts/no-op-reporter.ts create mode 100644 service/src/util/alerting.ts delete mode 100644 service/src/util/register-alerting.ts diff --git a/packages/service-core/src/replication/WalStream.ts b/packages/service-core/src/replication/WalStream.ts index 1ea0f1bd1..49ca8e59f 100644 --- a/packages/service-core/src/replication/WalStream.ts +++ b/packages/service-core/src/replication/WalStream.ts @@ -19,6 +19,7 @@ export interface WalStreamOptions { storage: storage.SyncRulesBucketStorage; abort_signal: AbortSignal; probe: ProbeModule; + errorReporter: framework.ErrorReporter; } interface InitResult { @@ -51,6 +52,7 @@ export class WalStream { private startedStreaming = false; private probe: ProbeModule; + private errorReporter: framework.ErrorReporter; constructor(options: WalStreamOptions) { this.storage = options.storage; @@ -59,6 +61,7 @@ export class WalStream { this.slot_name = options.storage.slot_name; this.connections = options.connections; this.probe = options.probe; + this.errorReporter = options.errorReporter; this.wal_connection = new WalConnection({ db: this.connections.pool, sync_rules: this.sync_rules }); this.abort_signal = options.abort_signal; @@ -201,7 +204,7 @@ export class WalStream { await this.touch(); if (i == 0) { - util.captureException(last_error, { + this.errorReporter.captureException(last_error, { level: framework.errors.ErrorSeverity.ERROR, metadata: { replication_slot: slotName @@ -242,7 +245,7 @@ export class WalStream { /replication slot.*does not exist/.test(e.message) || /publication.*does not exist/.test(e.message) ) { - util.captureException(e, { + this.errorReporter.captureException(e, { level: framework.errors.ErrorSeverity.WARNING, metadata: { try_index: i, diff --git a/packages/service-core/src/replication/WalStreamManager.ts b/packages/service-core/src/replication/WalStreamManager.ts index aabab7d28..67357206c 100644 --- a/packages/service-core/src/replication/WalStreamManager.ts +++ b/packages/service-core/src/replication/WalStreamManager.ts @@ -37,7 +37,7 @@ export class WalStreamManager { start() { this.runLoop().catch((e) => { logger.error(`Fatal WalStream error`, e); - util.captureException(e); + this.system.errorReporter.captureException(e); setTimeout(() => { process.exit(1); }, 1000); @@ -159,7 +159,8 @@ export class WalStreamManager { source_db: this.system.config.connection!, lock, rateLimiter: this.rateLimiter, - probe: this.system.probe + probe: this.system.probe, + errorReporter: this.system.errorReporter }); newStreams.set(syncRules.id, stream); stream.start(); @@ -201,7 +202,8 @@ export class WalStreamManager { storage: storage, source_db: this.system.config.connection!, lock, - probe: this.system.probe + probe: this.system.probe, + errorReporter: this.system.errorReporter }); await stream.terminate(); } finally { diff --git a/packages/service-core/src/replication/WalStreamRunner.ts b/packages/service-core/src/replication/WalStreamRunner.ts index a35e38d9c..6b2857386 100644 --- a/packages/service-core/src/replication/WalStreamRunner.ts +++ b/packages/service-core/src/replication/WalStreamRunner.ts @@ -6,7 +6,7 @@ import * as util from '../util/util-index.js'; import { ErrorRateLimiter } from './ErrorRateLimiter.js'; import { MissingReplicationSlotError, WalStream } from './WalStream.js'; import { ResolvedConnection } from '../util/config/types.js'; -import { 
logger, ProbeModule } from '@powersync/service-framework'; +import { ErrorReporter, logger, ProbeModule } from '@powersync/service-framework'; export interface WalStreamRunnerOptions { factory: storage.BucketStorageFactory; @@ -15,6 +15,7 @@ export interface WalStreamRunnerOptions { lock: storage.ReplicationLock; rateLimiter?: ErrorRateLimiter; probe: ProbeModule; + errorReporter: ErrorReporter; } export class WalStreamRunner { @@ -47,7 +48,7 @@ export class WalStreamRunner { await this.replicateLoop(); } catch (e) { // Fatal exception - util.captureException(e, { + this.options.errorReporter.captureException(e, { metadata: { replication_slot: this.slot_name } @@ -94,7 +95,8 @@ export class WalStreamRunner { factory: this.options.factory, storage: this.options.storage, connections, - probe: this.options.probe + probe: this.options.probe, + errorReporter: this.options.errorReporter }); await stream.replicate(); } catch (e) { @@ -126,7 +128,7 @@ export class WalStreamRunner { throw e; } else { // Report the error if relevant, before retrying - util.captureException(e, { + this.options.errorReporter.captureException(e, { metadata: { replication_slot: this.slot_name } diff --git a/packages/service-core/src/runner/teardown.ts b/packages/service-core/src/runner/teardown.ts index 496039dcf..db8c5b3cf 100644 --- a/packages/service-core/src/runner/teardown.ts +++ b/packages/service-core/src/runner/teardown.ts @@ -9,7 +9,7 @@ import * as db from '../db/db-index.js'; import * as storage from '../storage/storage-index.js'; import * as utils from '../util/util-index.js'; import * as replication from '../replication/replication-index.js'; -import { logger, createFSProbe } from '@powersync/service-framework'; +import { logger, createFSProbe, ErrorReporter, NoOpReporter } from '@powersync/service-framework'; /** * Attempt to terminate a single sync rules instance. @@ -19,7 +19,8 @@ import { logger, createFSProbe } from '@powersync/service-framework'; async function terminateReplicator( storageFactory: storage.BucketStorageFactory, connection: utils.ResolvedConnection, - syncRules: storage.PersistedSyncRulesContent + syncRules: storage.PersistedSyncRulesContent, + errorReporter: ErrorReporter ) { // The lock may still be active if the current replication instance // hasn't stopped yet. @@ -32,7 +33,8 @@ async function terminateReplicator( storage: storage, source_db: connection, lock, - probe: createFSProbe() + probe: createFSProbe(), + errorReporter }); console.log('terminating', stream.slot_name); await stream.terminate(); @@ -52,7 +54,8 @@ async function terminateReplicator( */ async function terminateReplicators( storageFactory: storage.BucketStorageFactory, - connection: utils.ResolvedConnection + connection: utils.ResolvedConnection, + errorReporter: ErrorReporter ) { const start = Date.now(); while (Date.now() - start < 12_000) { @@ -60,7 +63,7 @@ async function terminateReplicators( const replicationRules = await storageFactory.getReplicatingSyncRules(); for (let syncRules of replicationRules) { try { - await terminateReplicator(storageFactory, connection, syncRules); + await terminateReplicator(storageFactory, connection, syncRules, errorReporter); } catch (e) { retry = true; console.error(e); @@ -74,16 +77,23 @@ async function terminateReplicators( } } -export async function teardown(runnerConfig: utils.RunnerConfig) { +// TODO should there be a global context for things like alerting? 
+ +export async function teardown(runnerConfig: utils.RunnerConfig, errorReporter?: ErrorReporter) { const config = await utils.loadConfig(runnerConfig); const mongoDB = storage.createPowerSyncMongo(config.storage); await db.mongo.waitForAuth(mongoDB.db); - const bucketStorage = new storage.MongoBucketStorage(mongoDB, { slot_name_prefix: config.slot_name_prefix }); + const resolvedAlerting = errorReporter ?? NoOpReporter; + + const bucketStorage = new storage.MongoBucketStorage(mongoDB, { + slot_name_prefix: config.slot_name_prefix, + errorReporter: resolvedAlerting + }); const connection = config.connection; if (connection) { - await terminateReplicators(bucketStorage, connection); + await terminateReplicators(bucketStorage, connection, resolvedAlerting); } const database = mongoDB.db; diff --git a/packages/service-core/src/storage/MongoBucketStorage.ts b/packages/service-core/src/storage/MongoBucketStorage.ts index 526dd8ed9..720733114 100644 --- a/packages/service-core/src/storage/MongoBucketStorage.ts +++ b/packages/service-core/src/storage/MongoBucketStorage.ts @@ -24,7 +24,7 @@ import { SyncRuleDocument, SyncRuleState } from './mongo/models.js'; import { generateSlotName } from './mongo/util.js'; import { locks } from '@journeyapps-platform/micro'; import { v4 as uuid } from 'uuid'; -import { logger } from '@powersync/service-framework'; +import { ErrorReporter, logger } from '@powersync/service-framework'; export interface MongoBucketStorageOptions extends PowerSyncMongoOptions {} @@ -32,6 +32,7 @@ export class MongoBucketStorage implements BucketStorageFactory { private readonly client: mongo.MongoClient; private readonly session: mongo.ClientSession; public readonly slot_name_prefix: string; + protected errorReporter: ErrorReporter; private readonly storageCache = new LRUCache({ max: 3, @@ -54,10 +55,11 @@ export class MongoBucketStorage implements BucketStorageFactory { public readonly db: PowerSyncMongo; - constructor(db: PowerSyncMongo, options: { slot_name_prefix: string }) { + constructor(db: PowerSyncMongo, options: { slot_name_prefix: string; errorReporter: ErrorReporter }) { this.client = db.client; this.db = db; this.session = this.client.startSession(); + this.errorReporter = options.errorReporter; this.slot_name_prefix = options.slot_name_prefix; } @@ -66,7 +68,7 @@ export class MongoBucketStorage implements BucketStorageFactory { if ((typeof id as any) == 'bigint') { id = Number(id); } - return new MongoSyncBucketStorage(this, id, sync_rules, slot_name); + return new MongoSyncBucketStorage(this, id, sync_rules, slot_name, this.errorReporter); } async configureSyncRules(sync_rules: string, options?: { lock?: boolean }) { diff --git a/packages/service-core/src/storage/mongo/MongoBucketBatch.ts b/packages/service-core/src/storage/mongo/MongoBucketBatch.ts index 6d5334844..9ca151847 100644 --- a/packages/service-core/src/storage/mongo/MongoBucketBatch.ts +++ b/packages/service-core/src/storage/mongo/MongoBucketBatch.ts @@ -1,4 +1,3 @@ -import * as micro from '@journeyapps-platform/micro'; import { SqliteRow, SqlSyncRules } from '@powersync/service-sync-rules'; import * as bson from 'bson'; import * as mongo from 'mongodb'; @@ -62,7 +61,8 @@ export class MongoBucketBatch implements BucketStorageBatch { group_id: number, slot_name: string, last_checkpoint_lsn: string | null, - no_checkpoint_before_lsn: string | null + no_checkpoint_before_lsn: string | null, + protected errorReporter: framework.ErrorReporter ) { this.db = db; this.client = db.client; @@ -279,7 +279,7 @@ 
export class MongoBucketBatch implements BucketStorageBatch { ); afterData = new bson.Binary(bson.serialize(after!)); - micro.alerts.captureMessage( + this.errorReporter.captureMessage( `Data too big on ${record.sourceTable.qualifiedName}.${record.after?.id}: ${e.message}`, { level: framework.errors.ErrorSeverity.WARNING, @@ -336,7 +336,7 @@ export class MongoBucketBatch implements BucketStorageBatch { }); for (let error of errors) { - micro.alerts.captureMessage( + this.errorReporter.captureMessage( `Failed to evaluate data query on ${record.sourceTable.qualifiedName}.${record.after?.id}: ${error.error}`, { level: framework.errors.ErrorSeverity.WARNING, @@ -376,7 +376,7 @@ export class MongoBucketBatch implements BucketStorageBatch { ); for (let error of paramErrors) { - micro.alerts.captureMessage( + this.errorReporter.captureMessage( `Failed to evaluate parameter query on ${record.sourceTable.qualifiedName}.${record.after?.id}: ${error.error}`, { level: framework.errors.ErrorSeverity.WARNING, diff --git a/packages/service-core/src/storage/mongo/MongoSyncBucketStorage.ts b/packages/service-core/src/storage/mongo/MongoSyncBucketStorage.ts index 58aae774a..a32e0ba7a 100644 --- a/packages/service-core/src/storage/mongo/MongoSyncBucketStorage.ts +++ b/packages/service-core/src/storage/mongo/MongoSyncBucketStorage.ts @@ -22,6 +22,7 @@ import { PowerSyncMongo } from './db.js'; import { BucketDataDocument, BucketDataKey, SourceKey, SyncRuleState } from './models.js'; import { MongoBucketBatch } from './MongoBucketBatch.js'; import { BSON_DESERIALIZE_OPTIONS, idPrefixFilter, readSingleBatch, serializeLookup } from './util.js'; +import { ErrorReporter } from '@powersync/service-framework'; export class MongoSyncBucketStorage implements SyncRulesBucketStorage { private readonly db: PowerSyncMongo; @@ -30,7 +31,8 @@ export class MongoSyncBucketStorage implements SyncRulesBucketStorage { public readonly factory: MongoBucketStorage, public readonly group_id: number, public readonly sync_rules: SqlSyncRules, - public readonly slot_name: string + public readonly slot_name: string, + protected errorReporter: ErrorReporter ) { this.db = factory.db; } @@ -63,7 +65,8 @@ export class MongoSyncBucketStorage implements SyncRulesBucketStorage { this.group_id, this.slot_name, checkpoint_lsn, - doc?.no_checkpoint_before ?? null + doc?.no_checkpoint_before ?? 
null, + this.errorReporter ); try { await callback(batch); diff --git a/packages/service-core/src/system/CorePowerSyncSystem.ts b/packages/service-core/src/system/CorePowerSyncSystem.ts index 71fca4542..a912ef3cf 100644 --- a/packages/service-core/src/system/CorePowerSyncSystem.ts +++ b/packages/service-core/src/system/CorePowerSyncSystem.ts @@ -1,19 +1,30 @@ import * as pgwire from '@powersync/service-jpgwire'; -import { LifeCycledSystem, LifeCycledSystemOptions, logger } from '@powersync/service-framework'; +import { + ErrorReporter, + LifeCycledSystem, + LifeCycledSystemOptions, + logger, + NoOpReporter +} from '@powersync/service-framework'; import * as storage from '../storage/storage-index.js'; import * as utils from '../util/util-index.js'; +export interface CorePowerSyncSystemOptions extends LifeCycledSystemOptions { + errorReporter?: ErrorReporter; +} + export abstract class CorePowerSyncSystem extends LifeCycledSystem { abstract storage: storage.BucketStorageFactory; abstract pgwire_pool?: pgwire.PgClient; + errorReporter: ErrorReporter; + closed: boolean; protected stopHandlers: Set<() => void> = new Set(); - closed: boolean; - - constructor(public config: utils.ResolvedPowerSyncConfig, options?: LifeCycledSystemOptions) { + constructor(public config: utils.ResolvedPowerSyncConfig, options?: CorePowerSyncSystemOptions) { super(options); + this.errorReporter = options?.errorReporter ?? NoOpReporter; this.closed = false; } diff --git a/packages/service-core/src/util/alerting.ts b/packages/service-core/src/util/alerting.ts index d29cfeca5..447708787 100644 --- a/packages/service-core/src/util/alerting.ts +++ b/packages/service-core/src/util/alerting.ts @@ -1,5 +1,3 @@ -import * as micro from '@journeyapps-platform/micro'; - let globalTags: Record = {}; export function setTags(tags: Record) { @@ -9,9 +7,3 @@ export function setTags(tags: Record) { export function getGlobalTags() { return globalTags; } - -export function captureException(error: any, options?: micro.alerts.CaptureOptions) { - micro.alerts.captureException(error, { - ...options - }); -} diff --git a/packages/service-core/test/src/slow_tests.test.ts b/packages/service-core/test/src/slow_tests.test.ts index ccc32abad..8a7c0238a 100644 --- a/packages/service-core/test/src/slow_tests.test.ts +++ b/packages/service-core/test/src/slow_tests.test.ts @@ -10,7 +10,7 @@ import * as pgwire from '@powersync/service-jpgwire'; import { SqliteRow } from '@powersync/service-sync-rules'; import { MongoBucketStorage } from '../../src/storage/MongoBucketStorage.js'; import { PgManager } from '../../src/util/PgManager.js'; -import { createInMemoryProbe } from '@powersync/service-framework'; +import { NoOpReporter, createInMemoryProbe } from '@powersync/service-framework'; describe('slow tests - mongodb', function () { // These are slow, inconsistent tests. 
@@ -73,7 +73,8 @@ bucket_definitions: connections, storage: storage, factory: f, - probe: createInMemoryProbe() + probe: createInMemoryProbe(), + errorReporter: NoOpReporter }; walStream = new WalStream(options); @@ -189,7 +190,8 @@ bucket_definitions: connections, storage: storage, factory: f, - probe: createInMemoryProbe() + probe: createInMemoryProbe(), + errorReporter: NoOpReporter }; walStream = new WalStream(options); diff --git a/packages/service-core/test/src/util.ts b/packages/service-core/test/src/util.ts index ce2f255bb..35dad47aa 100644 --- a/packages/service-core/test/src/util.ts +++ b/packages/service-core/test/src/util.ts @@ -7,6 +7,7 @@ import { PowerSyncMongo } from '../../src/storage/mongo/db.js'; import { escapeIdentifier } from '../../src/util/pgwire_utils.js'; import { env } from './env.js'; import { Metrics } from '@/metrics/Metrics.js'; +import { NoOpReporter } from '@powersync/service-framework'; // The metrics need to be initialised before they can be used await Metrics.initialise({ @@ -23,7 +24,7 @@ export type StorageFactory = () => Promise; export const MONGO_STORAGE_FACTORY: StorageFactory = async () => { const db = await connectMongo(); await db.clear(); - return new MongoBucketStorage(db, { slot_name_prefix: 'test_' }); + return new MongoBucketStorage(db, { slot_name_prefix: 'test_', errorReporter: NoOpReporter }); }; export async function clearTestDb(db: pgwire.PgClient) { diff --git a/packages/service-core/test/src/wal_stream_utils.ts b/packages/service-core/test/src/wal_stream_utils.ts index b32be9c4e..9c3b6270b 100644 --- a/packages/service-core/test/src/wal_stream_utils.ts +++ b/packages/service-core/test/src/wal_stream_utils.ts @@ -6,7 +6,7 @@ import { getClientCheckpoint } from '../../src/util/utils.js'; import { TEST_CONNECTION_OPTIONS, clearTestDb } from './util.js'; import { PgManager } from '../../src/util/PgManager.js'; import { JSONBig } from '@powersync/service-jsonbig'; -import { createInMemoryProbe } from '@powersync/service-framework'; +import { NoOpReporter, createInMemoryProbe } from '@powersync/service-framework'; /** * Tests operating on the wal stream need to configure the stream and manage asynchronous @@ -71,7 +71,8 @@ export class WalStreamTestContext { factory: this.factory, connections: this.connections, abort_signal: this.abortController.signal, - probe: createInMemoryProbe() + probe: createInMemoryProbe(), + errorReporter: NoOpReporter }; this._walStream = new WalStream(options); return this._walStream!; diff --git a/packages/service-framework/src/alerts/alerts-index.ts b/packages/service-framework/src/alerts/alerts-index.ts new file mode 100644 index 000000000..32f970c74 --- /dev/null +++ b/packages/service-framework/src/alerts/alerts-index.ts @@ -0,0 +1,2 @@ +export * from './definitions'; +export * from './no-op-reporter'; diff --git a/packages/service-framework/src/alerts/definitions.ts b/packages/service-framework/src/alerts/definitions.ts new file mode 100644 index 000000000..9d2906ff8 --- /dev/null +++ b/packages/service-framework/src/alerts/definitions.ts @@ -0,0 +1,17 @@ +import * as errors from '../errors/errors-index'; + +export type Primitive = string | number | boolean; + +export type CaptureOptions = { + level?: errors.ErrorSeverity; + tags?: Record; + metadata?: Record; +}; + +export type CaptureErrorFunction = (error: any, options?: CaptureOptions) => void; +export type CaptureMessageFunction = (message: string, options?: CaptureOptions) => void; + +export type ErrorReporter = { + captureException: 
CaptureErrorFunction; + captureMessage: CaptureMessageFunction; +}; diff --git a/packages/service-framework/src/alerts/no-op-reporter.ts b/packages/service-framework/src/alerts/no-op-reporter.ts new file mode 100644 index 000000000..b8c887665 --- /dev/null +++ b/packages/service-framework/src/alerts/no-op-reporter.ts @@ -0,0 +1,6 @@ +import { ErrorReporter } from './definitions'; + +export const NoOpReporter: ErrorReporter = { + captureException: () => {}, + captureMessage: () => {} +}; diff --git a/packages/service-framework/src/index.ts b/packages/service-framework/src/index.ts index e5f1550fc..7f128e3e8 100644 --- a/packages/service-framework/src/index.ts +++ b/packages/service-framework/src/index.ts @@ -1,3 +1,6 @@ +export * from './alerts/alerts-index'; +export * as alerts from './alerts/alerts-index'; + export * from './codec/codec-index'; export * as codecs from './codec/codec-index'; diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index b06bc6627..e7b8a2a1f 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -327,6 +327,9 @@ importers: '@powersync/service-types': specifier: workspace:* version: link:../packages/types + '@sentry/node': + specifier: ^8.9.2 + version: 8.9.2 async-mutex: specifier: ^0.5.0 version: 0.5.0 @@ -376,6 +379,9 @@ importers: specifier: ^2.3.2 version: 2.4.2 devDependencies: + '@sentry/types': + specifier: ^8.9.2 + version: 8.9.2 '@types/uuid': specifier: ^9.0.4 version: 9.0.8 @@ -833,6 +839,10 @@ packages: resolution: {integrity: sha512-E3skn949Pk1z2XtXu/lxf6QAZpawuTM/IUEXcAzpiUkTd73Hmvw26FiN3cJuTmkpM5hZzHwkomVdtrh/n/zzwA==} engines: {node: '>=14'} + '@opentelemetry/api-logs@0.52.0': + resolution: {integrity: sha512-HxjD7xH9iAE4OyhNaaSec65i1H6QZYBWSwWkowFfsc5YAcDvJG30/J1sRKXEQqdmUcKTXEAnA66UciqZha/4+Q==} + engines: {node: '>=14'} + '@opentelemetry/api@1.3.0': resolution: {integrity: sha512-YveTnGNsFFixTKJz09Oi4zYkiLT5af3WpZDu4aIUM7xX+2bHAkOJayFTVQd6zB8kkWPpbua4Ha6Ql00grdLlJQ==} engines: {node: '>=8.0.0'} @@ -845,6 +855,16 @@ packages: resolution: {integrity: sha512-I/s6F7yKUDdtMsoBWXJe8Qz40Tui5vsuKCWJEWVL+5q9sSWRzzx6v2KeNsOBEwd94j0eWkpWCH4yB6rZg9Mf0w==} engines: {node: '>=8.0.0'} + '@opentelemetry/api@1.9.0': + resolution: {integrity: sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg==} + engines: {node: '>=8.0.0'} + + '@opentelemetry/context-async-hooks@1.25.0': + resolution: {integrity: sha512-sBW313mnMyFg0cp/40BRzrZBWG+581s2j5gIsa5fgGadswyILk4mNFATsqrCOpAx945RDuZ2B7ThQLgor9OpfA==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.0.0 <1.10.0' + '@opentelemetry/context-async-hooks@1.8.0': resolution: {integrity: sha512-ueLmocbWDi1aoU4IPdOQyt4qz/Dx+NYyU4qoa3d683usbnkDLUXYXJFfKIMPFV2BbrI5qtnpTtzErCKewoM8aw==} engines: {node: '>=14'} @@ -863,6 +883,12 @@ packages: peerDependencies: '@opentelemetry/api': '>=1.0.0 <1.9.0' + '@opentelemetry/core@1.25.0': + resolution: {integrity: sha512-n0B3s8rrqGrasTgNkXLKXzN0fXo+6IYP7M5b7AMsrZM33f/y6DS6kJ0Btd7SespASWq8bgL3taLo0oe0vB52IQ==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.0.0 <1.10.0' + '@opentelemetry/core@1.8.0': resolution: {integrity: sha512-6SDjwBML4Am0AQmy7z1j6HGrWDgeK8awBRUvl1PGw6HayViMk4QpnUXvv4HTHisecgVBy43NE/cstWprm8tIfw==} engines: {node: '>=14'} @@ -929,24 +955,132 @@ packages: peerDependencies: '@opentelemetry/api': ^1.0.0 + '@opentelemetry/instrumentation-connect@0.37.0': + resolution: {integrity: sha512-SeQktDIH5rNzjiEiazWiJAIXkmnLOnNV7wwHpahrqE0Ph+Z3heqMfxRtoMtbdJSIYLfcNZYO51AjxZ00IXufdw==} + engines: 
{node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + + '@opentelemetry/instrumentation-express@0.40.1': + resolution: {integrity: sha512-+RKMvVe2zw3kIXRup9c1jFu3T4d0fs5aKy015TpiMyoCKX1UMu3Z0lfgYtuyiSTANvg5hZnDbWmQmqSPj9VTvg==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + + '@opentelemetry/instrumentation-fastify@0.37.0': + resolution: {integrity: sha512-WRjwzNZgupSzbEYvo9s+QuHJRqZJjVdNxSEpGBwWK8RKLlHGwGVAu0gcc2gPamJWUJsGqPGvahAPWM18ZkWj6A==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + + '@opentelemetry/instrumentation-graphql@0.41.0': + resolution: {integrity: sha512-R/gXeljgIhaRDKquVkKYT5QHPnFouM8ooyePZEP0kqyaVAedtR1V7NfAUJbxfTG5fBQa5wdmLjvu63+tzRXZCA==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + + '@opentelemetry/instrumentation-hapi@0.39.0': + resolution: {integrity: sha512-ik2nA9Yj2s2ay+aNY+tJsKCsEx6Tsc2g/MK0iWBW5tibwrWKTy1pdVt5sB3kd5Gkimqj23UV5+FH2JFcQLeKug==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + '@opentelemetry/instrumentation-http@0.34.0': resolution: {integrity: sha512-sZxpYOggRIFwdcdy1wWBGG8fwiuWWK4j3qv/rdqTwcPvrVT4iSCoPNDMZYxOcxSEP1fybq28SK43e+IKwxVElQ==} engines: {node: '>=14'} peerDependencies: '@opentelemetry/api': ^1.3.0 + '@opentelemetry/instrumentation-http@0.52.0': + resolution: {integrity: sha512-E6ywZuxTa4LnVXZGwL1oj3e2Eog1yIaNqa8KjKXoGkDNKte9/SjQnePXOmhQYI0A9nf0UyFbP9aKd+yHrkJXUA==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + + '@opentelemetry/instrumentation-ioredis@0.41.0': + resolution: {integrity: sha512-rxiLloU8VyeJGm5j2fZS8ShVdB82n7VNP8wTwfUQqDwRfHCnkzGr+buKoxuhGD91gtwJ91RHkjHA1Eg6RqsUTg==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + + '@opentelemetry/instrumentation-koa@0.41.0': + resolution: {integrity: sha512-mbPnDt7ELvpM2S0vixYUsde7122lgegLOJQxx8iJQbB8YHal/xnTh9v7IfArSVzIDo+E+080hxZyUZD4boOWkw==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + '@opentelemetry/instrumentation-mongodb@0.33.0': resolution: {integrity: sha512-bjRF55grOFRn5XQxm1yDL56FD9UVvmIcBDSsgA0dbUr3VOUu3sN7o34t2uDx7EpnfwhMeAvOBO1wbWXdHBzapg==} engines: {node: '>=14'} peerDependencies: '@opentelemetry/api': ^1.3.0 + '@opentelemetry/instrumentation-mongodb@0.45.0': + resolution: {integrity: sha512-xnZP9+ayeB1JJyNE9cIiwhOJTzNEsRhXVdLgfzmrs48Chhhk026mQdM5CITfyXSCfN73FGAIB8d91+pflJEfWQ==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + + '@opentelemetry/instrumentation-mongoose@0.39.0': + resolution: {integrity: sha512-J1r66A7zJklPPhMtrFOO7/Ud2p0Pv5u8+r23Cd1JUH6fYPmftNJVsLp2urAt6PHK4jVqpP/YegN8wzjJ2mZNPQ==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + + '@opentelemetry/instrumentation-mysql2@0.39.0': + resolution: {integrity: sha512-Iypuq2z6TCfriAXCIZjRq8GTFCKhQv5SpXbmI+e60rYdXw8NHtMH4NXcGF0eKTuoCsC59IYSTUvDQYDKReaszA==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + + '@opentelemetry/instrumentation-mysql@0.39.0': + resolution: {integrity: sha512-8snHPh83rhrDf31v9Kq0Nf+ts8hdr7NguuszRqZomZBHgE0+UyXZSkXHAAFZoBPPRMGyM68uaFE5hVtFl+wOcA==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + + '@opentelemetry/instrumentation-nestjs-core@0.38.0': + resolution: {integrity: sha512-M381Df1dM8aqihZz2yK+ugvMFK5vlHG/835dc67Sx2hH4pQEQYDA2PpFPTgc9AYYOydQaj7ClFQunESimjXDgg==} + engines: {node: '>=14'} + peerDependencies: + 
'@opentelemetry/api': ^1.3.0 + + '@opentelemetry/instrumentation-pg@0.42.0': + resolution: {integrity: sha512-sjgcM8CswYy8zxHgXv4RAZ09DlYhQ+9TdlourUs63Df/ek5RrB1ZbjznqW7PB6c3TyJJmX6AVtPTjAsROovEjA==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + + '@opentelemetry/instrumentation-redis-4@0.40.0': + resolution: {integrity: sha512-0ieQYJb6yl35kXA75LQUPhHtGjtQU9L85KlWa7d4ohBbk/iQKZ3X3CFl5jC5vNMq/GGPB3+w3IxNvALlHtrp7A==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + '@opentelemetry/instrumentation@0.34.0': resolution: {integrity: sha512-VET/bOh4StOQV4vf1sAvn2JD67BhW2vPZ/ynl2gHXyafme2yB8Hs9+tr1TLzFwNGo7jwMFviFQkZjCYxMuK0AA==} engines: {node: '>=14'} peerDependencies: '@opentelemetry/api': ^1.3.0 + '@opentelemetry/instrumentation@0.43.0': + resolution: {integrity: sha512-S1uHE+sxaepgp+t8lvIDuRgyjJWisAb733198kwQTUc9ZtYQ2V2gmyCtR1x21ePGVLoMiX/NWY7WA290hwkjJQ==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + + '@opentelemetry/instrumentation@0.51.1': + resolution: {integrity: sha512-JIrvhpgqY6437QIqToyozrUG1h5UhwHkaGK/WAX+fkrpyPtc+RO5FkRtUd9BH0MibabHHvqsnBGKfKVijbmp8w==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + + '@opentelemetry/instrumentation@0.52.0': + resolution: {integrity: sha512-LPwSIrw+60cheWaXsfGL8stBap/AppKQJFE+qqRvzYrgttXFH2ofoIMxWadeqPTq4BYOXM/C7Bdh/T+B60xnlQ==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + '@opentelemetry/otlp-exporter-base@0.34.0': resolution: {integrity: sha512-xVNvQm7oXeQogeI21iTZRnBrBYS0OVekPutEJgb7jQtHg7x2GWuCBQK9sDo84FRWNXBpNOgSYqsf8/+PxIJ2vA==} engines: {node: '>=14'} @@ -995,6 +1129,10 @@ packages: peerDependencies: '@opentelemetry/api': '>=1.0.0 <1.4.0' + '@opentelemetry/redis-common@0.36.2': + resolution: {integrity: sha512-faYX1N0gpLhej/6nyp6bgRjzAKXn5GOEMYY7YhciSfCoITAktLUtQ36d24QEWNA1/WA1y6qQunCe0OhHRkVl9g==} + engines: {node: '>=14'} + '@opentelemetry/resources@1.17.0': resolution: {integrity: sha512-+u0ciVnj8lhuL/qGRBPeVYvk7fL+H/vOddfvmOeJaA1KC+5/3UED1c9KoZQlRsNT5Kw1FaK8LkY2NVLYfOVZQw==} engines: {node: '>=14'} @@ -1007,6 +1145,12 @@ packages: peerDependencies: '@opentelemetry/api': '>=1.0.0 <1.9.0' + '@opentelemetry/resources@1.25.0': + resolution: {integrity: sha512-iHjydPMYJ+Li1auveJCq2rp5U2h6Mhq8BidiyE0jfVlDTFyR1ny8AfJHfmFzJ/RAM8vT8L7T21kcmGybxZC7lQ==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.0.0 <1.10.0' + '@opentelemetry/resources@1.8.0': resolution: {integrity: sha512-KSyMH6Jvss/PFDy16z5qkCK0ERlpyqixb1xwb73wLMvVq+j7i89lobDjw3JkpCcd1Ws0J6jAI4fw28Zufj2ssg==} engines: {node: '>=14'} @@ -1050,6 +1194,12 @@ packages: peerDependencies: '@opentelemetry/api': '>=1.0.0 <1.9.0' + '@opentelemetry/sdk-trace-base@1.25.0': + resolution: {integrity: sha512-6+g2fiRQUG39guCsKVeY8ToeuUf3YUnPkN6DXRA1qDmFLprlLvZm9cS6+chgbW70cZJ406FTtSCDnJwxDC5sGQ==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.0.0 <1.10.0' + '@opentelemetry/sdk-trace-base@1.8.0': resolution: {integrity: sha512-iH41m0UTddnCKJzZx3M85vlhKzRcmT48pUeBbnzsGrq4nIay1oWVHKM5nhB5r8qRDGvd/n7f/YLCXClxwM0tvA==} engines: {node: '>=14'} @@ -1070,10 +1220,20 @@ packages: resolution: {integrity: sha512-VkliWlS4/+GHLLW7J/rVBA00uXus1SWvwFvcUDxDwmFxYfg/2VI6ekwdXS28cjI8Qz2ky2BzG8OUHo+WeYIWqw==} engines: {node: '>=14'} + '@opentelemetry/semantic-conventions@1.25.0': + resolution: {integrity: sha512-M+kkXKRAIAiAP6qYyesfrC5TOmDpDVtsxuGfPcqd9B/iBrac+E14jYwrgm0yZBUIbIP2OnqC3j+UgkXLm1vxUQ==} + 
engines: {node: '>=14'} + '@opentelemetry/semantic-conventions@1.8.0': resolution: {integrity: sha512-TYh1MRcm4JnvpqtqOwT9WYaBYY4KERHdToxs/suDTLviGRsQkIjS5yYROTYTSJQUnYLOn/TuOh5GoMwfLSU+Ew==} engines: {node: '>=14'} + '@opentelemetry/sql-common@0.40.1': + resolution: {integrity: sha512-nSDlnHSqzC3pXn/wZEZVLuAuJ1MYMXPBwtv2qAbCa3847SaHItdE7SzUq/Jtb0KZmh1zfAbNi3AAMjztTT4Ugg==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.1.0 + '@pkgjs/parseargs@0.11.0': resolution: {integrity: sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==} engines: {node: '>=14'} @@ -1090,6 +1250,9 @@ packages: resolution: {integrity: sha512-UA91GwWPhFExt3IizW6bOeY/pQ0BkuNwKjk9iQW9KqxluGCrg4VenZ0/L+2Y0+ZOtme72EVvg6v0zo3AMQRCeA==} engines: {node: '>=12'} + '@prisma/instrumentation@5.15.0': + resolution: {integrity: sha512-fCWOOOajTKOUEp43gRmBqwt6oN9bPJcLiloi2OG/2ED0N5z62Cuza6FDrlm3SJHQAXYlXqLE0HLdEE5WcUkOzg==} + '@protobufjs/aspromise@1.1.2': resolution: {integrity: sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==} @@ -1208,6 +1371,10 @@ packages: resolution: {integrity: sha512-J6Wmjjx+o7RwST0weTU1KaKUAlzbc8MGkJV1rcHM9xjNTWTva+nrcCM3vFBagnk2Gm/zhwv3h0PvWEqVyp3U1Q==} engines: {node: '>=8'} + '@sentry/core@8.9.2': + resolution: {integrity: sha512-ixm8NISFlPlEo3FjSaqmq4nnd13BRHoafwJ5MG+okCz6BKGZ1SexEggP42/QpGvDprUUHnfncG6WUMgcarr1zA==} + engines: {node: '>=14.18'} + '@sentry/integrations@7.116.0': resolution: {integrity: sha512-UZb60gaF+7veh1Yv79RiGvgGYOnU6xA97H+hI6tKgc1uT20YpItO4X56Vhp0lvyEyUGFZzBRRH1jpMDPNGPkqw==} engines: {node: '>=8'} @@ -1216,14 +1383,36 @@ packages: resolution: {integrity: sha512-HB/4TrJWbnu6swNzkid+MlwzLwY/D/klGt3R0aatgrgWPo2jJm6bSl4LUT39Cr2eg5I1gsREQtXE2mAlC6gm8w==} engines: {node: '>=8'} + '@sentry/node@8.9.2': + resolution: {integrity: sha512-Q+JBpR4yx3eUyyhwgugucfRtPg65gYvzJGEmjzcnDJXJqX8ms4HPpNv9o2Om7A4014JxIibUdrQ+p5idcT7SZA==} + engines: {node: '>=14.18'} + + '@sentry/opentelemetry@8.9.2': + resolution: {integrity: sha512-Q6SHDQhrsBPcMi7ejqVdNTkt6SCTIhpGsFN8QR7daH3uvM0X2O7ciCuO9gRNRTEkflEINV4SBZEjANYH7BkRAg==} + engines: {node: '>=14.18'} + peerDependencies: + '@opentelemetry/api': ^1.9.0 + '@opentelemetry/core': ^1.25.0 + '@opentelemetry/instrumentation': ^0.52.0 + '@opentelemetry/sdk-trace-base': ^1.25.0 + '@opentelemetry/semantic-conventions': ^1.25.0 + '@sentry/types@7.116.0': resolution: {integrity: sha512-QCCvG5QuQrwgKzV11lolNQPP2k67Q6HHD9vllZ/C4dkxkjoIym8Gy+1OgAN3wjsR0f/kG9o5iZyglgNpUVRapQ==} engines: {node: '>=8'} + '@sentry/types@8.9.2': + resolution: {integrity: sha512-+LFOyQGl+zk5SZRGZD2MEURf7i5RHgP/mt3s85Rza+vz8M211WJ0YsjkIGUJFSY842nged5QLx4JysLaBlLymg==} + engines: {node: '>=14.18'} + '@sentry/utils@7.116.0': resolution: {integrity: sha512-Vn9fcvwTq91wJvCd7WTMWozimqMi+dEZ3ie3EICELC2diONcN16ADFdzn65CQQbYwmUzRjN9EjDN2k41pKZWhQ==} engines: {node: '>=8'} + '@sentry/utils@8.9.2': + resolution: {integrity: sha512-A4srR9mEBFdVXwSEKjQ94msUbVkMr8JeFiEj9ouOFORw/Y/ux/WV2bWVD/ZI9wq0TcTNK8L1wBgU8UMS5lIq3A==} + engines: {node: '>=14.18'} + '@sigstore/bundle@1.1.0': resolution: {integrity: sha512-PFutXEy0SmQxYI4texPw3dd2KewuNqv7OuK1ZFtY2fM754yhvG2KdgwIhRnoEE2uHdtdGNQ8s0lb94dW9sELog==} engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} @@ -1278,6 +1467,9 @@ packages: resolution: {integrity: sha512-qaGV9ltJP0EO25YfFUPhxRVK0evXFIAGicsVXuRim4Ed9cjPxYhNnNJ49SFmbeLgtxpslIkX317IgpfcHPVj/A==} engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + '@types/accepts@1.3.7': + resolution: 
{integrity: sha512-Pay9fq2lM2wXPWbteBsRAGiWH2hig4ZE2asK+mm7kUzlxRTfL961rj89I6zV/E3PcIkDqyuBEcMxFT7rccugeQ==} + '@types/body-parser@1.19.5': resolution: {integrity: sha512-fB3Zu92ucau0iQ0JMCFQE7b/dv8Ot07NI3KaZIkIUNXq82k4eBAqUaneXfleGY9JWskeS9y+u0nXMyspcuQrCg==} @@ -1287,9 +1479,18 @@ packages: '@types/chai@4.3.16': resolution: {integrity: sha512-PatH4iOdyh3MyWtmHVFXLWCCIhUbopaltqddG9BzB+gMIzee2MJrvd+jouii9Z3wzQJruGWAm7WOMjgfG8hQlQ==} + '@types/connect@3.4.36': + resolution: {integrity: sha512-P63Zd/JUGq+PdrM1lv0Wv5SBYeA2+CORvbrXbngriYY0jzLUWfQMQQxOhjONEz/wlHOAxOdY7CY65rgQdTjq2w==} + '@types/connect@3.4.38': resolution: {integrity: sha512-K6uROf1LD88uDQqJCktA4yzL1YYAK6NgfsI0v/mTgyPKWsX1CnJ0XPSDhViejru1GcRkLWb8RlzFYJRqGUbaug==} + '@types/content-disposition@0.5.8': + resolution: {integrity: sha512-QVSSvno3dE0MgO76pJhmv4Qyi/j0Yk9pBp0Y7TJ2Tlj+KCgJWY6qX7nnxCOLkZ3VYRSIk1WTxCvwUSdx6CCLdg==} + + '@types/cookies@0.9.0': + resolution: {integrity: sha512-40Zk8qR147RABiQ7NQnBzWzDcjKzNrntB5BAmeGCb2p/MIyOE+4BVvc17wumsUqUw00bJYqoXFHYygQnEFh4/Q==} + '@types/estree@1.0.5': resolution: {integrity: sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw==} @@ -1299,6 +1500,9 @@ packages: '@types/express@4.17.21': resolution: {integrity: sha512-ejlPM315qwLpaQlQDTjPdsUFSc6ZsP4AN6AlWnogPjQ7CVi7PYF3YVz+CY3jE2pwYf7E/7HlDAN0rV2GxTG0HQ==} + '@types/http-assert@1.5.5': + resolution: {integrity: sha512-4+tE/lwdAahgZT1g30Jkdm9PzFRde0xwxBNUyRsCitRvCQB90iuA2uJYdUnhnANRcqGXaWOGY4FEoxeElNAK2g==} + '@types/http-cache-semantics@4.0.4': resolution: {integrity: sha512-1m0bIFVc7eJWyve9S0RnuRgcQqF/Xd5QsUZAZeQFr1Q3/p9JWoQQEqmVy+DPTNpGXwhgIetAoYF8JSc33q29QA==} @@ -1308,6 +1512,18 @@ packages: '@types/json-schema@7.0.15': resolution: {integrity: sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==} + '@types/keygrip@1.0.6': + resolution: {integrity: sha512-lZuNAY9xeJt7Bx4t4dx0rYCDqGPW8RXhQZK1td7d4H6E9zYbLoOtjBvfwdTKpsyxQI/2jv+armjX/RW+ZNpXOQ==} + + '@types/koa-compose@3.2.8': + resolution: {integrity: sha512-4Olc63RY+MKvxMwVknCUDhRQX1pFQoBZ/lXcRLP69PQkEpze/0cr8LNqJQe5NFb/b19DWi2a5bTi2VAlQzhJuA==} + + '@types/koa@2.14.0': + resolution: {integrity: sha512-DTDUyznHGNHAl+wd1n0z1jxNajduyTh8R53xoewuerdBzGo6Ogj6F2299BFtrexJw4NtgjsI5SMPCmV9gZwGXA==} + + '@types/koa__router@12.0.3': + resolution: {integrity: sha512-5YUJVv6NwM1z7m6FuYpKfNLTZ932Z6EF6xy2BbtpJSyn13DKNQEkXVffFVSnJHxvwwWh2SAeumpjAYUELqgjyw==} + '@types/lodash@4.17.5': resolution: {integrity: sha512-MBIOHVZqVqgfro1euRDWX7OO0fBVUUMrN6Pwm8LQsz8cWhEpihlvR70ENj3f40j58TNxZaWv2ndSkInykNBBJw==} @@ -1317,6 +1533,9 @@ packages: '@types/minimist@1.2.5': resolution: {integrity: sha512-hov8bUuiLiyFPGyFPE1lwWhmzYbirOXQNNo40+y3zow8aFVTeyn3VWL0VFFfdNddA8S4Vf0Tc062rzyNr7Paag==} + '@types/mysql@2.15.22': + resolution: {integrity: sha512-wK1pzsJVVAjYCSZWQoWHziQZbNggXFDUEIGf54g4ZM/ERuP86uGdWeKZWMYlqTPMZfHJJvLPyogXGvCOg87yLQ==} + '@types/node@12.20.55': resolution: {integrity: sha512-J8xLz7q2OFulZ2cyGTLE1TbbZcjpno7FaN6zdJNrgAdrJ+DZzh/uFR6YrTb4C+nXakvud8Q4+rbhoIWlYQbUFQ==} @@ -1332,6 +1551,12 @@ packages: '@types/normalize-package-data@2.4.4': resolution: {integrity: sha512-37i+OaWTh9qeK4LSHPsyRC7NahnGotNuZvjLSgcPzblpHB3rrCJxAOgI5gCdKm7coonsaX1Of0ILiTcnZjbfxA==} + '@types/pg-pool@2.0.4': + resolution: {integrity: sha512-qZAvkv1K3QbmHHFYSNRYPkRjOWRLBYrL4B9c+wG0GSVGBw0NtJwPcgx/DSddeDJvRGMHCEQ4VMEVfuJ/0gZ3XQ==} + + '@types/pg@8.6.1': + resolution: {integrity: 
sha512-1Kc4oAGzAl7uqUStZCDvaLFqZrW9qWSjXOmBfdgyBP5La7Us6Mg4GBvRlSoaZMhQF/zSj1C8CtKMBkoiT8eL8w==} + '@types/qs@6.9.15': resolution: {integrity: sha512-uXHQKES6DQKKCLh441Xv/dwxOq1TVS3JPUMlEqoEglvlhR6Mxnlew/Xq/LRVHpLyk7iK3zODe1qYHIMltO7XGg==} @@ -1350,6 +1575,9 @@ packages: '@types/serve-static@1.15.7': resolution: {integrity: sha512-W8Ym+h8nhuRwaKPaDw34QUkwsGi6Rc4yYqvKFo5rm2FUEhCFbzVWrxXUxuKK8TASjWsysJY0nsmNCGhCOIsrOw==} + '@types/shimmer@1.0.5': + resolution: {integrity: sha512-9Hp0ObzwwO57DpLFF0InUjUm/II8GmKAvzbefxQTihCb7KI6yc9yzf0nLc4mVdby5N4DRCgQM2wCup9KTieeww==} + '@types/strip-bom@3.0.0': resolution: {integrity: sha512-xevGOReSYGM7g/kUBZzPqCrR/KYAo+F0yiPc85WFTJa0MSLtyFTVTU6cJu/aV4mid7IffDIWqo69THF2o4JiEQ==} @@ -1400,6 +1628,16 @@ packages: resolution: {integrity: sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==} engines: {node: '>= 0.6'} + acorn-import-assertions@1.9.0: + resolution: {integrity: sha512-cmMwop9x+8KFhxvKrKfPYmN6/pKTYYHBqLa0DfvVZcKMJWNyWLnaqND7dx/qn66R7ewM1UX5XMaDVP5wlVTaVA==} + peerDependencies: + acorn: ^8 + + acorn-import-attributes@1.9.5: + resolution: {integrity: sha512-n02Vykv5uA3eHGM/Z2dQrcD56kL8TyDb2p1+0P83PClMnC/nc+anbQRhIOWnSq4Ke/KvDPrY3C9hDtC/A3eHnQ==} + peerDependencies: + acorn: ^8 + acorn-walk@8.3.2: resolution: {integrity: sha512-cjkyv4OtNCIeqhHrfS81QWXoCBPExR/J62oyEqepVw8WaQeSqpW2uhuLPh1m9eWhDuOo/jUXVTlifvesOWp/4A==} engines: {node: '>=0.4.0'} @@ -1680,6 +1918,9 @@ packages: resolution: {integrity: sha512-NIxF55hv4nSqQswkAeiOi1r83xy8JldOFDTWiug55KBu9Jnblncd2U6ViHmYgHf01TPZS77NJBhBMKdWj9HQMQ==} engines: {node: '>=8'} + cjs-module-lexer@1.3.1: + resolution: {integrity: sha512-a3KdPAANPbNE4ZUv9h6LckSl9zLsYOP4MBmhIPkRaeyybt+r4UghLvq+xw/YwUcC1gqylCkL4rdVs3Lwupjm4Q==} + clean-stack@2.2.0: resolution: {integrity: sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==} engines: {node: '>=6'} @@ -2384,6 +2625,15 @@ packages: immediate@3.0.6: resolution: {integrity: sha512-XXOFtyqDjNDAQxVfYxuF7g9Il/IbWmmlQg2MYKOH8ExIT1qg6xc4zyS3HaEEATgs1btfzxq15ciUiY7gjSXRGQ==} + import-in-the-middle@1.4.2: + resolution: {integrity: sha512-9WOz1Yh/cvO/p69sxRmhyQwrIGGSp7EIdcb+fFNVi7CzQGQB8U1/1XrKVSbEd/GNOAeM0peJtmi7+qphe7NvAw==} + + import-in-the-middle@1.7.4: + resolution: {integrity: sha512-Lk+qzWmiQuRPPulGQeK5qq0v32k2bHnWrRPFgqyvhw7Kkov5L6MOLOIU3pcWeujc9W4q54Cp3Q2WV16eQkc7Bg==} + + import-in-the-middle@1.8.0: + resolution: {integrity: sha512-/xQjze8szLNnJ5rvHSzn+dcVXqCAU6Plbk4P24U/jwPmg1wy7IIp9OjKIO5tYue8GSPhDpPDiApQjvBUmWwhsQ==} + import-lazy@4.0.0: resolution: {integrity: sha512-rKtvo6a868b5Hu3heneU+L4yEQ4jYKLtjpnPeUdK7h0yzXGmyBTypknlkCvHFBqfX9YlorEiMM6Dnq/5atfHkw==} engines: {node: '>=8'} @@ -3109,6 +3359,10 @@ packages: resolution: {integrity: sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==} engines: {node: '>=6'} + opentelemetry-instrumentation-fetch-node@1.2.0: + resolution: {integrity: sha512-aiSt/4ubOTyb1N5C2ZbGrBvaJOXIZhZvpRPYuUVxQJe27wJZqf/o65iPrqgLcgfeOLaQ8cS2Q+762jrYvniTrA==} + engines: {node: '>18.0.0'} + opentracing@0.14.7: resolution: {integrity: sha512-vz9iS7MJ5+Bp1URw8Khvdyw1H/hGvzHWlKQ7eRrQojSCDL1/SrWfrY9QebLw97n2deyRtzHRC3MkQfVNUCo91Q==} engines: {node: '>=0.10'} @@ -3221,6 +3475,17 @@ packages: pause-stream@0.0.11: resolution: {integrity: sha512-e3FBlXLmN/D1S+zHzanP4E/4Z60oFAa3O051qt1pxa7DEJWKAyil6upYVXCWadEnuoqa4Pkc9oUx9zsxYeRv8A==} + pg-int8@1.0.1: + resolution: {integrity: 
sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==} + engines: {node: '>=4.0.0'} + + pg-protocol@1.6.1: + resolution: {integrity: sha512-jPIlvgoD63hrEuihvIg+tJhoGjUsLPn6poJY9N5CnlPd91c2T18T/9zBtLxZSb1EhYxBRoZJtzScCaWlYLtktg==} + + pg-types@2.2.0: + resolution: {integrity: sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==} + engines: {node: '>=4'} + pgsql-ast-parser@11.2.0: resolution: {integrity: sha512-/8KCcQjePoQDOtfZQuoV/4Y3WpmQVp7E+RFayAdjJpdBdu2dBnKnuQe9XU4g5Td5qC0G+i/fFK/DlNjvWwg+FA==} @@ -3265,6 +3530,22 @@ packages: resolution: {integrity: sha512-Wglpdk03BSfXkHoQa3b/oulrotAkwrlLDRSOb9D0bN86FdRyE9lppSp33aHNPgBa0JKCoB+drFLZkQoRRYae5A==} engines: {node: ^10 || ^12 || >=14} + postgres-array@2.0.0: + resolution: {integrity: sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==} + engines: {node: '>=4'} + + postgres-bytea@1.0.0: + resolution: {integrity: sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w==} + engines: {node: '>=0.10.0'} + + postgres-date@1.0.7: + resolution: {integrity: sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==} + engines: {node: '>=0.10.0'} + + postgres-interval@1.2.0: + resolution: {integrity: sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==} + engines: {node: '>=0.10.0'} + preferred-pm@3.1.3: resolution: {integrity: sha512-MkXsENfftWSRpzCzImcp4FRsCc3y1opwB73CfCNWyzMqArju2CrlMHlqB7VexKiPEOjGMbttv1r9fSCn5S610w==} engines: {node: '>=10'} @@ -3474,6 +3755,10 @@ packages: resolution: {integrity: sha512-efCx3b+0Z69/LGJmm9Yvi4cqEdxnoGnxYxGxBghkkTTFeXRtTCmmhO0AnAfHz59k957uTSuy8WaHqOs8wbYUWg==} engines: {node: '>=6'} + require-in-the-middle@7.3.0: + resolution: {integrity: sha512-nQFEv9gRw6SJAwWD2LrL0NmQvAcO7FBwJbwmr2ttPAacfy0xuiOjE5zt+zM4xDyuyvUaxBi/9gb2SoCyNEVJcw==} + engines: {node: '>=8.6.0'} + require-main-filename@2.0.0: resolution: {integrity: sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==} @@ -5087,12 +5372,22 @@ snapshots: dependencies: '@opentelemetry/api': 1.8.0 + '@opentelemetry/api-logs@0.52.0': + dependencies: + '@opentelemetry/api': 1.8.0 + '@opentelemetry/api@1.3.0': {} '@opentelemetry/api@1.6.0': {} '@opentelemetry/api@1.8.0': {} + '@opentelemetry/api@1.9.0': {} + + '@opentelemetry/context-async-hooks@1.25.0(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/context-async-hooks@1.8.0(@opentelemetry/api@1.3.0)': dependencies: '@opentelemetry/api': 1.3.0 @@ -5117,6 +5412,21 @@ snapshots: '@opentelemetry/api': 1.8.0 '@opentelemetry/semantic-conventions': 1.24.1 + '@opentelemetry/core@1.24.1(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/semantic-conventions': 1.24.1 + + '@opentelemetry/core@1.25.0(@opentelemetry/api@1.8.0)': + dependencies: + '@opentelemetry/api': 1.8.0 + '@opentelemetry/semantic-conventions': 1.25.0 + + '@opentelemetry/core@1.25.0(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/semantic-conventions': 1.25.0 + '@opentelemetry/core@1.8.0(@opentelemetry/api@1.3.0)': dependencies: '@opentelemetry/api': 1.3.0 @@ -5210,6 +5520,50 @@ snapshots: '@opentelemetry/sdk-trace-base': 1.8.0(@opentelemetry/api@1.3.0) '@opentelemetry/semantic-conventions': 1.8.0 + 
'@opentelemetry/instrumentation-connect@0.37.0(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.25.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.52.0(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.25.0 + '@types/connect': 3.4.36 + transitivePeerDependencies: + - supports-color + + '@opentelemetry/instrumentation-express@0.40.1(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.25.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.52.0(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.25.0 + transitivePeerDependencies: + - supports-color + + '@opentelemetry/instrumentation-fastify@0.37.0(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.25.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.52.0(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.25.0 + transitivePeerDependencies: + - supports-color + + '@opentelemetry/instrumentation-graphql@0.41.0(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/instrumentation': 0.52.0(@opentelemetry/api@1.9.0) + transitivePeerDependencies: + - supports-color + + '@opentelemetry/instrumentation-hapi@0.39.0(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.25.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.52.0(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.25.0 + transitivePeerDependencies: + - supports-color + '@opentelemetry/instrumentation-http@0.34.0(@opentelemetry/api@1.3.0)': dependencies: '@opentelemetry/api': 1.3.0 @@ -5220,6 +5574,36 @@ snapshots: transitivePeerDependencies: - supports-color + '@opentelemetry/instrumentation-http@0.52.0(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.25.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.52.0(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.25.0 + semver: 7.6.2 + transitivePeerDependencies: + - supports-color + + '@opentelemetry/instrumentation-ioredis@0.41.0(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/instrumentation': 0.52.0(@opentelemetry/api@1.9.0) + '@opentelemetry/redis-common': 0.36.2 + '@opentelemetry/semantic-conventions': 1.25.0 + transitivePeerDependencies: + - supports-color + + '@opentelemetry/instrumentation-koa@0.41.0(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.25.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.52.0(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.25.0 + '@types/koa': 2.14.0 + '@types/koa__router': 12.0.3 + transitivePeerDependencies: + - supports-color + '@opentelemetry/instrumentation-mongodb@0.33.0(@opentelemetry/api@1.3.0)': dependencies: '@opentelemetry/api': 1.3.0 @@ -5228,6 +5612,70 @@ snapshots: transitivePeerDependencies: - supports-color + '@opentelemetry/instrumentation-mongodb@0.45.0(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/instrumentation': 0.52.0(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-metrics': 1.24.1(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.25.0 + transitivePeerDependencies: + - supports-color + + 
'@opentelemetry/instrumentation-mongoose@0.39.0(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.25.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.52.0(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.25.0 + transitivePeerDependencies: + - supports-color + + '@opentelemetry/instrumentation-mysql2@0.39.0(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/instrumentation': 0.52.0(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.25.0 + '@opentelemetry/sql-common': 0.40.1(@opentelemetry/api@1.9.0) + transitivePeerDependencies: + - supports-color + + '@opentelemetry/instrumentation-mysql@0.39.0(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/instrumentation': 0.52.0(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.25.0 + '@types/mysql': 2.15.22 + transitivePeerDependencies: + - supports-color + + '@opentelemetry/instrumentation-nestjs-core@0.38.0(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/instrumentation': 0.52.0(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.25.0 + transitivePeerDependencies: + - supports-color + + '@opentelemetry/instrumentation-pg@0.42.0(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/instrumentation': 0.52.0(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.25.0 + '@opentelemetry/sql-common': 0.40.1(@opentelemetry/api@1.9.0) + '@types/pg': 8.6.1 + '@types/pg-pool': 2.0.4 + transitivePeerDependencies: + - supports-color + + '@opentelemetry/instrumentation-redis-4@0.40.0(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/instrumentation': 0.52.0(@opentelemetry/api@1.9.0) + '@opentelemetry/redis-common': 0.36.2 + '@opentelemetry/semantic-conventions': 1.25.0 + transitivePeerDependencies: + - supports-color + '@opentelemetry/instrumentation@0.34.0(@opentelemetry/api@1.3.0)': dependencies: '@opentelemetry/api': 1.3.0 @@ -5237,6 +5685,42 @@ snapshots: transitivePeerDependencies: - supports-color + '@opentelemetry/instrumentation@0.43.0(@opentelemetry/api@1.8.0)': + dependencies: + '@opentelemetry/api': 1.8.0 + '@types/shimmer': 1.0.5 + import-in-the-middle: 1.4.2 + require-in-the-middle: 7.3.0 + semver: 7.6.2 + shimmer: 1.2.1 + transitivePeerDependencies: + - supports-color + optional: true + + '@opentelemetry/instrumentation@0.51.1(@opentelemetry/api@1.8.0)': + dependencies: + '@opentelemetry/api': 1.8.0 + '@opentelemetry/api-logs': 0.51.1 + '@types/shimmer': 1.0.5 + import-in-the-middle: 1.7.4 + require-in-the-middle: 7.3.0 + semver: 7.6.2 + shimmer: 1.2.1 + transitivePeerDependencies: + - supports-color + + '@opentelemetry/instrumentation@0.52.0(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/api-logs': 0.52.0 + '@types/shimmer': 1.0.5 + import-in-the-middle: 1.8.0 + require-in-the-middle: 7.3.0 + semver: 7.6.2 + shimmer: 1.2.1 + transitivePeerDependencies: + - supports-color + '@opentelemetry/otlp-exporter-base@0.34.0(@opentelemetry/api@1.3.0)': dependencies: '@opentelemetry/api': 1.3.0 @@ -5290,6 +5774,8 @@ snapshots: '@opentelemetry/api': 1.3.0 '@opentelemetry/core': 1.8.0(@opentelemetry/api@1.3.0) + '@opentelemetry/redis-common@0.36.2': {} + '@opentelemetry/resources@1.17.0(@opentelemetry/api@1.6.0)': dependencies: '@opentelemetry/api': 1.6.0 @@ 
-5308,6 +5794,24 @@ snapshots: '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0) '@opentelemetry/semantic-conventions': 1.24.1 + '@opentelemetry/resources@1.24.1(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.24.1 + + '@opentelemetry/resources@1.25.0(@opentelemetry/api@1.8.0)': + dependencies: + '@opentelemetry/api': 1.8.0 + '@opentelemetry/core': 1.25.0(@opentelemetry/api@1.8.0) + '@opentelemetry/semantic-conventions': 1.25.0 + + '@opentelemetry/resources@1.25.0(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.25.0(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.25.0 + '@opentelemetry/resources@1.8.0(@opentelemetry/api@1.3.0)': dependencies: '@opentelemetry/api': 1.3.0 @@ -5342,6 +5846,13 @@ snapshots: '@opentelemetry/resources': 1.24.1(@opentelemetry/api@1.8.0) lodash.merge: 4.6.2 + '@opentelemetry/sdk-metrics@1.24.1(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.24.1(@opentelemetry/api@1.9.0) + lodash.merge: 4.6.2 + '@opentelemetry/sdk-metrics@1.8.0(@opentelemetry/api@1.3.0)': dependencies: '@opentelemetry/api': 1.3.0 @@ -5374,6 +5885,20 @@ snapshots: '@opentelemetry/resources': 1.24.1(@opentelemetry/api@1.8.0) '@opentelemetry/semantic-conventions': 1.24.1 + '@opentelemetry/sdk-trace-base@1.25.0(@opentelemetry/api@1.8.0)': + dependencies: + '@opentelemetry/api': 1.8.0 + '@opentelemetry/core': 1.25.0(@opentelemetry/api@1.8.0) + '@opentelemetry/resources': 1.25.0(@opentelemetry/api@1.8.0) + '@opentelemetry/semantic-conventions': 1.25.0 + + '@opentelemetry/sdk-trace-base@1.25.0(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.25.0(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.25.0(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.25.0 + '@opentelemetry/sdk-trace-base@1.8.0(@opentelemetry/api@1.3.0)': dependencies: '@opentelemetry/api': 1.3.0 @@ -5395,8 +5920,15 @@ snapshots: '@opentelemetry/semantic-conventions@1.24.1': {} + '@opentelemetry/semantic-conventions@1.25.0': {} + '@opentelemetry/semantic-conventions@1.8.0': {} + '@opentelemetry/sql-common@0.40.1(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.25.0(@opentelemetry/api@1.9.0) + '@pkgjs/parseargs@0.11.0': optional: true @@ -5412,6 +5944,14 @@ snapshots: '@pnpm/network.ca-file': 1.0.2 config-chain: 1.1.13 + '@prisma/instrumentation@5.15.0': + dependencies: + '@opentelemetry/api': 1.8.0 + '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0) + '@opentelemetry/sdk-trace-base': 1.25.0(@opentelemetry/api@1.8.0) + transitivePeerDependencies: + - supports-color + '@protobufjs/aspromise@1.1.2': {} '@protobufjs/base64@1.1.2': {} @@ -5494,6 +6034,11 @@ snapshots: '@sentry/types': 7.116.0 '@sentry/utils': 7.116.0 + '@sentry/core@8.9.2': + dependencies: + '@sentry/types': 8.9.2 + '@sentry/utils': 8.9.2 + '@sentry/integrations@7.116.0': dependencies: '@sentry/core': 7.116.0 @@ -5509,12 +6054,63 @@ snapshots: '@sentry/types': 7.116.0 '@sentry/utils': 7.116.0 + '@sentry/node@8.9.2': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/context-async-hooks': 1.25.0(@opentelemetry/api@1.9.0) + '@opentelemetry/core': 1.25.0(@opentelemetry/api@1.9.0) + 
'@opentelemetry/instrumentation': 0.52.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-connect': 0.37.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-express': 0.40.1(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-fastify': 0.37.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-graphql': 0.41.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-hapi': 0.39.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-http': 0.52.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-ioredis': 0.41.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-koa': 0.41.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-mongodb': 0.45.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-mongoose': 0.39.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-mysql': 0.39.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-mysql2': 0.39.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-nestjs-core': 0.38.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-pg': 0.42.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-redis-4': 0.40.0(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.25.0(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-trace-base': 1.25.0(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.25.0 + '@prisma/instrumentation': 5.15.0 + '@sentry/core': 8.9.2 + '@sentry/opentelemetry': 8.9.2(@opentelemetry/api@1.9.0)(@opentelemetry/core@1.25.0(@opentelemetry/api@1.6.0))(@opentelemetry/instrumentation@0.52.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@1.25.0(@opentelemetry/api@1.9.0))(@opentelemetry/semantic-conventions@1.25.0) + '@sentry/types': 8.9.2 + '@sentry/utils': 8.9.2 + optionalDependencies: + opentelemetry-instrumentation-fetch-node: 1.2.0 + transitivePeerDependencies: + - supports-color + + '@sentry/opentelemetry@8.9.2(@opentelemetry/api@1.9.0)(@opentelemetry/core@1.25.0(@opentelemetry/api@1.6.0))(@opentelemetry/instrumentation@0.52.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@1.25.0(@opentelemetry/api@1.9.0))(@opentelemetry/semantic-conventions@1.25.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.25.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.52.0(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-trace-base': 1.25.0(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.25.0 + '@sentry/core': 8.9.2 + '@sentry/types': 8.9.2 + '@sentry/utils': 8.9.2 + '@sentry/types@7.116.0': {} + '@sentry/types@8.9.2': {} + '@sentry/utils@7.116.0': dependencies: '@sentry/types': 7.116.0 + '@sentry/utils@8.9.2': + dependencies: + '@sentry/types': 8.9.2 + '@sigstore/bundle@1.1.0': dependencies: '@sigstore/protobuf-specs': 0.2.1 @@ -5565,6 +6161,10 @@ snapshots: '@tufjs/canonical-json': 1.0.0 minimatch: 9.0.4 + '@types/accepts@1.3.7': + dependencies: + '@types/node': 18.11.11 + '@types/body-parser@1.19.5': dependencies: '@types/connect': 3.4.38 @@ -5576,10 +6176,23 @@ snapshots: '@types/chai@4.3.16': {} + '@types/connect@3.4.36': + dependencies: + '@types/node': 18.11.11 + '@types/connect@3.4.38': dependencies: '@types/node': 18.11.11 + '@types/content-disposition@0.5.8': {} + + '@types/cookies@0.9.0': + dependencies: + '@types/connect': 3.4.38 + '@types/express': 4.17.21 + '@types/keygrip': 1.0.6 + '@types/node': 18.11.11 + '@types/estree@1.0.5': {} '@types/express-serve-static-core@4.19.1': @@ -5596,18 +6209,45 @@ snapshots: '@types/qs': 
6.9.15 '@types/serve-static': 1.15.7 + '@types/http-assert@1.5.5': {} + '@types/http-cache-semantics@4.0.4': {} '@types/http-errors@2.0.4': {} '@types/json-schema@7.0.15': {} + '@types/keygrip@1.0.6': {} + + '@types/koa-compose@3.2.8': + dependencies: + '@types/koa': 2.14.0 + + '@types/koa@2.14.0': + dependencies: + '@types/accepts': 1.3.7 + '@types/content-disposition': 0.5.8 + '@types/cookies': 0.9.0 + '@types/http-assert': 1.5.5 + '@types/http-errors': 2.0.4 + '@types/keygrip': 1.0.6 + '@types/koa-compose': 3.2.8 + '@types/node': 18.11.11 + + '@types/koa__router@12.0.3': + dependencies: + '@types/koa': 2.14.0 + '@types/lodash@4.17.5': {} '@types/mime@1.3.5': {} '@types/minimist@1.2.5': {} + '@types/mysql@2.15.22': + dependencies: + '@types/node': 18.11.11 + '@types/node@12.20.55': {} '@types/node@13.13.52': {} @@ -5618,6 +6258,16 @@ snapshots: '@types/normalize-package-data@2.4.4': {} + '@types/pg-pool@2.0.4': + dependencies: + '@types/pg': 8.6.1 + + '@types/pg@8.6.1': + dependencies: + '@types/node': 18.11.11 + pg-protocol: 1.6.1 + pg-types: 2.2.0 + '@types/qs@6.9.15': {} '@types/range-parser@1.2.7': {} @@ -5637,6 +6287,8 @@ snapshots: '@types/node': 18.11.11 '@types/send': 0.17.4 + '@types/shimmer@1.0.5': {} + '@types/strip-bom@3.0.0': {} '@types/strip-json-comments@0.0.30': {} @@ -5696,6 +6348,15 @@ snapshots: mime-types: 2.1.35 negotiator: 0.6.3 + acorn-import-assertions@1.9.0(acorn@8.11.3): + dependencies: + acorn: 8.11.3 + optional: true + + acorn-import-attributes@1.9.5(acorn@8.11.3): + dependencies: + acorn: 8.11.3 + acorn-walk@8.3.2: {} acorn@8.11.3: {} @@ -6036,6 +6697,8 @@ snapshots: ci-info@3.9.0: {} + cjs-module-lexer@1.3.1: {} + clean-stack@2.2.0: {} cli-boxes@3.0.0: {} @@ -6877,6 +7540,28 @@ snapshots: immediate@3.0.6: {} + import-in-the-middle@1.4.2: + dependencies: + acorn: 8.11.3 + acorn-import-assertions: 1.9.0(acorn@8.11.3) + cjs-module-lexer: 1.3.1 + module-details-from-path: 1.0.3 + optional: true + + import-in-the-middle@1.7.4: + dependencies: + acorn: 8.11.3 + acorn-import-attributes: 1.9.5(acorn@8.11.3) + cjs-module-lexer: 1.3.1 + module-details-from-path: 1.0.3 + + import-in-the-middle@1.8.0: + dependencies: + acorn: 8.11.3 + acorn-import-attributes: 1.9.5(acorn@8.11.3) + cjs-module-lexer: 1.3.1 + module-details-from-path: 1.0.3 + import-lazy@4.0.0: {} imurmurhash@0.1.4: {} @@ -7628,6 +8313,15 @@ snapshots: dependencies: mimic-fn: 2.1.0 + opentelemetry-instrumentation-fetch-node@1.2.0: + dependencies: + '@opentelemetry/api': 1.8.0 + '@opentelemetry/instrumentation': 0.43.0(@opentelemetry/api@1.8.0) + '@opentelemetry/semantic-conventions': 1.25.0 + transitivePeerDependencies: + - supports-color + optional: true + opentracing@0.14.7: {} ora@5.4.1: @@ -7747,6 +8441,18 @@ snapshots: dependencies: through: 2.3.8 + pg-int8@1.0.1: {} + + pg-protocol@1.6.1: {} + + pg-types@2.2.0: + dependencies: + pg-int8: 1.0.1 + postgres-array: 2.0.0 + postgres-bytea: 1.0.0 + postgres-date: 1.0.7 + postgres-interval: 1.2.0 + pgsql-ast-parser@11.2.0: dependencies: moo: 0.5.2 @@ -7799,6 +8505,16 @@ snapshots: picocolors: 1.0.1 source-map-js: 1.2.0 + postgres-array@2.0.0: {} + + postgres-bytea@1.0.0: {} + + postgres-date@1.0.7: {} + + postgres-interval@1.2.0: + dependencies: + xtend: 4.0.2 + preferred-pm@3.1.3: dependencies: find-up: 5.0.0 @@ -8042,6 +8758,14 @@ snapshots: transitivePeerDependencies: - supports-color + require-in-the-middle@7.3.0: + dependencies: + debug: 4.3.4(supports-color@5.5.0) + module-details-from-path: 1.0.3 + resolve: 1.22.8 + transitivePeerDependencies: + - 
supports-color + require-main-filename@2.0.0: {} resolve-alpn@1.2.1: {} diff --git a/service/package.json b/service/package.json index 59acbb256..9ff37aace 100644 --- a/service/package.json +++ b/service/package.json @@ -23,6 +23,7 @@ "@powersync/service-rsocket-router": "workspace:*", "@powersync/service-sync-rules": "workspace:*", "@powersync/service-types": "workspace:*", + "@sentry/node": "^8.9.2", "async-mutex": "^0.5.0", "bson": "^6.6.0", "commander": "^12.0.0", @@ -41,6 +42,7 @@ "yaml": "^2.3.2" }, "devDependencies": { + "@sentry/types": "^8.9.2", "@types/uuid": "^9.0.4", "copyfiles": "^2.4.1", "nodemon": "^3.0.1", diff --git a/service/src/entry.ts b/service/src/entry.ts index fcba2d7a3..4d85065ea 100644 --- a/service/src/entry.ts +++ b/service/src/entry.ts @@ -1,6 +1,3 @@ -import '@journeyapps-platform/micro/register'; -import './util/register-alerting.js'; - import winston from 'winston'; import { entry, utils } from '@powersync/service-core'; import * as framework from '@powersync/service-framework'; diff --git a/service/src/system/PowerSyncSystem.ts b/service/src/system/PowerSyncSystem.ts index 93024dca7..249219610 100644 --- a/service/src/system/PowerSyncSystem.ts +++ b/service/src/system/PowerSyncSystem.ts @@ -1,12 +1,14 @@ import { db, system, utils, storage, Metrics } from '@powersync/service-core'; import * as pgwire from '@powersync/service-jpgwire'; +import { sentryErrorReporter } from '../util/alerting.js'; + export class PowerSyncSystem extends system.CorePowerSyncSystem { storage: storage.BucketStorageFactory; pgwire_pool?: pgwire.PgClient; constructor(public config: utils.ResolvedPowerSyncConfig) { - super(config); + super(config, { errorReporter: sentryErrorReporter }); utils.setTags(config.metadata); @@ -32,7 +34,8 @@ export class PowerSyncSystem extends system.CorePowerSyncSystem { }); const database = new storage.PowerSyncMongo(client, { database: config.storage.database }); this.storage = new storage.MongoBucketStorage(database, { - slot_name_prefix: config.slot_name_prefix + slot_name_prefix: config.slot_name_prefix, + errorReporter: this.errorReporter }); } else { throw new Error('No storage configured'); diff --git a/service/src/util/alerting.ts b/service/src/util/alerting.ts new file mode 100644 index 000000000..f2c579eb5 --- /dev/null +++ b/service/src/util/alerting.ts @@ -0,0 +1,66 @@ +import * as sentry_types from '@sentry/types'; +import * as sentry from '@sentry/node'; +import { utils } from '@powersync/service-core'; +import * as framework from '@powersync/service-framework'; + +// Generally ignore errors that are due to configuration issues, rather than +// service bugs. +// These can cause massive volumes of errors on Sentry, and don't add value. +const IGNORE_TYPES = ['AbortError', 'AuthorizationError']; + +const IGNORE_MESSAGES: RegExp[] = [ + /** + * Self hosted cases might want to be notified about these + * messages. + */ + // /^getaddrinfo ENOTFOUND/, + // /^connect ECONNREFUSED/, + // /^Timeout while connecting/, + // /^certificate has expired/ +]; + +export const createSentryReporter = (opts?: { + beforeSend?: (event: sentry_types.Event, hint: sentry_types.EventHint) => any; +}): framework.ErrorReporter => { + if (process.env.SENTRY_DSN) { + sentry.init({ + dsn: process.env.SENTRY_DSN, + release: process.env.SHA, + environment: process.env.MICRO_ENVIRONMENT_NAME, + serverName: process.env.HOST_NAME, + beforeSend: opts?.beforeSend ? 
opts.beforeSend : undefined + }); + } else { + framework.logger.debug( + 'micro-alerts configured with sentry reporter but no SENTRY_DSN environment variable has been set' + ); + } + + return { + captureException: (error, options) => { + sentry.captureException(error, options); + }, + captureMessage: (message, options) => { + sentry.captureMessage(message, options); + } + }; +}; + +export const sentryErrorReporter = createSentryReporter({ + beforeSend: (event, _hint) => { + const error = event.exception?.values?.[0]; + if (error?.type != null && IGNORE_TYPES.includes(error.type)) { + return; + } + const message = error?.value ?? ''; + for (let re of IGNORE_MESSAGES) { + if (re.test(message)) { + return; + } + } + + // Inject our tags + event.tags = Object.assign({}, utils.getGlobalTags(), event.tags); + return event; + } +}); diff --git a/service/src/util/register-alerting.ts b/service/src/util/register-alerting.ts deleted file mode 100644 index cb09fdc40..000000000 --- a/service/src/util/register-alerting.ts +++ /dev/null @@ -1,41 +0,0 @@ -import * as micro from '@journeyapps-platform/micro'; -import { utils } from '@powersync/service-core'; - -// Generally ignore errors that are due to configuration issues, rather than -// service bugs. -// These can cause massive volumes of errors on Sentry, and don't add value. -// These errors are better debugged via Collide. - -const IGNORE_TYPES = [ - 'PgError.28P01', // password authentication failed - 'AbortError', - 'AuthorizationError' -]; - -const IGNORE_MESSAGES = [ - /^getaddrinfo ENOTFOUND/, - /^connect ECONNREFUSED/, - /^Timeout while connecting/, - /^certificate has expired/ -]; - -micro.alerts.register({ - reporter: micro.alerts.reporters.createSentryReporter({ - beforeSend: (event, _hint) => { - const error = event.exception?.values?.[0]; - if (error?.type != null && IGNORE_TYPES.includes(error.type)) { - return; - } - const message = error?.value ?? 
''; - for (let re of IGNORE_MESSAGES) { - if (re.test(message)) { - return; - } - } - - // Inject our tags - event.tags = Object.assign({}, utils.getGlobalTags(), event.tags); - return event; - } - }) -}); From 3f6b138973263247db8cb8386cffc37d6238c19a Mon Sep 17 00:00:00 2001 From: Steven Ontong Date: Tue, 18 Jun 2024 13:45:02 +0200 Subject: [PATCH 14/36] ESM module --- packages/service-framework/package.json | 1 + .../src/alerts/alerts-index.ts | 4 +-- .../src/alerts/definitions.ts | 2 +- .../src/alerts/no-op-reporter.ts | 2 +- .../src/codec/codec-index.ts | 4 +-- .../service-framework/src/codec/parsers.ts | 2 +- .../src/errors/errors-index.ts | 4 +-- .../service-framework/src/errors/utils.ts | 2 +- packages/service-framework/src/index.ts | 30 +++++++++---------- .../src/logger/logger-index.ts | 2 +- .../src/schema/json-schema/parser.ts | 4 +-- .../src/schema/validators/schema-validator.ts | 12 ++++---- .../schema/validators/ts-codec-validator.ts | 6 ++-- .../src/signals/signals-index.ts | 8 ++--- .../src/system/LifeCycledSystem.ts | 6 ++-- .../src/system/system-index.ts | 2 +- .../src/utils/utils-index.ts | 2 +- service/src/runners/server.ts | 4 +++ 18 files changed, 51 insertions(+), 46 deletions(-) diff --git a/packages/service-framework/package.json b/packages/service-framework/package.json index a1874bdd3..366e348c7 100644 --- a/packages/service-framework/package.json +++ b/packages/service-framework/package.json @@ -3,6 +3,7 @@ "repository": "https://github.com/powersync-ja/powersync-service", "version": "0.0.1", "main": "dist/index.js", + "type": "module", "types": "dist/index.d.ts", "license": "FSL-1.1-Apache-2.0", "publishConfig": { diff --git a/packages/service-framework/src/alerts/alerts-index.ts b/packages/service-framework/src/alerts/alerts-index.ts index 32f970c74..b548a32f2 100644 --- a/packages/service-framework/src/alerts/alerts-index.ts +++ b/packages/service-framework/src/alerts/alerts-index.ts @@ -1,2 +1,2 @@ -export * from './definitions'; -export * from './no-op-reporter'; +export * from './definitions.js'; +export * from './no-op-reporter.js'; diff --git a/packages/service-framework/src/alerts/definitions.ts b/packages/service-framework/src/alerts/definitions.ts index 9d2906ff8..64359c0a9 100644 --- a/packages/service-framework/src/alerts/definitions.ts +++ b/packages/service-framework/src/alerts/definitions.ts @@ -1,4 +1,4 @@ -import * as errors from '../errors/errors-index'; +import * as errors from '../errors/errors-index.js'; export type Primitive = string | number | boolean; diff --git a/packages/service-framework/src/alerts/no-op-reporter.ts b/packages/service-framework/src/alerts/no-op-reporter.ts index b8c887665..c5e7ec826 100644 --- a/packages/service-framework/src/alerts/no-op-reporter.ts +++ b/packages/service-framework/src/alerts/no-op-reporter.ts @@ -1,4 +1,4 @@ -import { ErrorReporter } from './definitions'; +import { ErrorReporter } from './definitions.js'; export const NoOpReporter: ErrorReporter = { captureException: () => {}, diff --git a/packages/service-framework/src/codec/codec-index.ts b/packages/service-framework/src/codec/codec-index.ts index d044f08c2..39878d1de 100644 --- a/packages/service-framework/src/codec/codec-index.ts +++ b/packages/service-framework/src/codec/codec-index.ts @@ -1,2 +1,2 @@ -export * from './codecs'; -export * from './parsers'; +export * from './codecs.js'; +export * from './parsers.js'; diff --git a/packages/service-framework/src/codec/parsers.ts b/packages/service-framework/src/codec/parsers.ts index 
038636ebe..66791eafb 100644 --- a/packages/service-framework/src/codec/parsers.ts +++ b/packages/service-framework/src/codec/parsers.ts @@ -1,5 +1,5 @@ -import * as codecs from './codecs'; import * as t from 'ts-codec'; +import * as codecs from './codecs.js'; export const ObjectIdParser = t.createParser(codecs.ObjectId._tag, (_, { target }) => { switch (target) { diff --git a/packages/service-framework/src/errors/errors-index.ts b/packages/service-framework/src/errors/errors-index.ts index 12d153d5c..4075db519 100644 --- a/packages/service-framework/src/errors/errors-index.ts +++ b/packages/service-framework/src/errors/errors-index.ts @@ -1,2 +1,2 @@ -export * from './framework-errors'; -export * from './utils'; +export * from './framework-errors.js'; +export * from './utils.js'; diff --git a/packages/service-framework/src/errors/utils.ts b/packages/service-framework/src/errors/utils.ts index b9737811d..9ff2ec349 100644 --- a/packages/service-framework/src/errors/utils.ts +++ b/packages/service-framework/src/errors/utils.ts @@ -1,4 +1,4 @@ -import { ErrorData, JourneyError } from './framework-errors'; +import { ErrorData, JourneyError } from './framework-errors.js'; export const isJourneyError = (err: any): err is JourneyError => { const matches = err instanceof JourneyError || err.is_journey_error; diff --git a/packages/service-framework/src/index.ts b/packages/service-framework/src/index.ts index 7f128e3e8..c121e2041 100644 --- a/packages/service-framework/src/index.ts +++ b/packages/service-framework/src/index.ts @@ -1,22 +1,22 @@ -export * from './alerts/alerts-index'; -export * as alerts from './alerts/alerts-index'; +export * from './alerts/alerts-index.js'; +export * as alerts from './alerts/alerts-index.js'; -export * from './codec/codec-index'; -export * as codecs from './codec/codec-index'; +export * from './codec/codec-index.js'; +export * as codecs from './codec/codec-index.js'; -export * from './errors/errors-index'; -export * as errors from './errors/errors-index'; +export * from './errors/errors-index.js'; +export * as errors from './errors/errors-index.js'; -export * from './logger/Logger'; +export * from './logger/Logger.js'; -export * from './schema/schema-index'; -export * as schema from './schema/schema-index'; +export * from './schema/schema-index.js'; +export * as schema from './schema/schema-index.js'; -export * from './signals/signals-index'; -export * as signals from './signals/signals-index'; +export * from './signals/signals-index.js'; +export * as signals from './signals/signals-index.js'; -export * from './system/LifeCycledSystem'; -export * as system from './system/system-index'; +export * from './system/LifeCycledSystem.js'; +export * as system from './system/system-index.js'; -export * from './utils/utils-index'; -export * as utils from './utils/utils-index'; +export * from './utils/utils-index.js'; +export * as utils from './utils/utils-index.js'; diff --git a/packages/service-framework/src/logger/logger-index.ts b/packages/service-framework/src/logger/logger-index.ts index 58309e335..c2e6ca6eb 100644 --- a/packages/service-framework/src/logger/logger-index.ts +++ b/packages/service-framework/src/logger/logger-index.ts @@ -1 +1 @@ -export * from './Logger'; +export * from './Logger.js'; diff --git a/packages/service-framework/src/schema/json-schema/parser.ts b/packages/service-framework/src/schema/json-schema/parser.ts index 38f4593f5..aa0371a21 100644 --- a/packages/service-framework/src/schema/json-schema/parser.ts +++ 
b/packages/service-framework/src/schema/json-schema/parser.ts @@ -1,5 +1,5 @@ -import * as schema_validator from '../validators/schema-validator'; -import * as defs from '../definitions'; +import * as schema_validator from '../validators/schema-validator.js'; +import * as defs from '../definitions.js'; /** * Recursively walk a given schema resolving a list of refs that are actively used in some way by the diff --git a/packages/service-framework/src/schema/validators/schema-validator.ts b/packages/service-framework/src/schema/validators/schema-validator.ts index 4ffb603bd..df33bc68f 100644 --- a/packages/service-framework/src/schema/validators/schema-validator.ts +++ b/packages/service-framework/src/schema/validators/schema-validator.ts @@ -1,10 +1,10 @@ // @ts-ignore import AjvErrorFormatter from 'better-ajv-errors'; -import AJV, * as ajv from 'ajv'; +import AJV from 'ajv'; -import * as defs from '../definitions'; -import * as utils from '../utils'; -import * as keywords from '../json-schema/keywords'; +import * as defs from '../definitions.js'; +import * as utils from '../utils.js'; +import * as keywords from '../json-schema/keywords.js'; export class SchemaValidatorError extends Error { constructor(message: string) { @@ -17,7 +17,7 @@ export class SchemaValidatorError extends Error { export type SchemaValidator = defs.MicroValidator; export type CreateSchemaValidatorParams = { - ajv?: ajv.Options; + ajv?: AJV.Options; /** * Allow making the given schema loosely typed to allow accepting additional properties. This @@ -38,7 +38,7 @@ export const createSchemaValidator = ( params: CreateSchemaValidatorParams = {} ): SchemaValidator => { try { - const ajv = new AJV({ + const ajv = new AJV.Ajv({ allErrors: !(params.fail_fast ?? false), keywords: [keywords.BufferNodeType], ...(params.ajv || {}) diff --git a/packages/service-framework/src/schema/validators/ts-codec-validator.ts b/packages/service-framework/src/schema/validators/ts-codec-validator.ts index bfca98340..d343a9a09 100644 --- a/packages/service-framework/src/schema/validators/ts-codec-validator.ts +++ b/packages/service-framework/src/schema/validators/ts-codec-validator.ts @@ -1,8 +1,8 @@ import * as t from 'ts-codec'; -import * as schema_validator from './schema-validator'; -import * as defs from '../definitions'; -import * as codecs from '../../codec/codec-index'; +import * as schema_validator from './schema-validator.js'; +import * as defs from '../definitions.js'; +import * as codecs from '../../codec/codec-index.js'; export type TsCodecValidator< C extends t.AnyCodec, diff --git a/packages/service-framework/src/signals/signals-index.ts b/packages/service-framework/src/signals/signals-index.ts index 8ae0a96af..9e5c16ff9 100644 --- a/packages/service-framework/src/signals/signals-index.ts +++ b/packages/service-framework/src/signals/signals-index.ts @@ -1,4 +1,4 @@ -export * from './probes/fs-probes'; -export * from './probes/memory-probes'; -export * from './probes/probes'; -export * from './termination-handler'; +export * from './probes/fs-probes.js'; +export * from './probes/memory-probes.js'; +export * from './probes/probes.js'; +export * from './termination-handler.js'; diff --git a/packages/service-framework/src/system/LifeCycledSystem.ts b/packages/service-framework/src/system/LifeCycledSystem.ts index 8736a4c2b..d43f871fd 100644 --- a/packages/service-framework/src/system/LifeCycledSystem.ts +++ b/packages/service-framework/src/system/LifeCycledSystem.ts @@ -6,9 +6,9 @@ * A System can contain anything but should offer a 
`start` and `stop` operation */ -import { createFSProbe } from '../signals/probes/fs-probes'; -import { ProbeModule } from '../signals/probes/probes'; -import { TerminationHandler, createTerminationHandler } from '../signals/termination-handler'; +import { createFSProbe } from '../signals/probes/fs-probes.js'; +import { ProbeModule } from '../signals/probes/probes.js'; +import { TerminationHandler, createTerminationHandler } from '../signals/termination-handler.js'; export type LifecycleCallback = (singleton: T) => Promise | void; diff --git a/packages/service-framework/src/system/system-index.ts b/packages/service-framework/src/system/system-index.ts index bbd8eca75..174f25933 100644 --- a/packages/service-framework/src/system/system-index.ts +++ b/packages/service-framework/src/system/system-index.ts @@ -1 +1 @@ -export * from './LifeCycledSystem'; +export * from './LifeCycledSystem.js'; diff --git a/packages/service-framework/src/utils/utils-index.ts b/packages/service-framework/src/utils/utils-index.ts index 29115f7c7..17384042b 100644 --- a/packages/service-framework/src/utils/utils-index.ts +++ b/packages/service-framework/src/utils/utils-index.ts @@ -1 +1 @@ -export * from './environment-variables'; +export * from './environment-variables.js'; diff --git a/service/src/runners/server.ts b/service/src/runners/server.ts index f0c4ff89b..7eccf3345 100644 --- a/service/src/runners/server.ts +++ b/service/src/runners/server.ts @@ -30,6 +30,10 @@ export async function startServer(runnerConfig: utils.RunnerConfig) { }) ); + server.route({ + method: fastify. + }) + server.register(cors, { origin: '*', allowedHeaders: ['Content-Type', 'Authorization'], From e2e2b7bb8f6f81e5a8387bf3aa8cd9a1f390041a Mon Sep 17 00:00:00 2001 From: Steven Ontong Date: Wed, 19 Jun 2024 17:24:47 +0200 Subject: [PATCH 15/36] implement thin fastify router --- .changeset/shaggy-parents-join.md | 5 + .../src/router/ReactiveSocketRouter.ts | 16 +- packages/rsocket-router/src/router/types.ts | 28 ++- .../rsocket-router/tests/src/requests.test.ts | 1 - .../rsocket-router/tests/src/socket.test.ts | 3 +- packages/service-core/package.json | 7 +- packages/service-core/src/routes/admin.ts | 231 ----------------- packages/service-core/src/routes/auth.ts | 34 +-- .../service-core/src/routes/checkpointing.ts | 37 --- packages/service-core/src/routes/dev.ts | 194 -------------- .../src/routes/endpoints/admin.ts | 237 ++++++++++++++++++ .../src/routes/endpoints/checkpointing.ts | 41 +++ .../service-core/src/routes/endpoints/dev.ts | 199 +++++++++++++++ .../routes/endpoints/route-endpoints-index.ts | 6 + .../routes/{ => endpoints}/socket-route.ts | 13 +- .../src/routes/endpoints/sync-rules.ts | 227 +++++++++++++++++ .../src/routes/endpoints/sync-stream.ts | 98 ++++++++ packages/service-core/src/routes/hooks.ts | 47 ++++ .../src/routes/route-generators.ts | 39 --- .../service-core/src/routes/route-register.ts | 106 ++++++++ packages/service-core/src/routes/router.ts | 38 ++- .../service-core/src/routes/routes-index.ts | 9 +- .../service-core/src/routes/sync-rules.ts | 211 ---------------- .../service-core/src/routes/sync-stream.ts | 92 ------- .../src/system/CorePowerSyncSystem.ts | 2 - .../service-framework/src/codec/codecs.ts | 42 ---- .../src/errors/framework-errors.ts | 18 +- .../service-framework/src/errors/utils.ts | 2 +- packages/service-framework/src/index.ts | 3 + .../service-framework/src/router/endpoint.ts | 23 ++ .../src/router/router-definitions.ts | 51 ++++ .../src/router/router-index.ts | 3 + 
.../src/router/router-response.ts | 48 ++++ .../src/schema/schema-index.ts | 10 +- .../service-framework/src/schema/utils.ts | 4 +- .../src/signals/termination-handler.ts | 6 +- .../src/utils/environment-variables.ts | 4 +- .../service-framework/test/src/errors.test.ts | 18 +- pnpm-lock.yaml | 24 +- service/src/routes/router.ts | 31 +-- service/src/runners/server.ts | 97 ++++--- 41 files changed, 1307 insertions(+), 998 deletions(-) create mode 100644 .changeset/shaggy-parents-join.md delete mode 100644 packages/service-core/src/routes/admin.ts delete mode 100644 packages/service-core/src/routes/checkpointing.ts delete mode 100644 packages/service-core/src/routes/dev.ts create mode 100644 packages/service-core/src/routes/endpoints/admin.ts create mode 100644 packages/service-core/src/routes/endpoints/checkpointing.ts create mode 100644 packages/service-core/src/routes/endpoints/dev.ts create mode 100644 packages/service-core/src/routes/endpoints/route-endpoints-index.ts rename packages/service-core/src/routes/{ => endpoints}/socket-route.ts (91%) create mode 100644 packages/service-core/src/routes/endpoints/sync-rules.ts create mode 100644 packages/service-core/src/routes/endpoints/sync-stream.ts create mode 100644 packages/service-core/src/routes/hooks.ts delete mode 100644 packages/service-core/src/routes/route-generators.ts create mode 100644 packages/service-core/src/routes/route-register.ts delete mode 100644 packages/service-core/src/routes/sync-rules.ts delete mode 100644 packages/service-core/src/routes/sync-stream.ts create mode 100644 packages/service-framework/src/router/endpoint.ts create mode 100644 packages/service-framework/src/router/router-definitions.ts create mode 100644 packages/service-framework/src/router/router-index.ts create mode 100644 packages/service-framework/src/router/router-response.ts diff --git a/.changeset/shaggy-parents-join.md b/.changeset/shaggy-parents-join.md new file mode 100644 index 000000000..a9208585f --- /dev/null +++ b/.changeset/shaggy-parents-join.md @@ -0,0 +1,5 @@ +--- +'@powersync/service-core': minor +--- + +Updated router definitions diff --git a/packages/rsocket-router/src/router/ReactiveSocketRouter.ts b/packages/rsocket-router/src/router/ReactiveSocketRouter.ts index f6ac6f6d3..eb4836898 100644 --- a/packages/rsocket-router/src/router/ReactiveSocketRouter.ts +++ b/packages/rsocket-router/src/router/ReactiveSocketRouter.ts @@ -19,7 +19,11 @@ import { WebsocketServerTransport } from './transport/WebSocketServerTransport.j import * as framework from '@powersync/service-framework'; export class ReactiveSocketRouter { - constructor(protected options?: ReactiveSocketRouterOptions) {} + protected activeConnections: number; + + constructor(protected options?: ReactiveSocketRouterOptions) { + this.activeConnections = 0; + } reactiveStream(path: string, stream: IReactiveStreamInput): IReactiveStream { return { @@ -55,6 +59,14 @@ export class ReactiveSocketRouter { transport, acceptor: { accept: async (payload) => { + const { max_concurrent_connections } = this.options ?? 
{}; + if (max_concurrent_connections && this.activeConnections >= max_concurrent_connections) { + throw new framework.errors.JourneyError({ + code: '429', + description: `Maximum active concurrent connections limit has been reached` + }); + } + // Throwing an exception in this context will be returned to the client side request if (!payload.metadata) { // Meta data is required for endpoint handler path matching @@ -74,8 +86,10 @@ export class ReactiveSocketRouter { responder.onComplete(); }); + this.activeConnections++; return { cancel: () => { + this.activeConnections--; observer.triggerCancel(); }, onExtension: () => observer.triggerExtension(), diff --git a/packages/rsocket-router/src/router/types.ts b/packages/rsocket-router/src/router/types.ts index 33ca033b8..4c581d82e 100644 --- a/packages/rsocket-router/src/router/types.ts +++ b/packages/rsocket-router/src/router/types.ts @@ -1,6 +1,8 @@ -import { router as micro_router } from '@journeyapps-platform/micro'; import * as t from 'ts-codec'; +import * as framework from '@powersync/service-framework'; + import { OnExtensionSubscriber, OnNextSubscriber, OnTerminalSubscriber } from 'rsocket-core'; + import { SocketRouterObserver } from './SocketRouterListener.js'; export enum RS_ENDPOINT_TYPE { @@ -14,7 +16,9 @@ export const RSocketRequestMeta = t.object({ export type RequestMeta = t.Decoded; -export type ReactiveSocketRouterOptions = {}; +export type ReactiveSocketRouterOptions = { + max_concurrent_connections?: number; +}; export type SocketResponder = OnTerminalSubscriber & OnNextSubscriber & OnExtensionSubscriber; @@ -27,12 +31,18 @@ export type ReactiveStreamPayload = CommonStreamPayload & { initialN: number; }; -export type IReactiveStream = micro_router.Endpoint< - I, - O, - C, - micro_router.EndpointHandlerPayload & CommonStreamPayload, - micro_router.EndpointHandler & ReactiveStreamPayload, undefined> +export type IReactiveStream = Omit< + framework.router.Endpoint< + I, + O, + C, + framework.router.EndpointHandlerPayload & CommonStreamPayload, + framework.router.EndpointHandler< + framework.router.EndpointHandlerPayload & ReactiveStreamPayload, + undefined + > + >, + 'method' > & { type: RS_ENDPOINT_TYPE.STREAM; /** @@ -42,7 +52,7 @@ export type IReactiveStream = micro_router.Endpoint< decoder?: (rawData?: Buffer) => Promise; }; -export type IReactiveStreamInput = Omit, 'path' | 'type'>; +export type IReactiveStreamInput = Omit, 'path' | 'type' | 'method'>; export type ReactiveEndpoint = IReactiveStream; diff --git a/packages/rsocket-router/tests/src/requests.test.ts b/packages/rsocket-router/tests/src/requests.test.ts index 4594a9aed..165879cc5 100644 --- a/packages/rsocket-router/tests/src/requests.test.ts +++ b/packages/rsocket-router/tests/src/requests.test.ts @@ -1,4 +1,3 @@ -import '@journeyapps-platform/micro/register'; import { describe, expect, it, vi } from 'vitest'; import { createMockObserver, createMockResponder } from './utils/mock-responder.js'; import { handleReactiveStream } from '../../src/router/ReactiveSocketRouter.js'; diff --git a/packages/rsocket-router/tests/src/socket.test.ts b/packages/rsocket-router/tests/src/socket.test.ts index 689f5d493..fb87482d1 100644 --- a/packages/rsocket-router/tests/src/socket.test.ts +++ b/packages/rsocket-router/tests/src/socket.test.ts @@ -1,4 +1,3 @@ -import '@journeyapps-platform/micro/register'; import * as WebSocket from 'ws'; import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'; import { RSocketConnector, RSocketServer } from 'rsocket-core'; @@ 
-161,7 +160,7 @@ describe('Sockets', () => { const connector = new RSocketConnector({ transport: new WebsocketClientTransport({ url: WS_ADDRESS, - wsCreator: (url) => testSocket + wsCreator: (url) => testSocket as any }), setup: { diff --git a/packages/service-core/package.json b/packages/service-core/package.json index c0dbd41f3..3bfd761f1 100644 --- a/packages/service-core/package.json +++ b/packages/service-core/package.json @@ -23,17 +23,17 @@ "@opentelemetry/exporter-prometheus": "^0.51.1", "@opentelemetry/resources": "^1.24.1", "@opentelemetry/sdk-metrics": "1.24.1", + "@powersync/service-framework": "workspace:*", "@powersync/service-jpgwire": "workspace:*", "@powersync/service-jsonbig": "workspace:*", - "@powersync/service-framework": "workspace:*", "@powersync/service-rsocket-router": "workspace:*", "@powersync/service-sync-rules": "workspace:*", "@powersync/service-types": "workspace:*", + "async": "^3.2.4", "async-mutex": "^0.5.0", "bson": "^6.6.0", "commander": "^12.0.0", "cors": "^2.8.5", - "fastify": "4.23.2", "ipaddr.js": "^2.1.0", "ix": "^5.0.0", "jose": "^4.15.1", @@ -48,8 +48,11 @@ "yaml": "^2.3.2" }, "devDependencies": { + "@types/async": "^3.2.24", "@types/lodash": "^4.17.5", "@types/uuid": "^9.0.4", + "fastify": "4.23.2", + "fastify-plugin": "^4.5.1", "typescript": "^5.2.2", "vite-tsconfig-paths": "^4.3.2", "vitest": "^0.34.6" diff --git a/packages/service-core/src/routes/admin.ts b/packages/service-core/src/routes/admin.ts deleted file mode 100644 index 0465011d7..000000000 --- a/packages/service-core/src/routes/admin.ts +++ /dev/null @@ -1,231 +0,0 @@ -import * as framework from '@powersync/service-framework'; -import { SqlSyncRules, SqliteValue, StaticSchema, isJsonValue, toSyncRulesValue } from '@powersync/service-sync-rules'; -import { internal_routes } from '@powersync/service-types'; - -import * as api from '../api/api-index.js'; -import * as util from '../util/util-index.js'; - -import { RouteGenerator } from './router.js'; -import { PersistedSyncRulesContent } from '../storage/BucketStorage.js'; -import { authApi } from './auth.js'; - -const demoCredentials: RouteGenerator = (router) => - router.post('/api/admin/v1/demo-credentials', { - authorize: authApi, - validator: framework.schema.createTsCodecValidator(internal_routes.DemoCredentialsRequest, { - allowAdditional: true - }), - handler: async (payload) => { - const connection = payload.context.system.config.connection; - if (connection == null || !connection.demo_database) { - return internal_routes.DemoCredentialsResponse.encode({}); - } - - const uri = util.buildDemoPgUri(connection); - return internal_routes.DemoCredentialsResponse.encode({ - credentials: { - postgres_uri: uri - } - }); - } - }); - -export const executeSql: RouteGenerator = (router) => - router.post('/api/admin/v1/execute-sql', { - authorize: authApi, - validator: framework.schema.createTsCodecValidator(internal_routes.ExecuteSqlRequest, { allowAdditional: true }), - handler: async (payload) => { - const connection = payload.context.system.config.connection; - if (connection == null || !connection.debug_api) { - return internal_routes.ExecuteSqlResponse.encode({ - results: { - columns: [], - rows: [] - }, - success: false, - error: 'SQL querying is not enabled' - }); - } - - const pool = payload.context.system.requirePgPool(); - - const { query, args } = payload.params.sql; - - try { - const result = await pool.query({ - statement: query, - params: args.map(util.autoParameter) - }); - - return internal_routes.ExecuteSqlResponse.encode({ 
- success: true, - results: { - columns: result.columns.map((c) => c.name), - rows: result.rows.map((row) => { - return row.map((value) => mapColumnValue(toSyncRulesValue(value))); - }) - } - }); - } catch (e) { - return internal_routes.ExecuteSqlResponse.encode({ - results: { - columns: [], - rows: [] - }, - success: false, - error: e.message - }); - } - } - }); - -export const diagnostics: RouteGenerator = (router) => - router.post('/api/admin/v1/diagnostics', { - authorize: authApi, - validator: framework.schema.createTsCodecValidator(internal_routes.DiagnosticsRequest, { allowAdditional: true }), - handler: async (payload) => { - const include_content = payload.params.sync_rules_content ?? false; - const system = payload.context.system; - - const status = await api.getConnectionStatus(system); - if (status == null) { - return internal_routes.DiagnosticsResponse.encode({ - connections: [] - }); - } - - const { storage } = system; - const active = await storage.getActiveSyncRulesContent(); - const next = await storage.getNextSyncRulesContent(); - - const active_status = await api.getSyncRulesStatus(active, system, { - include_content, - check_connection: status.connected, - live_status: true - }); - - const next_status = await api.getSyncRulesStatus(next, system, { - include_content, - check_connection: status.connected, - live_status: true - }); - - return internal_routes.DiagnosticsResponse.encode({ - connections: [status], - active_sync_rules: active_status, - deploying_sync_rules: next_status - }); - } - }); - -export const getSchema: RouteGenerator = (router) => - router.post('/api/admin/v1/schema', { - authorize: authApi, - validator: framework.schema.createTsCodecValidator(internal_routes.GetSchemaRequest, { allowAdditional: true }), - handler: async (payload) => { - const system = payload.context.system; - - return internal_routes.GetSchemaResponse.encode(await api.getConnectionsSchema(system)); - } - }); - -export const reprocess: RouteGenerator = (router) => - router.post('/api/admin/v1/reprocess', { - authorize: authApi, - validator: framework.schema.createTsCodecValidator(internal_routes.ReprocessRequest, { allowAdditional: true }), - handler: async (payload) => { - const system = payload.context.system; - - const storage = system.storage; - const next = await storage.getNextSyncRules(); - if (next != null) { - throw new Error(`Busy processing sync rules - cannot reprocess`); - } - - const active = await storage.getActiveSyncRules(); - if (active == null) { - throw new framework.errors.JourneyError({ - status: 422, - code: 'NO_SYNC_RULES', - description: 'No active sync rules' - }); - } - - const new_rules = await storage.updateSyncRules({ - content: active.sync_rules.content - }); - - return internal_routes.ReprocessResponse.encode({ - connections: [ - { - tag: system.config.connection!.tag, - id: system.config.connection!.id, - slot_name: new_rules.slot_name - } - ] - }); - } - }); - -export const validate: RouteGenerator = (router) => - router.post('/api/admin/v1/validate', { - authorize: authApi, - validator: framework.schema.createTsCodecValidator(internal_routes.ValidateRequest, { allowAdditional: true }), - handler: async (payload) => { - const system = payload.context.system; - - const content = payload.params.sync_rules; - - const schemaData = await api.getConnectionsSchema(system); - const schema = new StaticSchema(schemaData.connections); - - const sync_rules: PersistedSyncRulesContent = { - // Dummy values - id: 0, - slot_name: '', - - parsed() { - return { - 
...this, - sync_rules: SqlSyncRules.fromYaml(content, { throwOnError: false, schema }) - }; - }, - sync_rules_content: content, - async lock() { - throw new Error('Lock not implemented'); - } - }; - - const connectionStatus = await api.getConnectionStatus(system); - if (connectionStatus == null) { - return internal_routes.ValidateResponse.encode({ - errors: [{ level: 'fatal', message: 'No connection configured' }], - connections: [] - }); - } - - const status = (await api.getSyncRulesStatus(sync_rules, system, { - include_content: false, - check_connection: connectionStatus?.connected, - live_status: false - }))!; - - if (connectionStatus == null) { - status.errors.push({ level: 'fatal', message: 'No connection configured' }); - } - - return internal_routes.ValidateResponse.encode(status); - } - }); - -function mapColumnValue(value: SqliteValue) { - if (typeof value == 'bigint') { - return Number(value); - } else if (isJsonValue(value)) { - return value; - } else { - return null; - } -} - -export const admin_routes = [demoCredentials, executeSql, diagnostics, getSchema, reprocess, validate]; diff --git a/packages/service-core/src/routes/auth.ts b/packages/service-core/src/routes/auth.ts index a8955ef61..896811b71 100644 --- a/packages/service-core/src/routes/auth.ts +++ b/packages/service-core/src/routes/auth.ts @@ -1,19 +1,17 @@ -import * as micro from '@journeyapps-platform/micro'; -import { FastifyRequest } from 'fastify'; import * as jose from 'jose'; import * as auth from '../auth/auth-index.js'; import * as util from '../util/util-index.js'; -import { Context } from './router.js'; +import { BasicRouterRequest, Context, RequestEndpointHandlerPayload } from './router.js'; import { CorePowerSyncSystem } from '../system/CorePowerSyncSystem.js'; -export function endpoint(req: FastifyRequest) { +export function endpoint(req: BasicRouterRequest) { const protocol = req.headers['x-forwarded-proto'] ?? req.protocol; const host = req.hostname; return `${protocol}://${host}`; } -function devAudience(req: FastifyRequest): string { +function devAudience(req: BasicRouterRequest): string { return `${endpoint(req)}/dev`; } @@ -22,7 +20,7 @@ function devAudience(req: FastifyRequest): string { * * Will be replaced by temporary tokens issued by PowerSync Management service. 
*/ -export async function issueDevToken(req: FastifyRequest, user_id: string, config: util.ResolvedPowerSyncConfig) { +export async function issueDevToken(req: BasicRouterRequest, user_id: string, config: util.ResolvedPowerSyncConfig) { const iss = devAudience(req); const aud = devAudience(req); @@ -42,7 +40,11 @@ export async function issueDevToken(req: FastifyRequest, user_id: string, config } /** @deprecated */ -export async function issueLegacyDevToken(req: FastifyRequest, user_id: string, config: util.ResolvedPowerSyncConfig) { +export async function issueLegacyDevToken( + req: BasicRouterRequest, + user_id: string, + config: util.ResolvedPowerSyncConfig +) { const iss = devAudience(req); const aud = config.jwt_audiences[0]; @@ -61,7 +63,11 @@ export async function issueLegacyDevToken(req: FastifyRequest, user_id: string, .sign(key.key); } -export async function issuePowerSyncToken(req: FastifyRequest, user_id: string, config: util.ResolvedPowerSyncConfig) { +export async function issuePowerSyncToken( + req: BasicRouterRequest, + user_id: string, + config: util.ResolvedPowerSyncConfig +) { const iss = devAudience(req); const aud = config.jwt_audiences[0]; const key = config.dev.dev_key; @@ -89,8 +95,8 @@ export function getTokenFromHeader(authHeader: string = ''): string | null { return token ?? null; } -export const authUser = async (payload: micro.fastify.FastifyHandlerPayload) => { - return authorizeUser(payload.context, payload.request.headers.authorization); +export const authUser = async (payload: RequestEndpointHandlerPayload) => { + return authorizeUser(payload.context, payload.request.headers.authorization as string); }; export async function authorizeUser(context: Context, authHeader: string = '') { @@ -142,9 +148,9 @@ export async function generateContext(system: CorePowerSyncSystem, token: string /** * @deprecated */ -export const authDevUser = async (payload: micro.fastify.FastifyHandlerPayload) => { +export const authDevUser = async (payload: RequestEndpointHandlerPayload) => { const context = payload.context; - const token = getTokenFromHeader(payload.request.headers.authorization); + const token = getTokenFromHeader(payload.request.headers.authorization as string); if (!context.system.config.dev.demo_auth) { return { authorized: false, @@ -179,7 +185,7 @@ export const authDevUser = async (payload: micro.fastify.FastifyHandlerPayload) => { +export const authApi = (payload: RequestEndpointHandlerPayload) => { const context = payload.context; const api_keys = context.system.config.api_tokens; if (api_keys.length == 0) { @@ -188,7 +194,7 @@ export const authApi = (payload: micro.fastify.FastifyHandlerPayload - router.get('/write-checkpoint.json', { - authorize: authUser, - validator: framework.schema.createTsCodecValidator(WriteCheckpointRequest, { allowAdditional: true }), - handler: async (payload) => { - const system = payload.context.system; - const storage = system.storage; - - const checkpoint = await util.getClientCheckpoint(system.requirePgPool(), storage); - return { - checkpoint - }; - } - }); - -export const writeCheckpoint2: RouteGenerator = (router) => - router.get('/write-checkpoint2.json', { - authorize: authUser, - validator: framework.schema.createTsCodecValidator(WriteCheckpointRequest, { allowAdditional: true }), - handler: async (payload) => { - const { user_id, system } = payload.context; - const storage = system.storage; - const write_checkpoint = await util.createWriteCheckpoint(system.requirePgPool(), storage, user_id!); - return { - 
write_checkpoint: String(write_checkpoint) - }; - } - }); diff --git a/packages/service-core/src/routes/dev.ts b/packages/service-core/src/routes/dev.ts deleted file mode 100644 index 11a800ee4..000000000 --- a/packages/service-core/src/routes/dev.ts +++ /dev/null @@ -1,194 +0,0 @@ -import * as t from 'ts-codec'; -import * as framework from '@powersync/service-framework'; -import * as pgwire from '@powersync/service-jpgwire'; - -import * as util from '../util/util-index.js'; -import { authDevUser, authUser, endpoint, issueDevToken, issueLegacyDevToken, issuePowerSyncToken } from './auth.js'; -import { RouteGenerator } from './router.js'; - -const AuthParams = t.object({ - user: t.string, - password: t.string -}); - -// For legacy web client only. Remove soon. -export const auth: RouteGenerator = (router) => - router.post('/auth.json', { - validator: framework.schema.createTsCodecValidator(AuthParams, { allowAdditional: true }), - handler: async (payload) => { - const { user, password } = payload.params; - const config = payload.context.system.config; - - if (config.dev.demo_auth == false || config.dev.demo_password == null) { - throw new framework.errors.AuthorizationError(['Demo auth disabled']); - } - - if (password == config.dev.demo_password) { - const token = await issueLegacyDevToken(payload.request, user, payload.context.system.config); - return { token, user_id: user, endpoint: endpoint(payload.request) }; - } else { - throw new framework.errors.AuthorizationError(['Authentication failed']); - } - } - }); - -export const auth2: RouteGenerator = (router) => - router.post('/dev/auth.json', { - validator: framework.schema.createTsCodecValidator(AuthParams, { allowAdditional: true }), - handler: async (payload) => { - const { user, password } = payload.params; - const config = payload.context.system.config; - - if (config.dev.demo_auth == false || config.dev.demo_password == null) { - throw new framework.errors.AuthorizationError(['Demo auth disabled']); - } - - if (password == config.dev.demo_password) { - const token = await issueDevToken(payload.request, user, payload.context.system.config); - return { token, user_id: user }; - } else { - throw new framework.errors.AuthorizationError(['Authentication failed']); - } - } - }); - -const TokenParams = t.object({}); - -export const token: RouteGenerator = (router) => - router.post('/dev/token.json', { - validator: framework.schema.createTsCodecValidator(TokenParams, { allowAdditional: true }), - authorize: authDevUser, - handler: async (payload) => { - const { user_id } = payload.context; - const outToken = await issuePowerSyncToken(payload.request, user_id!, payload.context.system.config); - return { token: outToken, user_id: user_id, endpoint: endpoint(payload.request) }; - } - }); - -const OpType = { - PUT: 'PUT', - PATCH: 'PATCH', - DELETE: 'DELETE' -}; - -const CrudEntry = t.object({ - op: t.Enum(OpType), - type: t.string, - id: t.string, - op_id: t.number.optional(), - data: t.any.optional() -}); - -const CrudRequest = t.object({ - data: t.array(CrudEntry), - write_checkpoint: t.boolean.optional() -}); -export const crud: RouteGenerator = (router) => - router.post('/crud.json', { - validator: framework.schema.createTsCodecValidator(CrudRequest, { allowAdditional: true }), - authorize: authUser, - - handler: async (payload) => { - const { user_id, system } = payload.context; - - const pool = system.requirePgPool(); - - if (!system.config.dev.crud_api) { - throw new Error('CRUD api disabled'); - } - - const params = payload.params; - 
- let statements: pgwire.Statement[] = []; - - // Implementation note: - // Postgres does automatic "assigment cast" for query literals, - // e.g. a string literal to uuid. However, the same doesn't apply - // to query parameters. - // To handle those automatically, we use `json_populate_record` - // to automatically cast to the correct types. - - for (let op of params.data) { - const table = util.escapeIdentifier(op.type); - if (op.op == 'PUT') { - const data = op.data as Record; - const with_id = { ...data, id: op.id }; - - const columnsEscaped = Object.keys(with_id).map(util.escapeIdentifier); - const columnsJoined = columnsEscaped.join(', '); - - let updateClauses: string[] = []; - - for (let key of Object.keys(data)) { - updateClauses.push(`${util.escapeIdentifier(key)} = EXCLUDED.${util.escapeIdentifier(key)}`); - } - - const updateClause = updateClauses.length > 0 ? `DO UPDATE SET ${updateClauses.join(', ')}` : `DO NOTHING`; - - const statement = ` - WITH data_row AS ( - SELECT (json_populate_record(null::${table}, $1::json)).* - ) - INSERT INTO ${table} (${columnsJoined}) - SELECT ${columnsJoined} FROM data_row - ON CONFLICT(id) ${updateClause}`; - - statements.push({ - statement: statement, - params: [{ type: 'varchar', value: JSON.stringify(with_id) }] - }); - } else if (op.op == 'PATCH') { - const data = op.data as Record; - const with_id = { ...data, id: op.id }; - - let updateClauses: string[] = []; - - for (let key of Object.keys(data)) { - updateClauses.push(`${util.escapeIdentifier(key)} = data_row.${util.escapeIdentifier(key)}`); - } - - const statement = ` - WITH data_row AS ( - SELECT (json_populate_record(null::${table}, $1::json)).* - ) - UPDATE ${table} - SET ${updateClauses.join(', ')} - FROM data_row - WHERE ${table}.id = data_row.id`; - - statements.push({ - statement: statement, - params: [{ type: 'varchar', value: JSON.stringify(with_id) }] - }); - } else if (op.op == 'DELETE') { - statements.push({ - statement: ` - WITH data_row AS ( - SELECT (json_populate_record(null::${table}, $1::json)).* - ) - DELETE FROM ${table} - USING data_row - WHERE ${table}.id = data_row.id`, - params: [{ type: 'varchar', value: JSON.stringify({ id: op.id }) }] - }); - } - } - await pool.query(...statements); - - const storage = system.storage; - if (payload.params.write_checkpoint === true) { - const write_checkpoint = await util.createWriteCheckpoint(pool, storage, payload.context.user_id!); - return { write_checkpoint: String(write_checkpoint) }; - } else if (payload.params.write_checkpoint === false) { - return {}; - } else { - // Legacy - const checkpoint = await util.getClientCheckpoint(pool, storage); - return { - checkpoint - }; - } - } - }); - -export const dev_routes = [auth, auth2, token, crud]; diff --git a/packages/service-core/src/routes/endpoints/admin.ts b/packages/service-core/src/routes/endpoints/admin.ts new file mode 100644 index 000000000..b0b06e5f4 --- /dev/null +++ b/packages/service-core/src/routes/endpoints/admin.ts @@ -0,0 +1,237 @@ +import * as framework from '@powersync/service-framework'; +import { SqlSyncRules, SqliteValue, StaticSchema, isJsonValue, toSyncRulesValue } from '@powersync/service-sync-rules'; +import { internal_routes } from '@powersync/service-types'; + +import * as api from '../../api/api-index.js'; +import * as util from '../../util/util-index.js'; + +import { routeDefinition } from '../router.js'; +import { PersistedSyncRulesContent } from '../../storage/BucketStorage.js'; +import { authApi } from '../auth.js'; + +const demoCredentials 
= routeDefinition({ + path: '/api/admin/v1/demo-credentials', + method: framework.router.HTTPMethod.POST, + authorize: authApi, + validator: framework.schema.createTsCodecValidator(internal_routes.DemoCredentialsRequest, { + allowAdditional: true + }), + handler: async (payload) => { + const connection = payload.context.system.config.connection; + if (connection == null || !connection.demo_database) { + return internal_routes.DemoCredentialsResponse.encode({}); + } + + const uri = util.buildDemoPgUri(connection); + return internal_routes.DemoCredentialsResponse.encode({ + credentials: { + postgres_uri: uri + } + }); + } +}); + +export const executeSql = routeDefinition({ + path: '/api/admin/v1/execute-sql', + method: framework.router.HTTPMethod.POST, + authorize: authApi, + validator: framework.schema.createTsCodecValidator(internal_routes.ExecuteSqlRequest, { allowAdditional: true }), + handler: async (payload) => { + const connection = payload.context.system.config.connection; + if (connection == null || !connection.debug_api) { + return internal_routes.ExecuteSqlResponse.encode({ + results: { + columns: [], + rows: [] + }, + success: false, + error: 'SQL querying is not enabled' + }); + } + + const pool = payload.context.system.requirePgPool(); + + const { query, args } = payload.params.sql; + + try { + const result = await pool.query({ + statement: query, + params: args.map(util.autoParameter) + }); + + return internal_routes.ExecuteSqlResponse.encode({ + success: true, + results: { + columns: result.columns.map((c) => c.name), + rows: result.rows.map((row) => { + return row.map((value) => mapColumnValue(toSyncRulesValue(value))); + }) + } + }); + } catch (e) { + return internal_routes.ExecuteSqlResponse.encode({ + results: { + columns: [], + rows: [] + }, + success: false, + error: e.message + }); + } + } +}); + +export const diagnostics = routeDefinition({ + path: '/api/admin/v1/diagnostics', + method: framework.HTTPMethod.POST, + authorize: authApi, + validator: framework.schema.createTsCodecValidator(internal_routes.DiagnosticsRequest, { allowAdditional: true }), + handler: async (payload) => { + const include_content = payload.params.sync_rules_content ?? 
false; + const system = payload.context.system; + + const status = await api.getConnectionStatus(system); + if (status == null) { + return internal_routes.DiagnosticsResponse.encode({ + connections: [] + }); + } + + const { storage } = system; + const active = await storage.getActiveSyncRulesContent(); + const next = await storage.getNextSyncRulesContent(); + + const active_status = await api.getSyncRulesStatus(active, system, { + include_content, + check_connection: status.connected, + live_status: true + }); + + const next_status = await api.getSyncRulesStatus(next, system, { + include_content, + check_connection: status.connected, + live_status: true + }); + + return internal_routes.DiagnosticsResponse.encode({ + connections: [status], + active_sync_rules: active_status, + deploying_sync_rules: next_status + }); + } +}); + +export const getSchema = routeDefinition({ + path: '/api/admin/v1/schema', + method: framework.router.HTTPMethod.POST, + authorize: authApi, + validator: framework.schema.createTsCodecValidator(internal_routes.GetSchemaRequest, { allowAdditional: true }), + handler: async (payload) => { + const system = payload.context.system; + + return internal_routes.GetSchemaResponse.encode(await api.getConnectionsSchema(system)); + } +}); + +export const reprocess = routeDefinition({ + path: '/api/admin/v1/reprocess', + method: framework.router.HTTPMethod.POST, + authorize: authApi, + validator: framework.schema.createTsCodecValidator(internal_routes.ReprocessRequest, { allowAdditional: true }), + handler: async (payload) => { + const system = payload.context.system; + + const storage = system.storage; + const next = await storage.getNextSyncRules(); + if (next != null) { + throw new Error(`Busy processing sync rules - cannot reprocess`); + } + + const active = await storage.getActiveSyncRules(); + if (active == null) { + throw new framework.errors.JourneyError({ + status: 422, + code: 'NO_SYNC_RULES', + description: 'No active sync rules' + }); + } + + const new_rules = await storage.updateSyncRules({ + content: active.sync_rules.content + }); + + return internal_routes.ReprocessResponse.encode({ + connections: [ + { + tag: system.config.connection!.tag, + id: system.config.connection!.id, + slot_name: new_rules.slot_name + } + ] + }); + } +}); + +export const validate = routeDefinition({ + path: '/api/admin/v1/validate', + method: framework.router.HTTPMethod.POST, + authorize: authApi, + validator: framework.schema.createTsCodecValidator(internal_routes.ValidateRequest, { allowAdditional: true }), + handler: async (payload) => { + const system = payload.context.system; + + const content = payload.params.sync_rules; + + const schemaData = await api.getConnectionsSchema(system); + const schema = new StaticSchema(schemaData.connections); + + const sync_rules: PersistedSyncRulesContent = { + // Dummy values + id: 0, + slot_name: '', + + parsed() { + return { + ...this, + sync_rules: SqlSyncRules.fromYaml(content, { throwOnError: false, schema }) + }; + }, + sync_rules_content: content, + async lock() { + throw new Error('Lock not implemented'); + } + }; + + const connectionStatus = await api.getConnectionStatus(system); + if (connectionStatus == null) { + return internal_routes.ValidateResponse.encode({ + errors: [{ level: 'fatal', message: 'No connection configured' }], + connections: [] + }); + } + + const status = (await api.getSyncRulesStatus(sync_rules, system, { + include_content: false, + check_connection: connectionStatus?.connected, + live_status: false + }))!; + + if 
(connectionStatus == null) { + status.errors.push({ level: 'fatal', message: 'No connection configured' }); + } + + return internal_routes.ValidateResponse.encode(status); + } +}); + +function mapColumnValue(value: SqliteValue) { + if (typeof value == 'bigint') { + return Number(value); + } else if (isJsonValue(value)) { + return value; + } else { + return null; + } +} + +export const ADMIN_ROUTES = [demoCredentials, executeSql, diagnostics, getSchema, reprocess, validate]; diff --git a/packages/service-core/src/routes/endpoints/checkpointing.ts b/packages/service-core/src/routes/endpoints/checkpointing.ts new file mode 100644 index 000000000..44d7d59ff --- /dev/null +++ b/packages/service-core/src/routes/endpoints/checkpointing.ts @@ -0,0 +1,41 @@ +import * as t from 'ts-codec'; +import * as framework from '@powersync/service-framework'; + +import * as util from '../../util/util-index.js'; +import { authUser } from '../auth.js'; +import { routeDefinition } from '../router.js'; + +const WriteCheckpointRequest = t.object({}); + +export const writeCheckpoint = routeDefinition({ + path: '/write-checkpoint.json', + method: framework.router.HTTPMethod.GET, + authorize: authUser, + validator: framework.schema.createTsCodecValidator(WriteCheckpointRequest, { allowAdditional: true }), + handler: async (payload) => { + const system = payload.context.system; + const storage = system.storage; + + const checkpoint = await util.getClientCheckpoint(system.requirePgPool(), storage); + return { + checkpoint + }; + } +}); + +export const writeCheckpoint2 = routeDefinition({ + path: '/write-checkpoint2.json', + method: framework.router.HTTPMethod.GET, + authorize: authUser, + validator: framework.schema.createTsCodecValidator(WriteCheckpointRequest, { allowAdditional: true }), + handler: async (payload) => { + const { user_id, system } = payload.context; + const storage = system.storage; + const write_checkpoint = await util.createWriteCheckpoint(system.requirePgPool(), storage, user_id!); + return { + write_checkpoint: String(write_checkpoint) + }; + } +}); + +export const CHECKPOINT_ROUTES = [writeCheckpoint, writeCheckpoint2]; diff --git a/packages/service-core/src/routes/endpoints/dev.ts b/packages/service-core/src/routes/endpoints/dev.ts new file mode 100644 index 000000000..799d45ce4 --- /dev/null +++ b/packages/service-core/src/routes/endpoints/dev.ts @@ -0,0 +1,199 @@ +import * as t from 'ts-codec'; +import * as framework from '@powersync/service-framework'; +import * as pgwire from '@powersync/service-jpgwire'; + +import * as util from '../../util/util-index.js'; +import { authDevUser, authUser, endpoint, issueDevToken, issueLegacyDevToken, issuePowerSyncToken } from '../auth.js'; +import { routeDefinition } from '../router.js'; + +const AuthParams = t.object({ + user: t.string, + password: t.string +}); + +// For legacy web client only. Remove soon. 
+export const auth = routeDefinition({ + path: '/auth.json', + method: framework.router.HTTPMethod.POST, + validator: framework.schema.createTsCodecValidator(AuthParams, { allowAdditional: true }), + handler: async (payload) => { + const { user, password } = payload.params; + const config = payload.context.system.config; + + if (config.dev.demo_auth == false || config.dev.demo_password == null) { + throw new framework.errors.AuthorizationError(['Demo auth disabled']); + } + + if (password == config.dev.demo_password) { + const token = await issueLegacyDevToken(payload.request, user, payload.context.system.config); + return { token, user_id: user, endpoint: endpoint(payload.request) }; + } else { + throw new framework.errors.AuthorizationError(['Authentication failed']); + } + } +}); + +export const auth2 = routeDefinition({ + path: '/dev/auth.json', + method: framework.router.HTTPMethod.POST, + validator: framework.schema.createTsCodecValidator(AuthParams, { allowAdditional: true }), + handler: async (payload) => { + const { user, password } = payload.params; + const config = payload.context.system.config; + + if (config.dev.demo_auth == false || config.dev.demo_password == null) { + throw new framework.errors.AuthorizationError(['Demo auth disabled']); + } + + if (password == config.dev.demo_password) { + const token = await issueDevToken(payload.request, user, payload.context.system.config); + return { token, user_id: user }; + } else { + throw new framework.errors.AuthorizationError(['Authentication failed']); + } + } +}); + +const TokenParams = t.object({}); + +export const token = routeDefinition({ + path: '/dev/token.json', + method: framework.router.HTTPMethod.POST, + validator: framework.schema.createTsCodecValidator(TokenParams, { allowAdditional: true }), + authorize: authDevUser, + handler: async (payload) => { + const { user_id } = payload.context; + const outToken = await issuePowerSyncToken(payload.request, user_id!, payload.context.system.config); + return { token: outToken, user_id: user_id, endpoint: endpoint(payload.request) }; + } +}); + +const OpType = { + PUT: 'PUT', + PATCH: 'PATCH', + DELETE: 'DELETE' +}; + +const CrudEntry = t.object({ + op: t.Enum(OpType), + type: t.string, + id: t.string, + op_id: t.number.optional(), + data: t.any.optional() +}); + +const CrudRequest = t.object({ + data: t.array(CrudEntry), + write_checkpoint: t.boolean.optional() +}); + +export const crud = routeDefinition({ + path: '/crud.json', + method: framework.router.HTTPMethod.POST, + validator: framework.schema.createTsCodecValidator(CrudRequest, { allowAdditional: true }), + authorize: authUser, + + handler: async (payload) => { + const { user_id, system } = payload.context; + + const pool = system.requirePgPool(); + + if (!system.config.dev.crud_api) { + throw new Error('CRUD api disabled'); + } + + const params = payload.params; + + let statements: pgwire.Statement[] = []; + + // Implementation note: + // Postgres does automatic "assigment cast" for query literals, + // e.g. a string literal to uuid. However, the same doesn't apply + // to query parameters. + // To handle those automatically, we use `json_populate_record` + // to automatically cast to the correct types. 
+ + for (let op of params.data) { + const table = util.escapeIdentifier(op.type); + if (op.op == 'PUT') { + const data = op.data as Record; + const with_id = { ...data, id: op.id }; + + const columnsEscaped = Object.keys(with_id).map(util.escapeIdentifier); + const columnsJoined = columnsEscaped.join(', '); + + let updateClauses: string[] = []; + + for (let key of Object.keys(data)) { + updateClauses.push(`${util.escapeIdentifier(key)} = EXCLUDED.${util.escapeIdentifier(key)}`); + } + + const updateClause = updateClauses.length > 0 ? `DO UPDATE SET ${updateClauses.join(', ')}` : `DO NOTHING`; + + const statement = ` + WITH data_row AS ( + SELECT (json_populate_record(null::${table}, $1::json)).* + ) + INSERT INTO ${table} (${columnsJoined}) + SELECT ${columnsJoined} FROM data_row + ON CONFLICT(id) ${updateClause}`; + + statements.push({ + statement: statement, + params: [{ type: 'varchar', value: JSON.stringify(with_id) }] + }); + } else if (op.op == 'PATCH') { + const data = op.data as Record; + const with_id = { ...data, id: op.id }; + + let updateClauses: string[] = []; + + for (let key of Object.keys(data)) { + updateClauses.push(`${util.escapeIdentifier(key)} = data_row.${util.escapeIdentifier(key)}`); + } + + const statement = ` + WITH data_row AS ( + SELECT (json_populate_record(null::${table}, $1::json)).* + ) + UPDATE ${table} + SET ${updateClauses.join(', ')} + FROM data_row + WHERE ${table}.id = data_row.id`; + + statements.push({ + statement: statement, + params: [{ type: 'varchar', value: JSON.stringify(with_id) }] + }); + } else if (op.op == 'DELETE') { + statements.push({ + statement: ` + WITH data_row AS ( + SELECT (json_populate_record(null::${table}, $1::json)).* + ) + DELETE FROM ${table} + USING data_row + WHERE ${table}.id = data_row.id`, + params: [{ type: 'varchar', value: JSON.stringify({ id: op.id }) }] + }); + } + } + await pool.query(...statements); + + const storage = system.storage; + if (payload.params.write_checkpoint === true) { + const write_checkpoint = await util.createWriteCheckpoint(pool, storage, payload.context.user_id!); + return { write_checkpoint: String(write_checkpoint) }; + } else if (payload.params.write_checkpoint === false) { + return {}; + } else { + // Legacy + const checkpoint = await util.getClientCheckpoint(pool, storage); + return { + checkpoint + }; + } + } +}); + +export const DEV_ROUTES = [auth, auth2, token, crud]; diff --git a/packages/service-core/src/routes/endpoints/route-endpoints-index.ts b/packages/service-core/src/routes/endpoints/route-endpoints-index.ts new file mode 100644 index 000000000..e20ac7c52 --- /dev/null +++ b/packages/service-core/src/routes/endpoints/route-endpoints-index.ts @@ -0,0 +1,6 @@ +export * from './admin.js'; +export * from './checkpointing.js'; +export * from './dev.js'; +export * from './socket-route.js'; +export * from './sync-rules.js'; +export * from './sync-stream.js'; diff --git a/packages/service-core/src/routes/socket-route.ts b/packages/service-core/src/routes/endpoints/socket-route.ts similarity index 91% rename from packages/service-core/src/routes/socket-route.ts rename to packages/service-core/src/routes/endpoints/socket-route.ts index 0f905c854..65ad838b3 100644 --- a/packages/service-core/src/routes/socket-route.ts +++ b/packages/service-core/src/routes/endpoints/socket-route.ts @@ -2,14 +2,13 @@ import { serialize } from 'bson'; import { SyncParameters, normalizeTokenParameters } from '@powersync/service-sync-rules'; import * as framework from '@powersync/service-framework'; -import * 
as util from '../util/util-index.js'; -import { streamResponse } from '../sync/sync.js'; +import * as util from '../../util/util-index.js'; +import { streamResponse } from '../../sync/sync.js'; import { SyncRoutes } from './sync-stream.js'; -import { SocketRouteGenerator } from './router-socket.js'; -import { Metrics } from '../metrics/Metrics.js'; -import { logger } from '@powersync/service-framework'; +import { SocketRouteGenerator } from '../router-socket.js'; +import { Metrics } from '../../metrics/Metrics.js'; -export const sync_stream_reactive: SocketRouteGenerator = (router) => +export const syncStreamReactive: SocketRouteGenerator = (router) => router.reactiveStream(SyncRoutes.STREAM, { authorize: ({ context }) => { return { @@ -120,7 +119,7 @@ export const sync_stream_reactive: SocketRouteGenerator = (router) => // Convert to our standard form before responding. // This ensures the error can be serialized. const error = new framework.errors.InternalServerError(ex); - logger.error('Sync stream error', error); + framework.logger.error('Sync stream error', error); responder.onError(error); } finally { responder.onComplete(); diff --git a/packages/service-core/src/routes/endpoints/sync-rules.ts b/packages/service-core/src/routes/endpoints/sync-rules.ts new file mode 100644 index 000000000..77e052639 --- /dev/null +++ b/packages/service-core/src/routes/endpoints/sync-rules.ts @@ -0,0 +1,227 @@ +import * as t from 'ts-codec'; +import { FastifyPluginAsync } from 'fastify'; +import * as framework from '@powersync/service-framework'; +import * as pgwire from '@powersync/service-jpgwire'; +import { SqlSyncRules, SyncRulesErrors } from '@powersync/service-sync-rules'; + +import * as replication from '../../replication/replication-index.js'; +import { authApi } from '../auth.js'; +import { routeDefinition } from '../router.js'; + +const DeploySyncRulesRequest = t.object({ + content: t.string +}); + +export const yamlPlugin: FastifyPluginAsync = async (fastify) => { + fastify.addContentTypeParser('application/yaml', async (request, payload, _d) => { + const data: any[] = []; + for await (const chunk of payload) { + data.push(chunk); + } + + request.params = { content: Buffer.concat(data).toString('utf8') }; + }); +}; + +/** + * Declares the plugin should be available on the same scope + * without requiring the `fastify-plugin` package as a dependency. + * https://fastify.dev/docs/latest/Reference/Plugins/#handle-the-scope + */ +//@ts-expect-error +yamlPlugin[Symbol.for('skip-override')] = true; + +export const deploySyncRules = routeDefinition({ + path: '/api/sync-rules/v1/deploy', + method: framework.router.HTTPMethod.POST, + authorize: authApi, + parse: true, + plugins: [yamlPlugin], + validator: framework.schema.createTsCodecValidator(DeploySyncRulesRequest, { allowAdditional: true }), + handler: async (payload) => { + if (payload.context.system.config.sync_rules.present) { + // If sync rules are configured via the config, disable deploy via the API. 
+ throw new framework.errors.JourneyError({ + status: 422, + code: 'API_DISABLED', + description: 'Sync rules API disabled', + details: 'Use the management API to deploy sync rules' + }); + } + const content = payload.params.content; + + try { + SqlSyncRules.fromYaml(payload.params.content); + } catch (e) { + throw new framework.errors.JourneyError({ + status: 422, + code: 'INVALID_SYNC_RULES', + description: 'Sync rules parsing failed', + details: e.message + }); + } + + const sync_rules = await payload.context.system.storage.updateSyncRules({ + content: content + }); + + return { + slot_name: sync_rules.slot_name + }; + } +}); + +const ValidateSyncRulesRequest = t.object({ + content: t.string +}); + +export const validateSyncRules = routeDefinition({ + path: '/api/sync-rules/v1/validate', + method: framework.router.HTTPMethod.POST, + authorize: authApi, + parse: true, + plugins: [yamlPlugin], + validator: framework.schema.createTsCodecValidator(ValidateSyncRulesRequest, { allowAdditional: true }), + handler: async (payload) => { + const content = payload.params.content; + + const info = await debugSyncRules(payload.context.system.requirePgPool(), content); + + return replyPrettyJson(info); + } +}); + +export const currentSyncRules = routeDefinition({ + path: '/api/sync-rules/v1/current', + method: framework.router.HTTPMethod.GET, + authorize: authApi, + handler: async (payload) => { + const storage = payload.context.system.storage; + const sync_rules = await storage.getActiveSyncRulesContent(); + if (!sync_rules) { + throw new framework.errors.JourneyError({ + status: 422, + code: 'NO_SYNC_RULES', + description: 'No active sync rules' + }); + } + const info = await debugSyncRules(payload.context.system.requirePgPool(), sync_rules.sync_rules_content); + const next = await storage.getNextSyncRulesContent(); + + const next_info = next + ? await debugSyncRules(payload.context.system.requirePgPool(), next.sync_rules_content) + : null; + + const response = { + current: { + slot_name: sync_rules.slot_name, + content: sync_rules.sync_rules_content, + ...info + }, + next: + next == null + ? 
null + : { + slot_name: next.slot_name, + content: next.sync_rules_content, + ...next_info + } + }; + + return replyPrettyJson({ data: response }); + } +}); + +const ReprocessSyncRulesRequest = t.object({}); + +export const reprocessSyncRules = routeDefinition({ + path: '/api/sync-rules/v1/reprocess', + method: framework.router.HTTPMethod.POST, + authorize: authApi, + validator: framework.schema.createTsCodecValidator(ReprocessSyncRulesRequest), + handler: async (payload) => { + const storage = payload.context.system.storage; + const sync_rules = await storage.getActiveSyncRules(); + if (sync_rules == null) { + throw new framework.errors.JourneyError({ + status: 422, + code: 'NO_SYNC_RULES', + description: 'No active sync rules' + }); + } + + const new_rules = await storage.updateSyncRules({ + content: sync_rules.sync_rules.content + }); + return { + slot_name: new_rules.slot_name + }; + } +}); + +export const SYNC_RULES_ROUTES = [validateSyncRules, deploySyncRules, reprocessSyncRules, currentSyncRules]; + +function replyPrettyJson(payload: any) { + return new framework.router.RouterResponse({ + status: 200, + data: JSON.stringify(payload, null, 2) + '\n', + headers: { 'Content-Type': 'application/json' } + }); +} + +async function debugSyncRules(db: pgwire.PgClient, sync_rules: string) { + try { + const rules = SqlSyncRules.fromYaml(sync_rules); + const source_table_patterns = rules.getSourceTables(); + const wc = new replication.WalConnection({ + db: db, + sync_rules: rules + }); + const resolved_tables = await wc.getDebugTablesInfo(source_table_patterns); + + return { + valid: true, + bucket_definitions: rules.bucket_descriptors.map((d) => { + let all_parameter_queries = [...d.parameter_queries.values()].flat(); + let all_data_queries = [...d.data_queries.values()].flat(); + return { + name: d.name, + bucket_parameters: d.bucket_parameters, + global_parameter_queries: d.global_parameter_queries.map((q) => { + return { + sql: q.sql + }; + }), + parameter_queries: all_parameter_queries.map((q) => { + return { + sql: q.sql, + table: q.sourceTable, + input_parameters: q.input_parameters + }; + }), + + data_queries: all_data_queries.map((q) => { + return { + sql: q.sql, + table: q.sourceTable, + columns: q.columnOutputNames() + }; + }) + }; + }), + source_tables: resolved_tables, + data_tables: rules.debugGetOutputTables() + }; + } catch (e) { + if (e instanceof SyncRulesErrors) { + return { + valid: false, + errors: e.errors.map((e) => e.message) + }; + } + return { + valid: false, + errors: [e.message] + }; + } +} diff --git a/packages/service-core/src/routes/endpoints/sync-stream.ts b/packages/service-core/src/routes/endpoints/sync-stream.ts new file mode 100644 index 000000000..267cc6ec2 --- /dev/null +++ b/packages/service-core/src/routes/endpoints/sync-stream.ts @@ -0,0 +1,98 @@ +import { Readable } from 'stream'; +import { SyncParameters, normalizeTokenParameters } from '@powersync/service-sync-rules'; +import * as framework from '@powersync/service-framework'; + +import * as sync from '../../sync/sync-index.js'; +import * as util from '../../util/util-index.js'; + +import { authUser } from '../auth.js'; +import { routeDefinition } from '../router.js'; +import { Metrics } from '../../metrics/Metrics.js'; + +export enum SyncRoutes { + STREAM = '/sync/stream' +} + +export const syncStreamed = routeDefinition({ + path: SyncRoutes.STREAM, + method: framework.router.HTTPMethod.POST, + authorize: authUser, + validator: framework.schema.createTsCodecValidator(util.StreamingSyncRequest, { 
allowAdditional: true }), + handler: async (payload) => { + const system = payload.context.system; + + if (system.closed) { + throw new framework.errors.JourneyError({ + status: 503, + code: 'SERVICE_UNAVAILABLE', + description: 'Service temporarily unavailable' + }); + } + + const params: util.StreamingSyncRequest = payload.params; + const syncParams: SyncParameters = normalizeTokenParameters(payload.context.token_payload!.parameters ?? {}); + + const storage = system.storage; + // Sanity check before we start the stream + const cp = await storage.getActiveCheckpoint(); + if (!cp.hasSyncRules()) { + throw new framework.errors.JourneyError({ + status: 500, + code: 'NO_SYNC_RULES', + description: 'No sync rules available' + }); + } + const controller = new AbortController(); + try { + Metrics.getInstance().concurrent_connections.add(1); + const stream = Readable.from( + sync.transformToBytesTracked( + sync.ndjson( + sync.streamResponse({ + storage, + params, + syncParams, + token: payload.context.token_payload!, + signal: controller.signal + }) + ) + ), + { objectMode: false, highWaterMark: 16 * 1024 } + ); + + const deregister = system.addStopHandler(() => { + // This error is not currently propagated to the client + controller.abort(); + stream.destroy(new Error('Shutting down system')); + }); + stream.on('close', () => { + deregister(); + }); + + stream.on('error', (error) => { + controller.abort(); + // Note: This appears as a 200 response in the logs. + if (error.message != 'Shutting down system') { + framework.logger.error('Streaming sync request failed', error); + } + }); + + return new framework.router.RouterResponse({ + status: 200, + headers: { + 'Content-Type': 'application/x-ndjson' + }, + data: stream, + afterSend: async () => { + controller.abort(); + Metrics.getInstance().concurrent_connections.add(-1); + } + }); + } catch (ex) { + controller.abort(); + Metrics.getInstance().concurrent_connections.add(-1); + } + } +}); + +export const SYNC_STREAM_ROUTES = [syncStreamed]; diff --git a/packages/service-core/src/routes/hooks.ts b/packages/service-core/src/routes/hooks.ts new file mode 100644 index 000000000..77dd7e3ac --- /dev/null +++ b/packages/service-core/src/routes/hooks.ts @@ -0,0 +1,47 @@ +import type fastify from 'fastify'; +import a from 'async'; + +import * as framework from '@powersync/service-framework'; + +export type CreateRequestQueueParams = { + max_queue_depth: number; + concurrency: number; +}; + +/** + * Creates a request queue which limits the amount of concurrent connections which + * are active at any time. 
+ */ +export const createRequestQueueHook = (params: CreateRequestQueueParams): fastify.onRequestHookHandler => { + const request_queue = a.queue<() => Promise>((event, done) => { + event().finally(done); + }, params.concurrency); + + return (request, reply, next) => { + if ( + (params.max_queue_depth == 0 && request_queue.running() == params.concurrency) || + request_queue.length() >= params.max_queue_depth + ) { + framework.logger.warn(`${request.method} ${request.url}`, { + status: 429, + method: request.method, + path: request.url, + route: request.routerPath, + queue_overflow: true + }); + return reply.status(429).send(); + } + + const finished = new Promise((resolve) => { + reply.then( + () => resolve(), + () => resolve() + ); + }); + + request_queue.push(() => { + next(); + return finished; + }); + }; +}; diff --git a/packages/service-core/src/routes/route-generators.ts b/packages/service-core/src/routes/route-generators.ts deleted file mode 100644 index b6db5bf8f..000000000 --- a/packages/service-core/src/routes/route-generators.ts +++ /dev/null @@ -1,39 +0,0 @@ -import * as micro from '@journeyapps-platform/micro'; - -import { Context, RouteGenerator } from './router.js'; -import { admin_routes } from './admin.js'; -import { writeCheckpoint, writeCheckpoint2 } from './checkpointing.js'; -import { dev_routes } from './dev.js'; -import { syncRulesRoutes } from './sync-rules.js'; -import { IReactiveStream, ReactiveSocketRouter } from '@powersync/service-rsocket-router'; -import { sync_stream_reactive } from './socket-route.js'; -import { syncStreamed } from './sync-stream.js'; - -/** - * Generates router endpoints using the specified router instance - */ -export const generateHTTPRoutes = (router: micro.fastify.FastifyRouter): micro.router.Route[] => { - const generators: RouteGenerator[] = [ - ...dev_routes, - writeCheckpoint, - writeCheckpoint2, - ...syncRulesRoutes, - ...admin_routes - ]; - - return generators.map((generateRoute) => generateRoute(router)); -}; - -/** - * Generates route endpoint for HTTP sync streaming - */ -export const generateHTTPStreamRoutes = (router: micro.fastify.FastifyRouter): micro.router.Route[] => { - return [syncStreamed].map((generateRoute) => generateRoute(router)); -}; - -/** - * Generates socket router endpoints using the specified router instance - */ -export const generateSocketRoutes = (router: ReactiveSocketRouter): IReactiveStream[] => { - return [sync_stream_reactive].map((generateRoute) => generateRoute(router)); -}; diff --git a/packages/service-core/src/routes/route-register.ts b/packages/service-core/src/routes/route-register.ts new file mode 100644 index 000000000..ccc5fba98 --- /dev/null +++ b/packages/service-core/src/routes/route-register.ts @@ -0,0 +1,106 @@ +import fastify from 'fastify'; + +import * as framework from '@powersync/service-framework'; +import { Context, ContextProvider, RequestEndpoint, RequestEndpointHandlerPayload } from './router.js'; + +export type FastifyEndpoint = RequestEndpoint & { + parse?: boolean; + plugins?: fastify.FastifyPluginAsync[]; +}; + +/** + * Registers endpoint definitions as routes on a Fastify app instance. 
+ */ +export function registerFastifyRoutes( + app: fastify.FastifyInstance, + contextProvider: ContextProvider, + endpoints: FastifyEndpoint[] +) { + for (const e of endpoints) { + // Create a new context for each route + app.register(async function (fastify) { + fastify.route({ + url: e.path, + method: e.method as framework.HTTPMethod, + handler: async (request, reply) => { + const startTime = new Date(); + let response: framework.router.RouterResponse; + try { + const context = await contextProvider(request); + + let combined = { + ...(request.params as any), + ...(request.query as any) + }; + + if (typeof request.body === 'object' && !Buffer.isBuffer(request.body) && !Array.isArray(request.body)) { + combined = { + ...combined, + ...request.body + }; + } + + const payload: RequestEndpointHandlerPayload = { + context: context, + params: combined, + request + }; + + const endpointResponse = await framework.router.executeEndpoint(e, payload); + + if (framework.RouterResponse.isRouterResponse(endpointResponse)) { + response = endpointResponse; + } else if (framework.router.isAsyncIterable(endpointResponse) || Buffer.isBuffer(endpointResponse)) { + response = new framework.router.RouterResponse({ + status: 200, + data: endpointResponse + }); + } else { + response = new framework.router.RouterResponse({ + status: 200, + data: { data: endpointResponse } + }); + } + } catch (ex) { + const journeyError = framework.errors.JourneyError.isJourneyError(ex) + ? ex + : new framework.errors.InternalServerError(ex); + + response = new framework.RouterResponse({ + status: journeyError.errorData.status || 500, + headers: { + 'Content-Type': 'application/json' + }, + data: { + error: journeyError.errorData + } + }); + } + + Object.keys(response.headers).forEach((key) => { + reply.header(key, response.headers[key]); + }); + reply.status(response.status); + try { + await reply.send(response.data); + } finally { + await response.afterSend?.(); + framework.logger.info(`${e.method} ${request.url}`, { + duration_ms: Math.round(new Date().valueOf() - startTime.valueOf() + Number.EPSILON), + status: response.status, + method: e.method, + path: request.url, + route: e.path + }); + } + } + }); + + if (!(e.parse ?? 
true)) { + fastify.removeAllContentTypeParsers(); + } + + e.plugins?.forEach((plugin) => fastify.register(plugin)); + }); + } +} diff --git a/packages/service-core/src/routes/router.ts b/packages/service-core/src/routes/router.ts index 9e2e393a6..c83cffaf4 100644 --- a/packages/service-core/src/routes/router.ts +++ b/packages/service-core/src/routes/router.ts @@ -1,8 +1,10 @@ -import * as micro from '@journeyapps-platform/micro'; - +import * as framework from '@powersync/service-framework'; import * as auth from '../auth/auth-index.js'; import { CorePowerSyncSystem } from '../system/CorePowerSyncSystem.js'; +/** + * Common context for routes + */ export type Context = { user_id?: string; system: CorePowerSyncSystem; @@ -11,8 +13,34 @@ export type Context = { token_errors?: string[]; }; +export type BasicRouterRequest = { + headers: Record; + protocol: string; + hostname: string; +}; + +export type ContextProvider = (request: BasicRouterRequest) => Promise; + +export type RequestEndpoint< + I, + O, + C = Context, + Payload = RequestEndpointHandlerPayload +> = framework.router.Endpoint & {}; + +export type RequestEndpointHandlerPayload< + I = any, + C = Context, + Request = BasicRouterRequest +> = framework.router.EndpointHandlerPayload & { + request: Request; +}; + /** - * Creates a route handler given a router instance - * TODO move away from Fastify specific types + * Helper function for making generics work well when defining routes */ -export type RouteGenerator = (router: micro.fastify.FastifyRouter) => micro.router.Route; +export function routeDefinition( + params: RequestEndpoint & Extension +): RequestEndpoint & Extension { + return params; +} diff --git a/packages/service-core/src/routes/routes-index.ts b/packages/service-core/src/routes/routes-index.ts index 8af929734..fe07904a6 100644 --- a/packages/service-core/src/routes/routes-index.ts +++ b/packages/service-core/src/routes/routes-index.ts @@ -1,5 +1,6 @@ -export * from './router.js'; -export * from './router-socket.js'; -export * from './route-generators.js'; - export * as auth from './auth.js'; +export * as endpoints from './endpoints/route-endpoints-index.js'; +export * as hooks from './hooks.js'; +export * from './route-register.js'; +export * from './router-socket.js'; +export * from './router.js'; diff --git a/packages/service-core/src/routes/sync-rules.ts b/packages/service-core/src/routes/sync-rules.ts deleted file mode 100644 index 631a73e00..000000000 --- a/packages/service-core/src/routes/sync-rules.ts +++ /dev/null @@ -1,211 +0,0 @@ -import * as t from 'ts-codec'; -import { FastifyPluginAsync, FastifyReply } from 'fastify'; -import * as micro from '@journeyapps-platform/micro'; -import * as framework from '@powersync/service-framework'; -import * as pgwire from '@powersync/service-jpgwire'; -import { SqlSyncRules, SyncRulesErrors } from '@powersync/service-sync-rules'; - -import * as replication from '../replication/replication-index.js'; -import { authApi } from './auth.js'; -import { RouteGenerator } from './router.js'; - -const DeploySyncRulesRequest = t.object({ - content: t.string -}); - -const yamlPlugin: FastifyPluginAsync = async (fastify) => { - fastify.addContentTypeParser('application/yaml', async (request, payload, _d) => { - const data = await micro.streaming.drain(payload); - - request.params = { content: Buffer.concat(data).toString('utf8') }; - }); -}; - -export const deploySyncRules: RouteGenerator = (router) => - router.post('/api/sync-rules/v1/deploy', { - authorize: authApi, - parse: true, - 
plugins: [yamlPlugin], - validator: framework.schema.createTsCodecValidator(DeploySyncRulesRequest, { allowAdditional: true }), - handler: async (payload) => { - if (payload.context.system.config.sync_rules.present) { - // If sync rules are configured via the config, disable deploy via the API. - throw new framework.errors.JourneyError({ - status: 422, - code: 'API_DISABLED', - description: 'Sync rules API disabled', - details: 'Use the management API to deploy sync rules' - }); - } - const content = payload.params.content; - - try { - SqlSyncRules.fromYaml(payload.params.content); - } catch (e) { - throw new framework.errors.JourneyError({ - status: 422, - code: 'INVALID_SYNC_RULES', - description: 'Sync rules parsing failed', - details: e.message - }); - } - - const sync_rules = await payload.context.system.storage.updateSyncRules({ - content: content - }); - - return { - slot_name: sync_rules.slot_name - }; - } - }); - -const ValidateSyncRulesRequest = t.object({ - content: t.string -}); - -export const validateSyncRules: RouteGenerator = (router) => - router.post('/api/sync-rules/v1/validate', { - authorize: authApi, - parse: true, - plugins: [yamlPlugin], - validator: framework.schema.createTsCodecValidator(ValidateSyncRulesRequest, { allowAdditional: true }), - handler: async (payload) => { - const content = payload.params.content; - - const info = await debugSyncRules(payload.context.system.requirePgPool(), content); - - replyPrettyJson(payload.reply, info); - } - }); - -export const currentSyncRules: RouteGenerator = (router) => - router.get('/api/sync-rules/v1/current', { - authorize: authApi, - handler: async (payload) => { - const storage = payload.context.system.storage; - const sync_rules = await storage.getActiveSyncRulesContent(); - if (!sync_rules) { - throw new framework.errors.JourneyError({ - status: 422, - code: 'NO_SYNC_RULES', - description: 'No active sync rules' - }); - } - const info = await debugSyncRules(payload.context.system.requirePgPool(), sync_rules.sync_rules_content); - const next = await storage.getNextSyncRulesContent(); - - const next_info = next - ? await debugSyncRules(payload.context.system.requirePgPool(), next.sync_rules_content) - : null; - - const response = { - current: { - slot_name: sync_rules.slot_name, - content: sync_rules.sync_rules_content, - ...info - }, - next: - next == null - ? 
null - : { - slot_name: next.slot_name, - content: next.sync_rules_content, - ...next_info - } - }; - replyPrettyJson(payload.reply, { data: response }); - } - }); - -const ReprocessSyncRulesRequest = t.object({}); - -export const reprocessSyncRules: RouteGenerator = (router) => - router.post('/api/sync-rules/v1/reprocess', { - authorize: authApi, - validator: framework.schema.createTsCodecValidator(ReprocessSyncRulesRequest), - handler: async (payload) => { - const storage = payload.context.system.storage; - const sync_rules = await storage.getActiveSyncRules(); - if (sync_rules == null) { - throw new framework.errors.JourneyError({ - status: 422, - code: 'NO_SYNC_RULES', - description: 'No active sync rules' - }); - } - - const new_rules = await storage.updateSyncRules({ - content: sync_rules.sync_rules.content - }); - return { - slot_name: new_rules.slot_name - }; - } - }); - -export const syncRulesRoutes = [validateSyncRules, deploySyncRules, reprocessSyncRules, currentSyncRules]; - -function replyPrettyJson(reply: FastifyReply, payload: any) { - reply - .status(200) - .header('Content-Type', 'application/json') - .send(JSON.stringify(payload, null, 2) + '\n'); -} - -async function debugSyncRules(db: pgwire.PgClient, sync_rules: string) { - try { - const rules = SqlSyncRules.fromYaml(sync_rules); - const source_table_patterns = rules.getSourceTables(); - const wc = new replication.WalConnection({ - db: db, - sync_rules: rules - }); - const resolved_tables = await wc.getDebugTablesInfo(source_table_patterns); - - return { - valid: true, - bucket_definitions: rules.bucket_descriptors.map((d) => { - let all_parameter_queries = [...d.parameter_queries.values()].flat(); - let all_data_queries = [...d.data_queries.values()].flat(); - return { - name: d.name, - bucket_parameters: d.bucket_parameters, - global_parameter_queries: d.global_parameter_queries.map((q) => { - return { - sql: q.sql - }; - }), - parameter_queries: all_parameter_queries.map((q) => { - return { - sql: q.sql, - table: q.sourceTable, - input_parameters: q.input_parameters - }; - }), - - data_queries: all_data_queries.map((q) => { - return { - sql: q.sql, - table: q.sourceTable, - columns: q.columnOutputNames() - }; - }) - }; - }), - source_tables: resolved_tables, - data_tables: rules.debugGetOutputTables() - }; - } catch (e) { - if (e instanceof SyncRulesErrors) { - return { - valid: false, - errors: e.errors.map((e) => e.message) - }; - } - return { - valid: false, - errors: [e.message] - }; - } -} diff --git a/packages/service-core/src/routes/sync-stream.ts b/packages/service-core/src/routes/sync-stream.ts deleted file mode 100644 index b6e727c96..000000000 --- a/packages/service-core/src/routes/sync-stream.ts +++ /dev/null @@ -1,92 +0,0 @@ -import { Readable } from 'stream'; -import { SyncParameters, normalizeTokenParameters } from '@powersync/service-sync-rules'; - -import * as sync from '../sync/sync-index.js'; -import * as util from '../util/util-index.js'; - -import { authUser } from './auth.js'; -import { RouteGenerator } from './router.js'; -import { Metrics } from '../metrics/Metrics.js'; -import * as framework from '@powersync/service-framework'; - -export enum SyncRoutes { - STREAM = '/sync/stream' -} - -export const syncStreamed: RouteGenerator = (router) => - router.post(SyncRoutes.STREAM, { - authorize: authUser, - validator: framework.schema.createTsCodecValidator(util.StreamingSyncRequest, { allowAdditional: true }), - handler: async (payload) => { - const userId = payload.context.user_id!; - const 
system = payload.context.system; - - if (system.closed) { - throw new framework.errors.JourneyError({ - status: 503, - code: 'SERVICE_UNAVAILABLE', - description: 'Service temporarily unavailable' - }); - } - - const params: util.StreamingSyncRequest = payload.params; - const syncParams: SyncParameters = normalizeTokenParameters(payload.context.token_payload!.parameters ?? {}); - - const storage = system.storage; - // Sanity check before we start the stream - const cp = await storage.getActiveCheckpoint(); - if (!cp.hasSyncRules()) { - throw new framework.errors.JourneyError({ - status: 500, - code: 'NO_SYNC_RULES', - description: 'No sync rules available' - }); - } - - const res = payload.reply; - - res.status(200).header('Content-Type', 'application/x-ndjson'); - - const controller = new AbortController(); - try { - Metrics.getInstance().concurrent_connections.add(1); - const stream = Readable.from( - sync.transformToBytesTracked( - sync.ndjson( - sync.streamResponse({ - storage, - params, - syncParams, - token: payload.context.token_payload!, - signal: controller.signal - }) - ) - ), - { objectMode: false, highWaterMark: 16 * 1024 } - ); - - const deregister = system.addStopHandler(() => { - // This error is not currently propagated to the client - controller.abort(); - stream.destroy(new Error('Shutting down system')); - }); - stream.on('close', () => { - deregister(); - }); - - stream.on('error', (error) => { - controller.abort(); - // Note: This appears as a 200 response in the logs. - if (error.message != 'Shutting down system') { - framework.logger.error('Streaming sync request failed', error); - } - }); - await res.send(stream); - } finally { - controller.abort(); - Metrics.getInstance().concurrent_connections.add(-1); - // Prevent double-send - res.hijack(); - } - } - }); diff --git a/packages/service-core/src/system/CorePowerSyncSystem.ts b/packages/service-core/src/system/CorePowerSyncSystem.ts index a912ef3cf..a00cef19d 100644 --- a/packages/service-core/src/system/CorePowerSyncSystem.ts +++ b/packages/service-core/src/system/CorePowerSyncSystem.ts @@ -41,8 +41,6 @@ export abstract class CorePowerSyncSystem extends LifeCycledSystem { * [addStopHandler]. * This should be called after the server is started and it's termination handler is added. 
* This is so that the handler is run before the server's handler, allowing streams to be interrupted on exit - * - * TODO this could be improved once router terminations are handled */ addTerminationHandler() { this.terminationHandler.handleTerminationSignal(async () => { diff --git a/packages/service-framework/src/codec/codecs.ts b/packages/service-framework/src/codec/codecs.ts index 5c7acc920..b584de036 100644 --- a/packages/service-framework/src/codec/codecs.ts +++ b/packages/service-framework/src/codec/codecs.ts @@ -80,45 +80,3 @@ export const Resource = ResourceId.and(Timestamps); export const QueryFilter = t.object({ exists: t.boolean }); - -export const makeQueryFilter = (type: t.AnyCodec) => { - return type.or(t.array(type)).or(QueryFilter).optional(); -}; - -export const FilterProperties = ( - type: T -): t.Codec< - { [K in keyof t.Encoded]?: t.Ix[K] | t.Ix[K][] | t.Ix }, - { [K in keyof t.Encoded]?: t.Ox[K] | t.Ox[K][] | t.Ox }, - 'FilterProperties' -> => { - let codecs = new Map(); - - const addCodecs = (codec: t.ObjectCodec) => { - if (codec.props?.shape) { - Object.keys(codec.props.shape).forEach((k) => { - codecs.set(k, codec.props.shape[k]); - }); - } - }; - - if (type._tag === t.CodecType.Object) { - addCodecs(type); - } else if (type._tag === t.CodecType.Intersection) { - type.props.codecs.forEach((c: t.AnyCodec) => { - addCodecs(c); - }); - } - - t.object({ - test: t.string - }); - - // @ts-ignore - return t.object( - Array.from(codecs.keys()).reduce((prev: any, cur) => { - prev[cur] = makeQueryFilter(codecs.get(cur)!); - return prev; - }, {}) - ); -}; diff --git a/packages/service-framework/src/errors/framework-errors.ts b/packages/service-framework/src/errors/framework-errors.ts index cbde26db0..e09cc0772 100644 --- a/packages/service-framework/src/errors/framework-errors.ts +++ b/packages/service-framework/src/errors/framework-errors.ts @@ -1,17 +1,3 @@ -/** - * This error class is designed to give consumers of Journey Micro - * a consistent way of "throwing" errors. Specifically, these errors - * will give clients to Journey Micro implementations two things: - * - * 1. An consistent, static error code by which to easily classify errors - * 2. An error message intended for humans - * - * Errors will usually assume that there is some client side error and default to 400 for - * a rest-like response. This can be changed however to more accurately, in restful terms, - * indicate what went wrong. 
- * - */ - export enum ErrorSeverity { INFO = 'info', WARNING = 'warning', @@ -40,6 +26,10 @@ export class JourneyError extends Error { errorData: ErrorData; + static isJourneyError(input: any): input is JourneyError { + return input instanceof JourneyError || input?.is_journey_error == true; + } + constructor(data: ErrorData) { super(`[${data.code}] ${data.description}\n ${data.details}`); diff --git a/packages/service-framework/src/errors/utils.ts b/packages/service-framework/src/errors/utils.ts index 9ff2ec349..878d924f4 100644 --- a/packages/service-framework/src/errors/utils.ts +++ b/packages/service-framework/src/errors/utils.ts @@ -1,7 +1,7 @@ import { ErrorData, JourneyError } from './framework-errors.js'; export const isJourneyError = (err: any): err is JourneyError => { - const matches = err instanceof JourneyError || err.is_journey_error; + const matches = JourneyError.isJourneyError(err); return !!matches; }; diff --git a/packages/service-framework/src/index.ts b/packages/service-framework/src/index.ts index c121e2041..a679bc2e7 100644 --- a/packages/service-framework/src/index.ts +++ b/packages/service-framework/src/index.ts @@ -15,6 +15,9 @@ export * as schema from './schema/schema-index.js'; export * from './signals/signals-index.js'; export * as signals from './signals/signals-index.js'; +export * from './router/router-index.js'; +export * as router from './router/router-index.js'; + export * from './system/LifeCycledSystem.js'; export * as system from './system/system-index.js'; diff --git a/packages/service-framework/src/router/endpoint.ts b/packages/service-framework/src/router/endpoint.ts new file mode 100644 index 000000000..7bdb970a3 --- /dev/null +++ b/packages/service-framework/src/router/endpoint.ts @@ -0,0 +1,23 @@ +import * as errors from '../errors/errors-index.js'; +import { Endpoint, EndpointHandlerPayload } from './router-definitions.js'; + +/** + * Executes an endpoint's definition in the correct lifecycle order: + * Validations are checked. A {@link ValidationError} is thrown if validations fail. + * Authorization is checked. A {@link AuthorizationError} is thrown if checks fail. 
+ */ +export const executeEndpoint = async >( + endpoint: Endpoint, + payload: P +) => { + const validation_response = await endpoint.validator?.validate(payload.params); + if (validation_response && !validation_response.valid) { + throw new errors.ValidationError(validation_response.errors); + } + const authorizer_response = await endpoint.authorize?.(payload); + if (authorizer_response && !authorizer_response.authorized) { + throw new errors.AuthorizationError(authorizer_response.errors); + } + + return endpoint.handler(payload); +}; diff --git a/packages/service-framework/src/router/router-definitions.ts b/packages/service-framework/src/router/router-definitions.ts new file mode 100644 index 000000000..17f8a3376 --- /dev/null +++ b/packages/service-framework/src/router/router-definitions.ts @@ -0,0 +1,51 @@ +import { MicroValidator } from '../schema/definitions.js'; + +/** + * Subset of HTTP methods used in route definitions + */ +export enum HTTPMethod { + 'DELETE' = 'DELETE', + 'GET' = 'GET', + 'HEAD' = 'HEAD', + 'PATCH' = 'PATCH', + 'POST' = 'POST', + 'PUT' = 'PUT', + 'OPTIONS' = 'OPTIONS' +} + +/** + * Response for authorization checks in route definitions + */ +export type AuthorizationResponse = + | { + authorized: true; + } + | { + authorized: false; + errors?: any[]; + }; + +/** + * Payload which is passed to route endpoint handler functions + */ +export type EndpointHandlerPayload = { + params: P; + context: C; +}; + +/** + * A route endpoint handler function + */ +export type EndpointHandler = (payload: P) => O | Promise; + +/** + * The definition of a route endpoint + */ +export type Endpoint, H = EndpointHandler> = { + path: string; + method: HTTPMethod; + tags?: EndpointHandler>; + authorize?: EndpointHandler; + validator?: MicroValidator; + handler: H; +}; diff --git a/packages/service-framework/src/router/router-index.ts b/packages/service-framework/src/router/router-index.ts new file mode 100644 index 000000000..dea1f9515 --- /dev/null +++ b/packages/service-framework/src/router/router-index.ts @@ -0,0 +1,3 @@ +export * from './router-definitions.js'; +export * from './router-response.js'; +export * from './endpoint.js'; diff --git a/packages/service-framework/src/router/router-response.ts b/packages/service-framework/src/router/router-response.ts new file mode 100644 index 000000000..904fffac6 --- /dev/null +++ b/packages/service-framework/src/router/router-response.ts @@ -0,0 +1,48 @@ +export const isAsyncIterable = (iterable: any): iterable is AsyncIterable => { + return iterable != null && typeof iterable === 'object' && Symbol.asyncIterator in iterable; +}; + +export type RouterResponseParams = { + status?: number; + data: T; + headers?: Record; + /** + * Hook to be called after the response has been sent + */ + afterSend?: () => Promise; +}; + +/** + * Wrapper for router responses. + * A {@link EndpointHandler} can return a raw JSON object to be used as the response data + * payload or a [RouterResponse] which allows for more granular control of what is sent. + */ +export class RouterResponse { + status: number; + data: T; + headers: Record; + afterSend: () => Promise; + + __micro_router_response = true; + + static isRouterResponse(input: any): input is RouterResponse { + return input instanceof RouterResponse || input?.__micro_router_response == true; + } + + constructor(params: RouterResponseParams) { + this.status = params.status || 200; + this.data = params.data; + this.headers = params.headers || {}; + this.afterSend = params.afterSend ?? 
(() => Promise.resolve()); + + if (!this.headers['Content-Type']) { + if (isAsyncIterable(this.data)) { + this.headers['Content-Type'] = 'application/octet-stream'; + } else if (Buffer.isBuffer(this.data)) { + this.headers['Content-Type'] = 'application/octet-stream'; + } else { + this.headers['Content-Type'] = 'application/json'; + } + } + } +} diff --git a/packages/service-framework/src/schema/schema-index.ts b/packages/service-framework/src/schema/schema-index.ts index 6dceedbbd..813e17060 100644 --- a/packages/service-framework/src/schema/schema-index.ts +++ b/packages/service-framework/src/schema/schema-index.ts @@ -1,6 +1,6 @@ -export * from './definitions'; +export * from './definitions.js'; -export * from './json-schema/keywords'; -export * from './json-schema/parser'; -export * from './validators/schema-validator'; -export * from './validators/ts-codec-validator'; +export * from './json-schema/keywords.js'; +export * from './json-schema/parser.js'; +export * from './validators/schema-validator.js'; +export * from './validators/ts-codec-validator.js'; diff --git a/packages/service-framework/src/schema/utils.ts b/packages/service-framework/src/schema/utils.ts index af96afe32..2efc2498d 100644 --- a/packages/service-framework/src/schema/utils.ts +++ b/packages/service-framework/src/schema/utils.ts @@ -1,5 +1,5 @@ -import * as errors from '../errors/errors-index'; -import * as defs from './definitions'; +import * as errors from '../errors/errors-index.js'; +import * as defs from './definitions.js'; export type Schema = { additionalProperties?: boolean | Schema; diff --git a/packages/service-framework/src/signals/termination-handler.ts b/packages/service-framework/src/signals/termination-handler.ts index 92170acde..9edbf1995 100644 --- a/packages/service-framework/src/signals/termination-handler.ts +++ b/packages/service-framework/src/signals/termination-handler.ts @@ -1,5 +1,5 @@ import _ from 'lodash'; -import { logger } from '../logger/Logger'; +import { logger } from '../logger/Logger.js'; export enum Signal { SIGTERM = 'SIGTERM', @@ -24,8 +24,8 @@ type TerminationHandlerParams = { }; /** - * Utility function to handle external termination signals. Calls an async handler - * and then kills the application. + * Utility function to handle external termination signals. + * Calls an async handler and then kills the application. 
*/ export const createTerminationHandler = (params?: TerminationHandlerParams) => { const { signals = Object.values(Signal), timeout_ms = 30000 } = params || {}; diff --git a/packages/service-framework/src/utils/environment-variables.ts b/packages/service-framework/src/utils/environment-variables.ts index 733bcf4c7..8bbeb11aa 100644 --- a/packages/service-framework/src/utils/environment-variables.ts +++ b/packages/service-framework/src/utils/environment-variables.ts @@ -26,9 +26,7 @@ const boolean = t const list = t.string().transform((value) => value.split(',')); -export const DefaultSchema = t.object({ - DEV_MODE_DO_NOT_ENABLE_IN_PRODUCTION_OR_YOU_WILL_BE_FIRED: boolean.default('false') -}); +export const DefaultSchema = t.object({}); export type DefaultSchema = t.infer; export const collectEnvironmentVariablesFromSchema = ( diff --git a/packages/service-framework/test/src/errors.test.ts b/packages/service-framework/test/src/errors.test.ts index 200247281..82cccd901 100644 --- a/packages/service-framework/test/src/errors.test.ts +++ b/packages/service-framework/test/src/errors.test.ts @@ -1,8 +1,8 @@ import { describe, test, expect } from 'vitest'; -import * as micro_errors from '../../src/errors/errors-index'; +import * as errors from '../../src/errors/errors-index.js'; -class CustomJourneyError extends micro_errors.JourneyError { +class CustomJourneyError extends errors.JourneyError { constructor() { super({ code: 'CUSTOM_JOURNEY_ERROR', @@ -17,7 +17,7 @@ describe('errors', () => { const error = new CustomJourneyError(); expect(error instanceof Error).toBe(true); - expect(error instanceof micro_errors.JourneyError).toBe(true); + expect(error instanceof errors.JourneyError).toBe(true); expect(error.name).toBe('CustomJourneyError'); }); @@ -37,13 +37,13 @@ describe('errors', () => { const standard_error = new Error('non-journey error'); const error = new CustomJourneyError(); - expect(micro_errors.isJourneyError(standard_error)).toBe(false); - expect(micro_errors.isJourneyError(error)).toBe(true); + expect(errors.isJourneyError(standard_error)).toBe(false); + expect(errors.isJourneyError(error)).toBe(true); - expect(micro_errors.matchesErrorCode(error, 'CUSTOM_JOURNEY_ERROR')).toBe(true); - expect(micro_errors.matchesErrorCode(standard_error, 'CUSTOM_JOURNEY_ERROR')).toBe(false); + expect(errors.matchesErrorCode(error, 'CUSTOM_JOURNEY_ERROR')).toBe(true); + expect(errors.matchesErrorCode(standard_error, 'CUSTOM_JOURNEY_ERROR')).toBe(false); - expect(micro_errors.getErrorData(error)).toMatchSnapshot(); - expect(micro_errors.getErrorData(standard_error)).toBe(undefined); + expect(errors.getErrorData(error)).toMatchSnapshot(); + expect(errors.getErrorData(standard_error)).toBe(undefined); }); }); diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index e7b8a2a1f..b6791c4ad 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -153,6 +153,9 @@ importers: '@powersync/service-types': specifier: workspace:* version: link:../types + async: + specifier: ^3.2.4 + version: 3.2.5 async-mutex: specifier: ^0.5.0 version: 0.5.0 @@ -165,9 +168,6 @@ importers: cors: specifier: ^2.8.5 version: 2.8.5 - fastify: - specifier: 4.23.2 - version: 4.23.2 ipaddr.js: specifier: ^2.1.0 version: 2.2.0 @@ -205,12 +205,21 @@ importers: specifier: ^2.3.2 version: 2.4.2 devDependencies: + '@types/async': + specifier: ^3.2.24 + version: 3.2.24 '@types/lodash': specifier: ^4.17.5 version: 4.17.5 '@types/uuid': specifier: ^9.0.4 version: 9.0.8 + fastify: + specifier: 4.23.2 + version: 4.23.2 + fastify-plugin: + specifier: ^4.5.1 + 
version: 4.5.1 typescript: specifier: ^5.2.2 version: 5.4.5 @@ -1470,6 +1479,9 @@ packages: '@types/accepts@1.3.7': resolution: {integrity: sha512-Pay9fq2lM2wXPWbteBsRAGiWH2hig4ZE2asK+mm7kUzlxRTfL961rj89I6zV/E3PcIkDqyuBEcMxFT7rccugeQ==} + '@types/async@3.2.24': + resolution: {integrity: sha512-8iHVLHsCCOBKjCF2KwFe0p9Z3rfM9mL+sSP8btyR5vTjJRAqpBYD28/ZLgXPf0pjG1VxOvtCV/BgXkQbpSe8Hw==} + '@types/body-parser@1.19.5': resolution: {integrity: sha512-fB3Zu92ucau0iQ0JMCFQE7b/dv8Ot07NI3KaZIkIUNXq82k4eBAqUaneXfleGY9JWskeS9y+u0nXMyspcuQrCg==} @@ -6080,7 +6092,7 @@ snapshots: '@opentelemetry/semantic-conventions': 1.25.0 '@prisma/instrumentation': 5.15.0 '@sentry/core': 8.9.2 - '@sentry/opentelemetry': 8.9.2(@opentelemetry/api@1.9.0)(@opentelemetry/core@1.25.0(@opentelemetry/api@1.6.0))(@opentelemetry/instrumentation@0.52.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@1.25.0(@opentelemetry/api@1.9.0))(@opentelemetry/semantic-conventions@1.25.0) + '@sentry/opentelemetry': 8.9.2(@opentelemetry/api@1.9.0)(@opentelemetry/core@1.25.0(@opentelemetry/api@1.6.0))(@opentelemetry/instrumentation@0.52.0(@opentelemetry/api@1.6.0))(@opentelemetry/sdk-trace-base@1.25.0(@opentelemetry/api@1.6.0))(@opentelemetry/semantic-conventions@1.25.0) '@sentry/types': 8.9.2 '@sentry/utils': 8.9.2 optionalDependencies: @@ -6088,7 +6100,7 @@ snapshots: transitivePeerDependencies: - supports-color - '@sentry/opentelemetry@8.9.2(@opentelemetry/api@1.9.0)(@opentelemetry/core@1.25.0(@opentelemetry/api@1.6.0))(@opentelemetry/instrumentation@0.52.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@1.25.0(@opentelemetry/api@1.9.0))(@opentelemetry/semantic-conventions@1.25.0)': + '@sentry/opentelemetry@8.9.2(@opentelemetry/api@1.9.0)(@opentelemetry/core@1.25.0(@opentelemetry/api@1.6.0))(@opentelemetry/instrumentation@0.52.0(@opentelemetry/api@1.6.0))(@opentelemetry/sdk-trace-base@1.25.0(@opentelemetry/api@1.6.0))(@opentelemetry/semantic-conventions@1.25.0)': dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/core': 1.25.0(@opentelemetry/api@1.9.0) @@ -6165,6 +6177,8 @@ snapshots: dependencies: '@types/node': 18.11.11 + '@types/async@3.2.24': {} + '@types/body-parser@1.19.5': dependencies: '@types/connect': 3.4.38 diff --git a/service/src/routes/router.ts b/service/src/routes/router.ts index 0d3f5e5c1..4fd55f19f 100644 --- a/service/src/routes/router.ts +++ b/service/src/routes/router.ts @@ -1,33 +1,6 @@ -import * as micro from '@journeyapps-platform/micro'; import { ReactiveSocketRouter } from '@powersync/service-rsocket-router'; import { routes } from '@powersync/service-core'; -export const Router = new micro.fastify.FastifyRouter({ - concurrency: { - max_queue_depth: 20, - // TODO: Test max concurrency. - max_concurrent_requests: 10 - }, - tags: () => { - return {}; - }, - cors: false // CORS is manually registered +export const SocketRouter = new ReactiveSocketRouter({ + max_concurrent_connections: 200 }); - -// Separate router so that we have a separate queue for streaming requests. -export const StreamingRouter = new micro.fastify.FastifyRouter({ - concurrency: { - // Don't queue more requests, since it could wait a very long time. - // KLUDGE: the queue doesn't support a max depth of 0 currently. 
- max_queue_depth: 1, - max_concurrent_requests: 200 - }, - tags: () => { - return {}; - }, - cors: false // CORS is manually registered -}); - -export const SocketRouter = new ReactiveSocketRouter(); - -export default Router; diff --git a/service/src/runners/server.ts b/service/src/runners/server.ts index 7eccf3345..677b6f7ea 100644 --- a/service/src/runners/server.ts +++ b/service/src/runners/server.ts @@ -1,38 +1,71 @@ import { deserialize } from 'bson'; import fastify from 'fastify'; import cors from '@fastify/cors'; -import * as micro from '@journeyapps-platform/micro'; import * as framework from '@powersync/service-framework'; +import * as core from '@powersync/service-core'; import { RSocketRequestMeta } from '@powersync/service-rsocket-router'; -import { Metrics, routes, utils } from '@powersync/service-core'; import { PowerSyncSystem } from '../system/PowerSyncSystem.js'; -import { Router, SocketRouter, StreamingRouter } from '../routes/router.js'; +import { SocketRouter } from '../routes/router.js'; /** * Starts an API server */ -export async function startServer(runnerConfig: utils.RunnerConfig) { +export async function startServer(runnerConfig: core.utils.RunnerConfig) { framework.logger.info('Booting'); - const config = await utils.loadConfig(runnerConfig); + const config = await core.utils.loadConfig(runnerConfig); const system = new PowerSyncSystem(config); const server = fastify.fastify(); - server.register( - Router.plugin({ - routes: [...routes.generateHTTPRoutes(Router), ...micro.router.createProbeRoutes()], - contextProvider: (payload) => { + + // Create a separate context for concurrency queueing + server.register(async function (childContext) { + core.routes.registerFastifyRoutes( + childContext, + async () => { return { user_id: undefined, system: system }; - } - }) - ); + }, + [ + ...core.routes.endpoints.ADMIN_ROUTES, + ...core.routes.endpoints.CHECKPOINT_ROUTES, + ...core.routes.endpoints.DEV_ROUTES, + ...core.routes.endpoints.SYNC_RULES_ROUTES + ] + ); + // Limit the active concurrent requests + childContext.addHook( + 'onRequest', + core.routes.hooks.createRequestQueueHook({ + max_queue_depth: 20, + concurrency: 10 + }) + ); + }); - server.route({ - method: fastify. 
- }) + // Create a separate context for concurrency queueing + server.register(async function (childContext) { + core.routes.registerFastifyRoutes( + childContext, + async () => { + return { + user_id: undefined, + system: system + }; + }, + [...core.routes.endpoints.SYNC_STREAM_ROUTES] + ); + // Limit the active concurrent requests + childContext.addHook( + 'onRequest', + core.routes.hooks.createRequestQueueHook({ + max_queue_depth: 0, + concurrency: 200 + }) + ); + }); server.register(cors, { origin: '*', @@ -42,30 +75,18 @@ export async function startServer(runnerConfig: utils.RunnerConfig) { maxAge: 3600 }); - server.register( - StreamingRouter.plugin({ - routes: routes.generateHTTPStreamRoutes(StreamingRouter), - contextProvider: (payload) => { - return { - user_id: undefined, - system: system - }; - } - }) - ); - SocketRouter.applyWebSocketEndpoints(server.server, { contextProvider: async (data: Buffer) => { - const { token } = routes.RSocketContextMeta.decode(deserialize(data) as any); + const { token } = core.routes.RSocketContextMeta.decode(deserialize(data) as any); if (!token) { throw new framework.errors.ValidationError('No token provided in context'); } try { - const extracted_token = routes.auth.getTokenFromHeader(token); + const extracted_token = core.routes.auth.getTokenFromHeader(token); if (extracted_token != null) { - const { context, errors } = await routes.auth.generateContext(system, extracted_token); + const { context, errors } = await core.routes.auth.generateContext(system, extracted_token); return { token, ...context, @@ -82,7 +103,7 @@ export async function startServer(runnerConfig: utils.RunnerConfig) { system }; }, - endpoints: routes.generateSocketRoutes(SocketRouter), + endpoints: [core.routes.endpoints.syncStreamReactive(SocketRouter)], metaDecoder: async (meta: Buffer) => { return RSocketRequestMeta.decode(deserialize(meta) as any); }, @@ -93,11 +114,19 @@ export async function startServer(runnerConfig: utils.RunnerConfig) { await system.start(); framework.logger.info('System started'); - Metrics.getInstance().configureApiMetrics(); + core.Metrics.getInstance().configureApiMetrics(); + + await server.listen({ + port: system.config.port + }); - await micro.fastify.startServer(server, system.config.port); + system.terminationHandler.handleTerminationSignal(async () => { + framework.logger.info('Shutting down HTTP server...'); + await server.close(); + framework.logger.info('HTTP server stopped'); + }); - // MUST be after startServer. + // MUST be after adding the termination handler above. 
   // This is so that the handler is run before the server's handler, allowing streams to be interrupted on exit
   system.addTerminationHandler();

From 4e59b334bccc57574dc9dfd7d2cac9493c56e021 Mon Sep 17 00:00:00 2001
From: Steven Ontong
Date: Thu, 20 Jun 2024 09:40:44 +0200
Subject: [PATCH 16/36] add migrations and mongo locks functionality

---
 package.json                                  |    2 +-
 packages/rsocket-router/package.json          |    1 -
 packages/service-core/package.json            |    2 -
 .../src/entry/commands/migrate-action.ts      |    2 +-
 packages/service-core/src/index.ts            |    2 +-
 .../service-core/src/locks/LockManager.ts     |   16 +
 packages/service-core/src/locks/MongoLocks.ts |  142 ++
 .../service-core/src/locks/locks-index.ts     |    2 +
 .../service-core/src/migrations/db/store.ts   |   11 -
 .../src/migrations/definitions.ts             |   21 +
 .../service-core/src/migrations/executor.ts   |   87 +
 .../src/migrations/migrations-index.ts        |    3 +
 .../service-core/src/migrations/migrations.ts |   14 +-
 .../src/migrations/store/migration-store.ts   |   63 +
 packages/service-core/src/routes/hooks.ts     |    2 +-
 .../src/storage/MongoBucketStorage.ts         |    2 +-
 packages/service-core/src/storage/mongo/db.ts |    5 +-
 .../test/src/schema/parser.test.ts            |    2 +-
 .../test/src/schema/schema-validation.test.ts |    4 +-
 .../src/schema/ts-codec-validation.test.ts    |    2 +-
 pnpm-lock.yaml                                | 1527 +----------------
 service/package.json                          |    2 -
 service/src/runners/stream-worker.ts          |    3 +-
 23 files changed, 377 insertions(+), 1540 deletions(-)
 create mode 100644 packages/service-core/src/locks/LockManager.ts
 create mode 100644 packages/service-core/src/locks/MongoLocks.ts
 create mode 100644 packages/service-core/src/locks/locks-index.ts
 delete mode 100644 packages/service-core/src/migrations/db/store.ts
 create mode 100644 packages/service-core/src/migrations/definitions.ts
 create mode 100644 packages/service-core/src/migrations/executor.ts
 create mode 100644 packages/service-core/src/migrations/migrations-index.ts
 create mode 100644 packages/service-core/src/migrations/store/migration-store.ts

diff --git a/package.json b/package.json
index a5a5f4ace..2a896dfa2 100644
--- a/package.json
+++ b/package.json
@@ -13,7 +13,7 @@
     "build:packages": "pnpm run build",
     "build:production": "NODE_ENV=production pnpm run -r build",
     "build:ts": "tsc -b",
-    "watch:ts": "pnpm build:ts -w",
+    "watch:ts": "pnpm build:ts -w --preserveWatchOutput",
     "watch:service": "concurrently --passthrough-arguments \"pnpm watch:ts\" \" pnpm start:service {@}\" -- ",
     "start:service": "pnpm --filter powersync-open-service watch",
     "clean": "pnpm run -r clean",
diff --git a/packages/rsocket-router/package.json b/packages/rsocket-router/package.json
index 3ddc9400f..79aa5f46c 100644
--- a/packages/rsocket-router/package.json
+++ b/packages/rsocket-router/package.json
@@ -18,7 +18,6 @@
     "test": "vitest"
   },
   "dependencies": {
-    "@journeyapps-platform/micro": "^17.0.1",
     "@powersync/service-framework": "workspace:*",
     "rsocket-core": "1.0.0-alpha.3",
     "ts-codec": "^1.2.2",
diff --git a/packages/service-core/package.json b/packages/service-core/package.json
index 3bfd761f1..256a81bc2 100644
--- a/packages/service-core/package.json
+++ b/packages/service-core/package.json
@@ -16,8 +16,6 @@
     "clean": "rm -rf ./lib && tsc -b --clean"
   },
   "dependencies": {
-    "@journeyapps-platform/micro": "^17.0.1",
-    "@journeyapps-platform/micro-migrate": "^4.0.1",
    "@opentelemetry/api": "~1.8.0",
    "@opentelemetry/exporter-metrics-otlp-http": "^0.51.1",
    "@opentelemetry/exporter-prometheus": "^0.51.1",
diff --git a/packages/service-core/src/entry/commands/migrate-action.ts b/packages/service-core/src/entry/commands/migrate-action.ts
index c18aa4ede..24f3d1579 100644
--- a/packages/service-core/src/entry/commands/migrate-action.ts
+++ b/packages/service-core/src/entry/commands/migrate-action.ts
@@ -1,8 +1,8 @@
 import { Command } from 'commander';
-import { Direction } from '@journeyapps-platform/micro-migrate';
 
 import { extractRunnerOptions, wrapConfigCommand } from './config-command.js';
 import { migrate } from '../../migrations/migrations.js';
+import { Direction } from '../../migrations/definitions.js';
 
 const COMMAND_NAME = 'migrate';
 
diff --git a/packages/service-core/src/index.ts b/packages/service-core/src/index.ts
index 6cf074cbf..1b88bbf98 100644
--- a/packages/service-core/src/index.ts
+++ b/packages/service-core/src/index.ts
@@ -16,7 +16,7 @@ export * from './metrics/Metrics.js';
 export * as metrics from './metrics/Metrics.js';
 
 export * from './migrations/migrations.js';
-export * as migrations from './migrations/migrations.js';
+export * as migrations from './migrations/migrations-index.js';
 
 export * from './replication/replication-index.js';
 export * as replication from './replication/replication-index.js';
diff --git a/packages/service-core/src/locks/LockManager.ts b/packages/service-core/src/locks/LockManager.ts
new file mode 100644
index 000000000..6bb534885
--- /dev/null
+++ b/packages/service-core/src/locks/LockManager.ts
@@ -0,0 +1,16 @@
+import * as bson from 'bson';
+
+export class LockActiveError extends Error {
+  constructor() {
+    super('Lock is already active');
+    this.name = this.constructor.name;
+  }
+}
+
+export type LockManager = {
+  acquire: () => Promise<bson.ObjectId | null>;
+  refresh: (lock_id: bson.ObjectId) => Promise<void>;
+  release: (lock_id: bson.ObjectId) => Promise<void>;
+
+  lock: (handler: (refresh: () => Promise<void>) => Promise<void>) => Promise<void>;
+};
diff --git a/packages/service-core/src/locks/MongoLocks.ts b/packages/service-core/src/locks/MongoLocks.ts
new file mode 100644
index 000000000..33ad7bc8a
--- /dev/null
+++ b/packages/service-core/src/locks/MongoLocks.ts
@@ -0,0 +1,142 @@
+import * as mongo from 'mongodb';
+import * as bson from 'bson';
+import { LockActiveError, LockManager } from './LockManager.js';
+
+/**
+ * Lock Document Schema
+ */
+export type Lock = {
+  name: string;
+  active_lock?: {
+    lock_id: bson.ObjectId;
+    ts: Date;
+  };
+};
+
+export type Collection = mongo.Collection<Lock>;
+
+export type AcquireLockParams = {
+  /**
+   * Name of the process/user trying to acquire the lock.
+   */
+  name: string;
+  /**
+   * The TTL of the lock (ms). Default: 60000 ms (1 min)
+   */
+  timeout?: number;
+};
+
+const DEFAULT_LOCK_TIMEOUT = 60 * 1000; // 1 minute
+
+const acquireLock = async (collection: Collection, params: AcquireLockParams) => {
+  const now = new Date();
+  const lock_timeout = params.timeout ?? DEFAULT_LOCK_TIMEOUT;
+  const lock_id = new bson.ObjectId();
+
+  await collection.updateOne(
+    {
+      name: params.name
+    },
+    {
+      $setOnInsert: {
+        name: params.name
+      }
+    },
+    {
+      upsert: true
+    }
+  );
+
+  const expired_ts = now.getTime() - lock_timeout;
+
+  const res = await collection.updateOne(
+    {
+      $and: [
+        { name: params.name },
+        {
+          $or: [{ active_lock: { $exists: false } }, { 'active_lock.ts': { $lte: new Date(expired_ts) } }]
+        }
+      ]
+    },
+    {
+      $set: {
+        active_lock: {
+          lock_id: lock_id,
+          ts: now
+        }
+      }
+    }
+  );
+
+  if (res.modifiedCount === 0) {
+    return null;
+  }
+
+  return lock_id;
+};
+
+const refreshLock = async (collection: Collection, lock_id: bson.ObjectId) => {
+  const res = await collection.updateOne(
+    {
+      'active_lock.lock_id': lock_id
+    },
+    {
+      $set: {
+        'active_lock.ts': new Date()
+      }
+    }
+  );
+
+  if (res.modifiedCount === 0) {
+    throw new Error('Lock not found, could not refresh');
+  }
+};
+
+export const releaseLock = async (collection: Collection, lock_id: bson.ObjectId) => {
+  const res = await collection.updateOne(
+    {
+      'active_lock.lock_id': lock_id
+    },
+    {
+      $unset: {
+        active_lock: true
+      }
+    }
+  );
+
+  if (res.modifiedCount === 0) {
+    throw new Error('Lock not found, could not release');
+  }
+};
+
+export type CreateLockManagerParams = {
+  /**
+   * Name of the process/user trying to acquire the lock.
+   */
+  name: string;
+  /**
+   * The TTL for each lock (ms). Default: 60000 ms (1 min)
+   */
+  timeout?: number;
+};
+
+export const createMongoLockManager = (collection: Collection, params: CreateLockManagerParams): LockManager => {
+  return {
+    acquire: () => acquireLock(collection, params),
+    refresh: (lock_id: bson.ObjectId) => refreshLock(collection, lock_id),
+    release: (lock_id: bson.ObjectId) => releaseLock(collection, lock_id),
+
+    lock: async (handler) => {
+      const lock_id = await acquireLock(collection, params);
+      if (!lock_id) {
+        throw new LockActiveError();
+      }
+
+      try {
+        await handler(() => refreshLock(collection, lock_id));
+      } finally {
+        await releaseLock(collection, lock_id);
+      }
+    }
+  };
+};
diff --git a/packages/service-core/src/locks/locks-index.ts b/packages/service-core/src/locks/locks-index.ts
new file mode 100644
index 000000000..3b7e387aa
--- /dev/null
+++ b/packages/service-core/src/locks/locks-index.ts
@@ -0,0 +1,2 @@
+export * from './LockManager.js';
+export * from './MongoLocks.js';
diff --git a/packages/service-core/src/migrations/db/store.ts b/packages/service-core/src/migrations/db/store.ts
deleted file mode 100644
index 7f23f4eab..000000000
--- a/packages/service-core/src/migrations/db/store.ts
+++ /dev/null
@@ -1,11 +0,0 @@
-import * as micro_migrate from '@journeyapps-platform/micro-migrate';
-import * as utils from '../../util/util-index.js';
-
-const config = await utils.loadConfig();
-
-export default micro_migrate.createMongoMigrationStore({
-  uri: config.storage.uri,
-  database: config.storage.database,
-  username: config.storage.username,
-  password: config.storage.password
-});
diff --git a/packages/service-core/src/migrations/definitions.ts b/packages/service-core/src/migrations/definitions.ts
new file mode 100644
index 000000000..406a9fa0c
--- /dev/null
+++ b/packages/service-core/src/migrations/definitions.ts
@@ -0,0 +1,21 @@
+export type Migration = {
+  name: string;
+  up: () => Promise<void>;
+  down: () => Promise<void>;
+};
+
+export enum Direction {
+  Up = 'up',
+  Down = 'down'
+}
+
+export type ExecutedMigration = {
+  name: string;
+  direction: Direction;
+  timestamp: Date;
+};
+
+export type MigrationState = {
+  last_run: string;
+  log: ExecutedMigration[];
+};
diff --git a/packages/service-core/src/migrations/executor.ts b/packages/service-core/src/migrations/executor.ts
new file mode 100644
index 000000000..9b13f4146
--- /dev/null
+++ b/packages/service-core/src/migrations/executor.ts
@@ -0,0 +1,87 @@
+import * as framework from '@powersync/service-framework';
+import * as defs from './definitions.js';
+import { MigrationStore } from './store/migration-store.js';
+
+type ExecuteParams = {
+  migrations: defs.Migration[];
+  state?: defs.MigrationState;
+
+  direction: defs.Direction;
+  count?: number;
+};
+
+export async function* execute(params: ExecuteParams): AsyncGenerator<defs.ExecutedMigration> {
+  let migrations = [...params.migrations];
+  if (params.direction === defs.Direction.Down) {
+    migrations = migrations.reverse();
+  }
+
+  let index = 0;
+
+  if (params.state) {
+    // Find the index of the last run
+    index = migrations.findIndex((migration) => {
+      return migration.name === params.state!.last_run;
+    });
+
+    if (index === -1) {
+      throw new Error(`The last run migration ${params.state?.last_run} was not found in the given set of migrations`);
+    }
+
+    // If we are migrating down then we want to include the last run migration, otherwise we want to start at the next one
+    if (params.direction === defs.Direction.Up) {
+      index += 1;
+    }
+  }
+
+  migrations = migrations.slice(index);
+
+  let i = 0;
+  for (const migration of migrations) {
+    if (params.count && params.count === i) {
+      return;
+    }
+
+    framework.logger.info(`Executing ${migration.name} (${params.direction})`);
+    try {
+      switch (params.direction) {
+        case defs.Direction.Up: {
+          await migration.up();
+          break;
+        }
+        case defs.Direction.Down: {
+          await migration.down();
+          break;
+        }
+      }
+      framework.logger.debug(`Success`);
+    } catch (err) {
+      framework.logger.error(`Failed`, err);
+      process.exit(1);
+    }
+
+    yield {
+      name: migration.name,
+      direction: params.direction,
+      timestamp: new Date()
+    };
+
+    i++;
+  }
+}
+
+type WriteLogsParams = {
+  store: MigrationStore;
+  state?: defs.MigrationState;
+  log_stream: Iterable<defs.ExecutedMigration> | AsyncIterable<defs.ExecutedMigration>;
+};
+export const writeLogsToStore = async (params: WriteLogsParams): Promise<void> => {
+  const log = [...(params.state?.log || [])];
+  for await (const migration of params.log_stream) {
+    log.push(migration);
+    await params.store.save({
+      last_run: migration.name,
+      log: log
+    });
+  }
+};
diff --git a/packages/service-core/src/migrations/migrations-index.ts b/packages/service-core/src/migrations/migrations-index.ts
new file mode 100644
index 000000000..ae77adaba
--- /dev/null
+++ b/packages/service-core/src/migrations/migrations-index.ts
@@ -0,0 +1,3 @@
+export * from './definitions.js';
+export * from './executor.js';
+export * from './migrations.js';
diff --git a/packages/service-core/src/migrations/migrations.ts b/packages/service-core/src/migrations/migrations.ts
index 258bfce2a..572b622d6 100644
--- a/packages/service-core/src/migrations/migrations.ts
+++ b/packages/service-core/src/migrations/migrations.ts
@@ -1,12 +1,13 @@
-import { locks } from '@journeyapps-platform/micro';
 import * as fs from 'fs/promises';
 import * as path from 'path';
 import { fileURLToPath } from 'url';
 
-import { Direction, createMongoMigrationStore, execute, writeLogsToStore } from '@journeyapps-platform/micro-migrate';
-
 import * as db from '../db/db-index.js';
 import * as util from '../util/util-index.js';
+import * as locks from '../locks/locks-index.js';
+import { Direction } from './definitions.js';
+import { createMongoMigrationStore } from './store/migration-store.js';
+import { execute, writeLogsToStore } from './executor.js';
 
 const DEFAULT_MONGO_LOCK_COLLECTION = 'locks';
 const MONGO_LOCK_PROCESS = 'migrations';
@@ -94,12 +95,7 @@ export const migrate = async (options: MigrationOptions) => {
   const migrations = await loadMigrations(MIGRATIONS_DIR, runner_config);
 
   // Use the provided config to connect to Mongo
-  const store = createMongoMigrationStore({
-    uri: storage.uri,
-    database: storage.database,
-    username: storage.username,
-    password: storage.password
-  });
+  const store = createMongoMigrationStore(client);
 
   const state = await store.load();
 
diff --git a/packages/service-core/src/migrations/store/migration-store.ts b/packages/service-core/src/migrations/store/migration-store.ts
new file mode 100644
index 000000000..e89904ff8
--- /dev/null
+++ b/packages/service-core/src/migrations/store/migration-store.ts
@@ -0,0 +1,63 @@
+import Mongo from 'mongodb';
+import * as path from 'path';
+import * as defs from '../definitions.js';
+
+export type MigrationStore = {
+  load: () => Promise<defs.MigrationState | undefined>;
+  save: (state: defs.MigrationState) => Promise<void>;
+};
+
+/**
+ * A custom store for node-migrate which is used to save and load migrations that have
+ * been operated on to mongo.
+ */
+export const createMongoMigrationStore = (client: Mongo.MongoClient): MigrationStore => {
+  const collection = client.db().collection('migrations');
+
+  return {
+    load: async () => {
+      const state_entry = await collection.findOne();
+      if (!state_entry) {
+        return;
+      }
+
+      const { _id, ...state } = state_entry;
+
+      /**
+       * This is for backwards compatibility. A previous version of the migration tool used to save
+       * state as `lastRun`.
+       */
+      let last_run = state.last_run;
+      if ('lastRun' in state) {
+        last_run = (state as any).lastRun;
+      }
+
+      /**
+       * This is for backwards compatibility. A previous version of the migration tool used to include the
+       * file extension in migration names. This strips that extension off if it exists
+       */
+      const extension = path.extname(last_run);
+      if (extension) {
+        last_run = last_run.replace(extension, '');
+      }
+
+      return {
+        last_run,
+        log: state.log || []
+      };
+    },
+
+    save: async (state: defs.MigrationState) => {
+      await collection.replaceOne(
+        {},
+        {
+          last_run: state.last_run,
+          log: state.log
+        },
+        {
+          upsert: true
+        }
+      );
+    }
+  };
+};
diff --git a/packages/service-core/src/routes/hooks.ts b/packages/service-core/src/routes/hooks.ts
index 77dd7e3ac..e2ddae4f1 100644
--- a/packages/service-core/src/routes/hooks.ts
+++ b/packages/service-core/src/routes/hooks.ts
@@ -20,7 +20,7 @@ export const createRequestQueueHook = (params: CreateRequestQueueParams): fastif
   return (request, reply, next) => {
     if (
       (params.max_queue_depth == 0 && request_queue.running() == params.concurrency) ||
-      request_queue.length() >= params.max_queue_depth
+      (params.max_queue_depth > 0 && request_queue.length() >= params.max_queue_depth)
     ) {
       framework.logger.warn(`${request.method} ${request.url}`, {
         status: 429,
diff --git a/packages/service-core/src/storage/MongoBucketStorage.ts b/packages/service-core/src/storage/MongoBucketStorage.ts
index 720733114..39357b72d 100644
--- a/packages/service-core/src/storage/MongoBucketStorage.ts
+++ b/packages/service-core/src/storage/MongoBucketStorage.ts
@@ -7,6 +7,7 @@ import { wrapWithAbort } from 'ix/asynciterable/operators/withabort.js';
 import * as replication from '../replication/replication-index.js';
 import * as sync from '../sync/sync-index.js';
 import * as util from '../util/util-index.js';
+import * as locks from '../locks/locks-index.js';
 
 import {
   ActiveCheckpoint,
@@ -22,7 +23,6 @@ import { MongoSyncBucketStorage } from './mongo/MongoSyncBucketStorage.js';
 import { PowerSyncMongo, PowerSyncMongoOptions } from './mongo/db.js';
 import { SyncRuleDocument, SyncRuleState } from './mongo/models.js';
 import { generateSlotName } from './mongo/util.js';
-import { locks } from '@journeyapps-platform/micro';
 import { v4 as uuid } from 'uuid';
 import { ErrorReporter, logger } from '@powersync/service-framework';
 
diff --git a/packages/service-core/src/storage/mongo/db.ts b/packages/service-core/src/storage/mongo/db.ts
index 5db901899..1cc3f8471 100644
--- a/packages/service-core/src/storage/mongo/db.ts
+++ b/packages/service-core/src/storage/mongo/db.ts
@@ -1,8 +1,7 @@
 import * as mongo from 'mongodb';
-import * as micro from '@journeyapps-platform/micro';
 
 import * as db from '../../db/db-index.js';
-
+import * as locks from '../../locks/locks-index.js';
 import {
   BucketDataDocument,
   BucketParameterDocument,
@@ -36,7 +35,7 @@ export class PowerSyncMongo {
   readonly source_tables: mongo.Collection;
   readonly write_checkpoints: mongo.Collection;
   readonly instance: mongo.Collection;
-  readonly locks: mongo.Collection;
+  readonly locks: mongo.Collection<locks.Lock>;
 
   readonly client: mongo.MongoClient;
   readonly db: mongo.Db;
diff --git a/packages/service-framework/test/src/schema/parser.test.ts b/packages/service-framework/test/src/schema/parser.test.ts
index a0161aa32..89e9e7630 100644
--- a/packages/service-framework/test/src/schema/parser.test.ts
+++ b/packages/service-framework/test/src/schema/parser.test.ts
@@ -1,6 +1,6 @@
 import { describe, test, expect } from 'vitest';
 
-import * as framework_schema from '../../../src/schema/schema-index';
+import * as framework_schema from '../../../src/schema/schema-index.js';
 
 describe('schema-tools', () => {
   test('it should correctly prune unused definitions', () => {
diff --git
a/packages/service-framework/test/src/schema/schema-validation.test.ts b/packages/service-framework/test/src/schema/schema-validation.test.ts index 13e900c1f..a5980d140 100644 --- a/packages/service-framework/test/src/schema/schema-validation.test.ts +++ b/packages/service-framework/test/src/schema/schema-validation.test.ts @@ -1,7 +1,7 @@ import { describe, test, it, expect } from 'vitest'; -import base_schema from '../fixtures/schema'; -import * as framework_schema from '../../../src/schema/schema-index'; +import base_schema from '../fixtures/schema.js'; +import * as framework_schema from '../../../src/schema/schema-index.js'; const base_validator = framework_schema.createSchemaValidator(base_schema); diff --git a/packages/service-framework/test/src/schema/ts-codec-validation.test.ts b/packages/service-framework/test/src/schema/ts-codec-validation.test.ts index 6a817650c..148c151af 100644 --- a/packages/service-framework/test/src/schema/ts-codec-validation.test.ts +++ b/packages/service-framework/test/src/schema/ts-codec-validation.test.ts @@ -1,7 +1,7 @@ import * as t from 'ts-codec'; import { describe, test, expect } from 'vitest'; -import * as framework_schema from '../../../src/schema/schema-index'; +import * as framework_schema from '../../../src/schema/schema-index.js'; describe('ts-codec validation', () => { enum Values { diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index b6791c4ad..53008ab02 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -74,9 +74,6 @@ importers: packages/rsocket-router: dependencies: - '@journeyapps-platform/micro': - specifier: ^17.0.1 - version: 17.0.1(encoding@0.1.13)(socks@2.8.3) '@powersync/service-framework': specifier: workspace:* version: link:../service-framework @@ -114,12 +111,6 @@ importers: packages/service-core: dependencies: - '@journeyapps-platform/micro': - specifier: ^17.0.1 - version: 17.0.1(encoding@0.1.13)(socks@2.8.3) - '@journeyapps-platform/micro-migrate': - specifier: ^4.0.1 - version: 4.0.1(socks@2.8.3) '@opentelemetry/api': specifier: ~1.8.0 version: 1.8.0 @@ -300,12 +291,6 @@ importers: '@fastify/cors': specifier: 8.4.1 version: 8.4.1 - '@journeyapps-platform/micro': - specifier: ^17.0.1 - version: 17.0.1(encoding@0.1.13)(socks@2.8.3) - '@journeyapps-platform/micro-migrate': - specifier: ^4.0.1 - version: 4.0.1(socks@2.8.3) '@opentelemetry/api': specifier: ~1.6.0 version: 1.6.0 @@ -415,9 +400,6 @@ importers: packages: - '@apidevtools/json-schema-ref-parser@9.1.2': - resolution: {integrity: sha512-r1w81DpR+KyRWd3f+rk6TNqMgedmAxZP5v5KWlXQWlgMUUtyEJch0DKEci1SorPMiSeM8XPl7MZ3miJ60JIpQg==} - '@babel/code-frame@7.24.6': resolution: {integrity: sha512-ZJhac6FkEd1yhG2AHOmfcXG4ceoLltoCVJjN5XsWN9BifBQr+cHJbWi0h68HZuSORq+3WtJ2z0hwF2NG1b5kcA==} engines: {node: '>=6.9.0'} @@ -657,15 +639,6 @@ packages: '@gar/promisify@1.1.3': resolution: {integrity: sha512-k2Ty1JcVojjJFwrg/ThKi2ujJ7XNLYaFGNB/bWT9wGR+oSMJHMa5w+CUq6p/pVrKeNNgA7pCqEcjSnHVoqJQFw==} - '@grpc/grpc-js@1.10.8': - resolution: {integrity: sha512-vYVqYzHicDqyKB+NQhAc54I1QWCBLCrYG6unqOIcBTHx+7x8C9lcoLj3KVJXs2VB4lUbpWY+Kk9NipcbXYWmvg==} - engines: {node: '>=12.10.0'} - - '@grpc/proto-loader@0.7.13': - resolution: {integrity: sha512-AiXO/bfe9bmxBjxxtYxFAXGZvMaN5s8kO+jBHAJCON8rJoB5YS/D6X7ZNc6XQkuHNmyl4CYaMI1fJ/Gn27RGGw==} - engines: {node: '>=6'} - hasBin: true - '@humanwhocodes/momoa@2.0.4': resolution: {integrity: sha512-RE815I4arJFtt+FVeU1Tgp9/Xvecacji8w/V6XtXsWWH/wz/eNkNbhb+ny/+PlVZjV0rxQpRSQKNKE3lcktHEA==} engines: {node: '>=10.10.0'} @@ -682,93 +655,6 @@ packages: resolution: 
{integrity: sha512-mo5j5X+jIZmJQveBKeS/clAueipV7KgiX1vMgCxam1RNYiqE1w62n0/tJJnHtjW8ZHcQco5gY85jA3mi0L+nSA==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - '@journeyapps-platform/micro-alerts@4.0.0': - resolution: {integrity: sha512-9uRnfTmIUkEx98RPwt6N3+ldmmdmu9oQ0Em+51efb+dGZd8EKq6eBlM1pXo5ud/5wKBoKz4LH/EYsgYY01Q6HQ==, tarball: https://npm.pkg.github.com/download/@journeyapps-platform/micro-alerts/4.0.0/805317cde6a7fea1dc94b2b398c86b7133513e76} - - '@journeyapps-platform/micro-async-hooks@2.0.0': - resolution: {integrity: sha512-sewN7UydyAlxpcZSnsVOWSvruGE4Kjaer7HqfiETKq4y/Va3lu4Ei/xC5ZfJYhPM44fQCcoAMOpdMr9MczWSFQ==, tarball: https://npm.pkg.github.com/download/@journeyapps-platform/micro-async-hooks/2.0.0/00fa3dd0555eb9b728ca4aecd7cc2709e7985935} - - '@journeyapps-platform/micro-authorizers@6.0.0': - resolution: {integrity: sha512-uc7+OAVROx1llHYwWCeH35198Avw6gAxJBEdD65zUISL2c4xhTlwhBH1JB6l+jEHKwowvHTyjJxUfa6L5ijsSg==, tarball: https://npm.pkg.github.com/download/@journeyapps-platform/micro-authorizers/6.0.0/29a593a88f11e19b7cdb55883106d62f42ed9fa1} - - '@journeyapps-platform/micro-codecs@3.0.1': - resolution: {integrity: sha512-AdU5P3TdiI5Z77oyUEa2aUnhHX+sU70GpoQbZm2YVZo5wQTi+Eds0JXxZR84Nk2iLhcoCtziXA87NtwVgoVR5g==, tarball: https://npm.pkg.github.com/download/@journeyapps-platform/micro-codecs/3.0.1/b9a15997e1a27f908840f73ee25724348af14e72} - - '@journeyapps-platform/micro-db@3.0.2': - resolution: {integrity: sha512-x0cJCmcm9jB4xQ0Bx4psxAVCZ8HmBXRlN1gC/2p4qRzuw9YpLE8/zV0ElTtotzB2fUVSRzuBqgdrHXY0E4ru4w==, tarball: https://npm.pkg.github.com/download/@journeyapps-platform/micro-db/3.0.2/5b224878515b4a3f13d847c3dd9fd0495ffd3339} - - '@journeyapps-platform/micro-errors@3.0.0': - resolution: {integrity: sha512-cAswdlJ3QGsF3EKTNq4EX6JhIJeVg+xHJBzupUiADHMOGbldW7O01ewp/iIsYA0d4GYBERtivOCsXrGZUQax+g==, tarball: https://npm.pkg.github.com/download/@journeyapps-platform/micro-errors/3.0.0/cd67505976c54b6aa3e40635faeab0dc8baa4baf} - - '@journeyapps-platform/micro-events@2.0.1': - resolution: {integrity: sha512-Nc1WV5Etyz6nSzgEsv1ar1nsMeaAvhYbnLT/HObL7t3GCBwQKoytpXG+RyDGOtJVynMjEZsKBo8lcPvy997Rvw==, tarball: https://npm.pkg.github.com/download/@journeyapps-platform/micro-events/2.0.1/a58bb5a8efcc89cf7f2feb7c46896fe5f4386633} - - '@journeyapps-platform/micro-kafka@8.0.1': - resolution: {integrity: sha512-PkDIC8Nq0TfN7/LHxdiWc0XeWLjJ4S0sa5O7VCdCE/L57MZ4XQimsMaJurfxLJbRgdUh/9zSxBnKxaIvwiZohw==, tarball: https://npm.pkg.github.com/download/@journeyapps-platform/micro-kafka/8.0.1/812935e6d4b6fd8d4aa434b0a97bbde196eeef54} - - '@journeyapps-platform/micro-locks@2.0.1': - resolution: {integrity: sha512-rnnmcOpJAgWxtXwK9DDOLWLbLxqUmiFXdzAACMpHT/5BMRmFSVc4MO9cGiEqH8zSR2vagZ70Vq8deJ1qw51heQ==, tarball: https://npm.pkg.github.com/download/@journeyapps-platform/micro-locks/2.0.1/8fee94d8222904b7bc8f60749296714c1cc30894} - - '@journeyapps-platform/micro-logger@3.0.0': - resolution: {integrity: sha512-rJuDhXNi5l+aN21TK7SxJP11buQuEDXlUkPFzUIwvO1YQnNY6uWvEKnrDMPDh1S+1U0Ts06w9p3TeHDjXcLChw==, tarball: https://npm.pkg.github.com/download/@journeyapps-platform/micro-logger/3.0.0/7bbc7f27d7bb1f5d91459edc96ad91b9fcbbbef4} - - '@journeyapps-platform/micro-metrics@5.0.0': - resolution: {integrity: sha512-HaCuRoFjy+DSZr/AGdrCW81guB8YzE2nC1mmVfUq/XcxBRpDApfchaRriXQmb7GOUkcisXWdNEe7zPwwqqdiFQ==, tarball: https://npm.pkg.github.com/download/@journeyapps-platform/micro-metrics/5.0.0/44b3855ce54e000378a5ae81eb0929273501fd78} - - '@journeyapps-platform/micro-migrate@4.0.1': - resolution: {integrity: 
sha512-/4iLVy9Msr3N1P0Zr3FZBZNxp+S9d5C6oXK7NjGGEeGGzMrokyve/EqwIxLmStkZsuGG6XKMmQibJAFEmZiybg==, tarball: https://npm.pkg.github.com/download/@journeyapps-platform/micro-migrate/4.0.1/42817233ddcb4c512ef578faaa3f9c72e3abffbf} - hasBin: true - - '@journeyapps-platform/micro-mongo@5.0.1': - resolution: {integrity: sha512-f1hzRwS3854iIZThmVwnev147ix+KDYMK6A6SEY3hrBtq20RF1TwlPRo6JnskMZcvYSatlt2Zu71NyxbQoiBDQ==, tarball: https://npm.pkg.github.com/download/@journeyapps-platform/micro-mongo/5.0.1/8d8796ba0fed43fb20bb156ad5919e4e5c88ffe8} - - '@journeyapps-platform/micro-repl@4.0.0': - resolution: {integrity: sha512-UdXASfo7/Q4gKz9qQY3FuYu0x2tr9flwNa5Lzt100NzUC4Qj0uSaA1s0BkH4MTk/V+notgeR4QLj5Uco0Jg71g==, tarball: https://npm.pkg.github.com/download/@journeyapps-platform/micro-repl/4.0.0/d4a8087b4dcaa2bbe20eb1a6fc1ee6b497144045} - hasBin: true - - '@journeyapps-platform/micro-router-docs@2.0.0': - resolution: {integrity: sha512-XjbqtyL7G1oyWaVPaKOb52urYPFW5lRtOlWLhkgN8fU8QfNLlDD3oGbxHYJPndePQaiKOa8BFBfIJI3BFjUqkQ==, tarball: https://npm.pkg.github.com/download/@journeyapps-platform/micro-router-docs/2.0.0/55f749a27ab46aad7f8a4c7aa5b25eea4129a967} - - '@journeyapps-platform/micro-router-express@9.0.0': - resolution: {integrity: sha512-Strc5vVN+IqL5LiMvcYgulGPsJvoUTTr5OsAXmXhssh+7myVgP3n9fArhiWWDENNTuyw4MAllZotV2tauy3Sow==, tarball: https://npm.pkg.github.com/download/@journeyapps-platform/micro-router-express/9.0.0/f422f24be944ba4126e4f09f130c35fb8c7f87c4} - - '@journeyapps-platform/micro-router-fastify@3.0.0': - resolution: {integrity: sha512-IYIwEd7CxCYcEXfyHWGJyR+RWCBOVDjuR5Tw4qldgA+7chOU7W42VxkxN7orHiKhemfv+CgZ0wRTSpGLy2CTCQ==, tarball: https://npm.pkg.github.com/download/@journeyapps-platform/micro-router-fastify/3.0.0/c17459a77501f93c700fde72b4a24987682604a5} - - '@journeyapps-platform/micro-router-ws@8.0.0': - resolution: {integrity: sha512-Y7O1QCf6W7ZVDPFZOiJmg9hEMOWGSJ1kaqSNlGokErgK6fujiiZi1ZGnWrPwYnbNuTYwIrvfjsCqV4UN12+82w==, tarball: https://npm.pkg.github.com/download/@journeyapps-platform/micro-router-ws/8.0.0/6f554d4919d54516e82b4f8971b57d59c0f05c9f} - - '@journeyapps-platform/micro-router@7.0.1': - resolution: {integrity: sha512-q9cvq8ZXGd2BZCCZBGUWNK/P/3jHZPey3tvjV+Usb5pBUhzu7BihL7uPuj/s5RpscMbd9rtBWXqz7oY5gKLV1w==, tarball: https://npm.pkg.github.com/download/@journeyapps-platform/micro-router/7.0.1/e1e8aaea2efeb32fc7da457cdb6f099ebeecb272} - - '@journeyapps-platform/micro-schema@6.0.1': - resolution: {integrity: sha512-KRZFM4rUhAPqpizfyazX+eWAlf1BwOYibA2ealXKQqXgaTvxTV7oqObmvzhFr4cDuYXV3E+isW/FgOyspjuvYg==, tarball: https://npm.pkg.github.com/download/@journeyapps-platform/micro-schema/6.0.1/108672fbde6f092f49a432df20eca259aaf18be0} - - '@journeyapps-platform/micro-signals@3.0.0': - resolution: {integrity: sha512-mt4tzUNyvOnOpJJkrkVJJp15PKAT0O1GmYiUeLs1xm228+9ATzegd/qP8G5GW+qq32izBlfvLuLABBwgveOcpg==, tarball: https://npm.pkg.github.com/download/@journeyapps-platform/micro-signals/3.0.0/aa96760b5cb90a59598061eed6b1a7a6c6a29781} - - '@journeyapps-platform/micro-streaming@5.0.1': - resolution: {integrity: sha512-mLyUHsiOUthQXT7ho1UFZmd+fCfeH7eEJbPru2An9jDBHsR+miA9DKWINUrJgshrw+2Eoqvi+QMuMxPNs07RWg==, tarball: https://npm.pkg.github.com/download/@journeyapps-platform/micro-streaming/5.0.1/41fa06466599bbc73cd7a1dda22937efb0aec3d6} - - '@journeyapps-platform/micro-system@5.0.0': - resolution: {integrity: sha512-uuRh8pf4wqH+3ru/D7fk/b+HAAK5uGO0psCmOucPCnCthuJBD2d00wbbqjTITLUS6uOB/T16e9CMB4u16UpPxw==, tarball: 
https://npm.pkg.github.com/download/@journeyapps-platform/micro-system/5.0.0/f0c9a562c921508750addbb286663801248155df} - - '@journeyapps-platform/micro-telemetry@2.0.0': - resolution: {integrity: sha512-tkf86QjMTE/PA0qlUdgkqrwigPBvV2Twg+JkOvhHPSfDwdHu0XZuxzvFiRAbzRmeLazFCFubyBFnks0+fagvAw==, tarball: https://npm.pkg.github.com/download/@journeyapps-platform/micro-telemetry/2.0.0/f541522bed5b133f5a40ccf64a768fa7b1621416} - - '@journeyapps-platform/micro-tracing@4.0.0': - resolution: {integrity: sha512-WYPTBtf7L2ahyWlzyglLTbT8br26+tva0CTXuxojJureY9tmf0DO6MSC1xakpWwo2hhIBSzpbWY6IThPmaj8jg==, tarball: https://npm.pkg.github.com/download/@journeyapps-platform/micro-tracing/4.0.0/d94c5c794a15a0be136358aeacad6c3523275b1a} - - '@journeyapps-platform/micro-utils@5.0.0': - resolution: {integrity: sha512-3aHJZ6eCSdYCMsNhPnwKSLn9yDH7eeLsRd+MogD85IkD6bTOx/COy26T4/KC5aRGvNUX3Ru7lnuT74SImwv1AA==, tarball: https://npm.pkg.github.com/download/@journeyapps-platform/micro-utils/5.0.0/0a84e8b8e05f8791e0354a151316ab71f6290f1c} - - '@journeyapps-platform/micro@17.0.1': - resolution: {integrity: sha512-+wrmB7sDPL6RkL011bKEYwAcqDGRJoXtKB1V1jhOz1P/VyMAda6EsAgcBewPpkvIg9q+JQWvwEg/HvJ32pKKJQ==, tarball: https://npm.pkg.github.com/download/@journeyapps-platform/micro/17.0.1/32ed182dcbda322fa5d9de94827b711a278de997} - hasBin: true - - '@journeyapps-platform/types-micro-ws@2.0.0': - resolution: {integrity: sha512-91rA17Orl1Nv55mbEzhXWOfIBidb1hUWNoaXiEeaVQ/M+0HuDj8a+54JpnmIafG1Wp3HNF5ICX2UcmIzTKHB+A==, tarball: https://npm.pkg.github.com/download/@journeyapps-platform/types-micro-ws/2.0.0/89852c246163426360bfe862b41cbaba87198ed5} - '@jridgewell/resolve-uri@3.1.2': resolution: {integrity: sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==} engines: {node: '>=6.0.0'} @@ -779,12 +665,6 @@ packages: '@jridgewell/trace-mapping@0.3.9': resolution: {integrity: sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==} - '@js-sdsl/ordered-map@4.4.2': - resolution: {integrity: sha512-iUKgm52T8HOE/makSxjqoWhe95ZJA1/G1sYsGev2JDKUSS14KAgg1LHb+Ba+IPow0xflbnSkOsZcO08C7w1gYw==} - - '@jsdevtools/ono@7.1.3': - resolution: {integrity: sha512-4JQNk+3mVzK3xh2rqd6RB4J46qUR19azEHBneZyTZM+c456qOrbbM/5xcR8huNCCcbVt7+UmizG6GuUvPvKUYg==} - '@ljharb/through@2.3.13': resolution: {integrity: sha512-/gKJun8NNiWGZJkGzI/Ragc53cOdcLNdzjLaIa+GEjguQs0ulsurx8WN0jijdK9yPqDvziX995sMRLyLt1uZMQ==} engines: {node: '>= 0.4'} @@ -852,10 +732,6 @@ packages: resolution: {integrity: sha512-HxjD7xH9iAE4OyhNaaSec65i1H6QZYBWSwWkowFfsc5YAcDvJG30/J1sRKXEQqdmUcKTXEAnA66UciqZha/4+Q==} engines: {node: '>=14'} - '@opentelemetry/api@1.3.0': - resolution: {integrity: sha512-YveTnGNsFFixTKJz09Oi4zYkiLT5af3WpZDu4aIUM7xX+2bHAkOJayFTVQd6zB8kkWPpbua4Ha6Ql00grdLlJQ==} - engines: {node: '>=8.0.0'} - '@opentelemetry/api@1.6.0': resolution: {integrity: sha512-OWlrQAnWn9577PhVgqjUvMr1pg57Bc4jv0iL4w0PRuOSRvq67rvHW9Ie/dZVMvCzhSCB+UxhcY/PmCmFj33Q+g==} engines: {node: '>=8.0.0'} @@ -874,12 +750,6 @@ packages: peerDependencies: '@opentelemetry/api': '>=1.0.0 <1.10.0' - '@opentelemetry/context-async-hooks@1.8.0': - resolution: {integrity: sha512-ueLmocbWDi1aoU4IPdOQyt4qz/Dx+NYyU4qoa3d683usbnkDLUXYXJFfKIMPFV2BbrI5qtnpTtzErCKewoM8aw==} - engines: {node: '>=14'} - peerDependencies: - '@opentelemetry/api': '>=1.0.0 <1.4.0' - '@opentelemetry/core@1.17.0': resolution: {integrity: sha512-tfnl3h+UefCgx1aeN2xtrmr6BmdWGKXypk0pflQR0urFS40aE88trnkOMc2HTJZbMrqEEl4HsaBeFhwLVXsrJg==} engines: {node: '>=14'} @@ -898,30 +768,6 
@@ packages: peerDependencies: '@opentelemetry/api': '>=1.0.0 <1.10.0' - '@opentelemetry/core@1.8.0': - resolution: {integrity: sha512-6SDjwBML4Am0AQmy7z1j6HGrWDgeK8awBRUvl1PGw6HayViMk4QpnUXvv4HTHisecgVBy43NE/cstWprm8tIfw==} - engines: {node: '>=14'} - peerDependencies: - '@opentelemetry/api': '>=1.0.0 <1.4.0' - - '@opentelemetry/exporter-jaeger@1.8.0': - resolution: {integrity: sha512-3h16Sb1T/G33S+RM3yjt1t2xRuu/mi9iB172faS6qFQEclTTJru1pTK4wuWG+9GyI7uyBLfbQoXVA5/BA6gvHw==} - engines: {node: '>=14'} - peerDependencies: - '@opentelemetry/api': ^1.0.0 - - '@opentelemetry/exporter-metrics-otlp-grpc@0.34.0': - resolution: {integrity: sha512-9INc1TBJ7OwpMsImqUjpPEvQeRyyU9tEiFQIYQ53kKQK7V8MqB5koyDeb5/qBSbNu4ZxSpukAOLPgBOEMDK6Qw==} - engines: {node: '>=14'} - peerDependencies: - '@opentelemetry/api': ^1.3.0 - - '@opentelemetry/exporter-metrics-otlp-http@0.34.0': - resolution: {integrity: sha512-ToRJA4frErHGiKKnPCI3+cvdyK8rksRI+mV6xZ6Yt7HiIrArY9eDX7QaCEZcTLbQIib09LTlCX87TKEL3TToWQ==} - engines: {node: '>=14'} - peerDependencies: - '@opentelemetry/api': ^1.3.0 - '@opentelemetry/exporter-metrics-otlp-http@0.51.1': resolution: {integrity: sha512-oFXvif9iksHUxrzG3P8ohMLt7xSrl+oDMqxD/3XXndU761RFAKSbRDpfrQs25U5D+A2aMV3qk+4kfUWdJhZ77g==} engines: {node: '>=14'} @@ -940,30 +786,6 @@ packages: peerDependencies: '@opentelemetry/api': ^1.3.0 - '@opentelemetry/exporter-trace-otlp-grpc@0.34.0': - resolution: {integrity: sha512-x1V0daRLS6k0dhBPNNLMOP+OSrh8M60Xs9/YkuZS0+/zdbcIjNvPzo/8+dK3zOJx+j1KF0oBX9zxK0SX3PSnZw==} - engines: {node: '>=14'} - peerDependencies: - '@opentelemetry/api': ^1.0.0 - - '@opentelemetry/exporter-trace-otlp-http@0.34.0': - resolution: {integrity: sha512-MBtUwMvgpdoRo9iqK2eDJ8SP2xKYWeBCSu99s4cc1kg4HKKOpenXLE/6daGsSZ+QTPwd8j+9xMSd+hhBg+Bvzw==} - engines: {node: '>=14'} - peerDependencies: - '@opentelemetry/api': ^1.0.0 - - '@opentelemetry/exporter-trace-otlp-proto@0.34.0': - resolution: {integrity: sha512-Ump/OyKxq1b4I01aBWSHJw8PCquZAHZh6ykplcmFBs9BZ8DIM7Jl3+zqrS8Vb7YcZ7DZTYORl8Xv/JQoQ+cFlw==} - engines: {node: '>=14'} - peerDependencies: - '@opentelemetry/api': ^1.0.0 - - '@opentelemetry/exporter-zipkin@1.8.0': - resolution: {integrity: sha512-Y3WqNCZjfWKnHiRzb35sXpDfGL4Gx2qajFAv059s/VFayIPytLHUOrZMiQqrpfzU/TSIKPG4OHJaypFtUtNlQQ==} - engines: {node: '>=14'} - peerDependencies: - '@opentelemetry/api': ^1.0.0 - '@opentelemetry/instrumentation-connect@0.37.0': resolution: {integrity: sha512-SeQktDIH5rNzjiEiazWiJAIXkmnLOnNV7wwHpahrqE0Ph+Z3heqMfxRtoMtbdJSIYLfcNZYO51AjxZ00IXufdw==} engines: {node: '>=14'} @@ -994,12 +816,6 @@ packages: peerDependencies: '@opentelemetry/api': ^1.3.0 - '@opentelemetry/instrumentation-http@0.34.0': - resolution: {integrity: sha512-sZxpYOggRIFwdcdy1wWBGG8fwiuWWK4j3qv/rdqTwcPvrVT4iSCoPNDMZYxOcxSEP1fybq28SK43e+IKwxVElQ==} - engines: {node: '>=14'} - peerDependencies: - '@opentelemetry/api': ^1.3.0 - '@opentelemetry/instrumentation-http@0.52.0': resolution: {integrity: sha512-E6ywZuxTa4LnVXZGwL1oj3e2Eog1yIaNqa8KjKXoGkDNKte9/SjQnePXOmhQYI0A9nf0UyFbP9aKd+yHrkJXUA==} engines: {node: '>=14'} @@ -1018,12 +834,6 @@ packages: peerDependencies: '@opentelemetry/api': ^1.3.0 - '@opentelemetry/instrumentation-mongodb@0.33.0': - resolution: {integrity: sha512-bjRF55grOFRn5XQxm1yDL56FD9UVvmIcBDSsgA0dbUr3VOUu3sN7o34t2uDx7EpnfwhMeAvOBO1wbWXdHBzapg==} - engines: {node: '>=14'} - peerDependencies: - '@opentelemetry/api': ^1.3.0 - '@opentelemetry/instrumentation-mongodb@0.45.0': resolution: {integrity: 
sha512-xnZP9+ayeB1JJyNE9cIiwhOJTzNEsRhXVdLgfzmrs48Chhhk026mQdM5CITfyXSCfN73FGAIB8d91+pflJEfWQ==} engines: {node: '>=14'} @@ -1066,12 +876,6 @@ packages: peerDependencies: '@opentelemetry/api': ^1.3.0 - '@opentelemetry/instrumentation@0.34.0': - resolution: {integrity: sha512-VET/bOh4StOQV4vf1sAvn2JD67BhW2vPZ/ynl2gHXyafme2yB8Hs9+tr1TLzFwNGo7jwMFviFQkZjCYxMuK0AA==} - engines: {node: '>=14'} - peerDependencies: - '@opentelemetry/api': ^1.3.0 - '@opentelemetry/instrumentation@0.43.0': resolution: {integrity: sha512-S1uHE+sxaepgp+t8lvIDuRgyjJWisAb733198kwQTUc9ZtYQ2V2gmyCtR1x21ePGVLoMiX/NWY7WA290hwkjJQ==} engines: {node: '>=14'} @@ -1090,54 +894,18 @@ packages: peerDependencies: '@opentelemetry/api': ^1.3.0 - '@opentelemetry/otlp-exporter-base@0.34.0': - resolution: {integrity: sha512-xVNvQm7oXeQogeI21iTZRnBrBYS0OVekPutEJgb7jQtHg7x2GWuCBQK9sDo84FRWNXBpNOgSYqsf8/+PxIJ2vA==} - engines: {node: '>=14'} - peerDependencies: - '@opentelemetry/api': ^1.0.0 - '@opentelemetry/otlp-exporter-base@0.51.1': resolution: {integrity: sha512-UYlnOYyDdzo1Gw559EHCzru0RwhvuXCwoH8jGo9J4gO1TE58GjnEmIjomMsKBCym3qWNJfIQXw+9SZCV0DdQNg==} engines: {node: '>=14'} peerDependencies: '@opentelemetry/api': ^1.0.0 - '@opentelemetry/otlp-grpc-exporter-base@0.34.0': - resolution: {integrity: sha512-8k3CIVjf2+/kmnQNKIR8GtPIfRsQ5ZxBVh3uKof54stVXH/nX5ArceuQaoEfFoFQ8S8wayBZ1QsBwdab65UK0g==} - engines: {node: '>=14'} - peerDependencies: - '@opentelemetry/api': ^1.0.0 - - '@opentelemetry/otlp-proto-exporter-base@0.34.0': - resolution: {integrity: sha512-qHnwcAafW8OKeM2a1YQNoL9/sgWVE+JxvMgxf2CtYBqsccIakGPoQ43hLCFLAL3I2Af4BNb5t4KnW8lrtnyUjg==} - engines: {node: '>=14'} - peerDependencies: - '@opentelemetry/api': ^1.0.0 - - '@opentelemetry/otlp-transformer@0.34.0': - resolution: {integrity: sha512-NghPJvn3pVoWBuhWyBe1n/nWIrj1D1EFUH/bIkWEp0CMVWFLux6R+BkRPZQo5klTcj8xFhCZZIZsL/ubkYPryg==} - engines: {node: '>=14'} - peerDependencies: - '@opentelemetry/api': '>=1.3.0 <1.4.0' - '@opentelemetry/otlp-transformer@0.51.1': resolution: {integrity: sha512-OppYOXwV9LQqqtYUCywqoOqX/JT9LQ5/FMuPZ//eTkvuHdUC4ZMwz2c6uSoT2R90GWvvGnF1iEqTGyTT3xAt2Q==} engines: {node: '>=14'} peerDependencies: '@opentelemetry/api': '>=1.3.0 <1.9.0' - '@opentelemetry/propagator-b3@1.8.0': - resolution: {integrity: sha512-ffP6AVHyISqK1kiUY1MoVKt43Wp3FJXI8NOePqxBrAU7bRDJ13276VbSl4ugCZbZLTPrPhhSmvQh1WqlfUgcAg==} - engines: {node: '>=14'} - peerDependencies: - '@opentelemetry/api': '>=1.0.0 <1.4.0' - - '@opentelemetry/propagator-jaeger@1.8.0': - resolution: {integrity: sha512-v6GA38k2cqeGAh3368prLW5MsuG2/KxpfWI/PxTPjCa9tThDPq0cvhKpk7cEma3y+F6rieMhwmzZhKQL5QVBzQ==} - engines: {node: '>=14'} - peerDependencies: - '@opentelemetry/api': '>=1.0.0 <1.4.0' - '@opentelemetry/redis-common@0.36.2': resolution: {integrity: sha512-faYX1N0gpLhej/6nyp6bgRjzAKXn5GOEMYY7YhciSfCoITAktLUtQ36d24QEWNA1/WA1y6qQunCe0OhHRkVl9g==} engines: {node: '>=14'} @@ -1160,12 +928,6 @@ packages: peerDependencies: '@opentelemetry/api': '>=1.0.0 <1.10.0' - '@opentelemetry/resources@1.8.0': - resolution: {integrity: sha512-KSyMH6Jvss/PFDy16z5qkCK0ERlpyqixb1xwb73wLMvVq+j7i89lobDjw3JkpCcd1Ws0J6jAI4fw28Zufj2ssg==} - engines: {node: '>=14'} - peerDependencies: - '@opentelemetry/api': '>=1.0.0 <1.4.0' - '@opentelemetry/sdk-logs@0.51.1': resolution: {integrity: sha512-ULQQtl82b673PpZc5/0EtH4V+BrwVOgKJZEB7tYZnGTG3I98tQVk89S9/JSixomDr++F4ih+LSJTCqIKBz+MQQ==} engines: {node: '>=14'} @@ -1185,18 +947,6 @@ packages: peerDependencies: '@opentelemetry/api': '>=1.3.0 <1.9.0' - '@opentelemetry/sdk-metrics@1.8.0': - resolution: 
{integrity: sha512-+KYb+uj0vHhl8xzJO+oChS4oP1e+/2Wl3SXoHoIdcEjd1TQfDV+lxOm4oqxWq6wykXvI35/JHyejxSoT+qxGmg==} - engines: {node: '>=14'} - peerDependencies: - '@opentelemetry/api': '>=1.3.0 <1.4.0' - - '@opentelemetry/sdk-node@0.34.0': - resolution: {integrity: sha512-4OX2qvOPoK3De2e600Gim46I3PahI6UkD8uZ9hEgSg40egHXKw3keIaFnz1CWkYwa5hhVVIBsoobI41cHfulHA==} - engines: {node: '>=14'} - peerDependencies: - '@opentelemetry/api': '>=1.3.0 <1.4.0' - '@opentelemetry/sdk-trace-base@1.24.1': resolution: {integrity: sha512-zz+N423IcySgjihl2NfjBf0qw1RWe11XIAWVrTNOSSI6dtSPJiVom2zipFB2AEEtJWpv0Iz6DY6+TjnyTV5pWg==} engines: {node: '>=14'} @@ -1209,18 +959,6 @@ packages: peerDependencies: '@opentelemetry/api': '>=1.0.0 <1.10.0' - '@opentelemetry/sdk-trace-base@1.8.0': - resolution: {integrity: sha512-iH41m0UTddnCKJzZx3M85vlhKzRcmT48pUeBbnzsGrq4nIay1oWVHKM5nhB5r8qRDGvd/n7f/YLCXClxwM0tvA==} - engines: {node: '>=14'} - peerDependencies: - '@opentelemetry/api': '>=1.0.0 <1.4.0' - - '@opentelemetry/sdk-trace-node@1.8.0': - resolution: {integrity: sha512-6FqhJEgW9Nke5SO4Ul9+5EWOfms/JeLg5LRqILMPMK4UMBWcOtk7jldvGGyfVpraJ16/WPo/R5NSnMwlupN5zQ==} - engines: {node: '>=14'} - peerDependencies: - '@opentelemetry/api': '>=1.0.0 <1.4.0' - '@opentelemetry/semantic-conventions@1.17.0': resolution: {integrity: sha512-+fguCd2d8d2qruk0H0DsCEy2CTK3t0Tugg7MhZ/UQMvmewbZLNnJ6heSYyzIZWG5IPfAXzoj4f4F/qpM7l4VBA==} engines: {node: '>=14'} @@ -1233,10 +971,6 @@ packages: resolution: {integrity: sha512-M+kkXKRAIAiAP6qYyesfrC5TOmDpDVtsxuGfPcqd9B/iBrac+E14jYwrgm0yZBUIbIP2OnqC3j+UgkXLm1vxUQ==} engines: {node: '>=14'} - '@opentelemetry/semantic-conventions@1.8.0': - resolution: {integrity: sha512-TYh1MRcm4JnvpqtqOwT9WYaBYY4KERHdToxs/suDTLviGRsQkIjS5yYROTYTSJQUnYLOn/TuOh5GoMwfLSU+Ew==} - engines: {node: '>=14'} - '@opentelemetry/sql-common@0.40.1': resolution: {integrity: sha512-nSDlnHSqzC3pXn/wZEZVLuAuJ1MYMXPBwtv2qAbCa3847SaHItdE7SzUq/Jtb0KZmh1zfAbNi3AAMjztTT4Ugg==} engines: {node: '>=14'} @@ -1262,36 +996,6 @@ packages: '@prisma/instrumentation@5.15.0': resolution: {integrity: sha512-fCWOOOajTKOUEp43gRmBqwt6oN9bPJcLiloi2OG/2ED0N5z62Cuza6FDrlm3SJHQAXYlXqLE0HLdEE5WcUkOzg==} - '@protobufjs/aspromise@1.1.2': - resolution: {integrity: sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==} - - '@protobufjs/base64@1.1.2': - resolution: {integrity: sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg==} - - '@protobufjs/codegen@2.0.4': - resolution: {integrity: sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg==} - - '@protobufjs/eventemitter@1.1.0': - resolution: {integrity: sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q==} - - '@protobufjs/fetch@1.1.0': - resolution: {integrity: sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ==} - - '@protobufjs/float@1.0.2': - resolution: {integrity: sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ==} - - '@protobufjs/inquire@1.1.0': - resolution: {integrity: sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q==} - - '@protobufjs/path@1.1.2': - resolution: {integrity: sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA==} - - '@protobufjs/pool@1.1.0': - resolution: {integrity: 
sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw==} - - '@protobufjs/utf8@1.1.0': - resolution: {integrity: sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==} - '@rollup/rollup-android-arm-eabi@4.18.0': resolution: {integrity: sha512-Tya6xypR10giZV1XzxmH5wr25VcZSncG0pZIjfePT0OVBvqNEurzValetGNarVrGiq66EBVAFn15iYX4w6FKgQ==} cpu: [arm] @@ -1372,26 +1076,10 @@ packages: cpu: [x64] os: [win32] - '@sentry-internal/tracing@7.116.0': - resolution: {integrity: sha512-y5ppEmoOlfr77c/HqsEXR72092qmGYS4QE5gSz5UZFn9CiinEwGfEorcg2xIrrCuU7Ry/ZU2VLz9q3xd04drRA==} - engines: {node: '>=8'} - - '@sentry/core@7.116.0': - resolution: {integrity: sha512-J6Wmjjx+o7RwST0weTU1KaKUAlzbc8MGkJV1rcHM9xjNTWTva+nrcCM3vFBagnk2Gm/zhwv3h0PvWEqVyp3U1Q==} - engines: {node: '>=8'} - '@sentry/core@8.9.2': resolution: {integrity: sha512-ixm8NISFlPlEo3FjSaqmq4nnd13BRHoafwJ5MG+okCz6BKGZ1SexEggP42/QpGvDprUUHnfncG6WUMgcarr1zA==} engines: {node: '>=14.18'} - '@sentry/integrations@7.116.0': - resolution: {integrity: sha512-UZb60gaF+7veh1Yv79RiGvgGYOnU6xA97H+hI6tKgc1uT20YpItO4X56Vhp0lvyEyUGFZzBRRH1jpMDPNGPkqw==} - engines: {node: '>=8'} - - '@sentry/node@7.116.0': - resolution: {integrity: sha512-HB/4TrJWbnu6swNzkid+MlwzLwY/D/klGt3R0aatgrgWPo2jJm6bSl4LUT39Cr2eg5I1gsREQtXE2mAlC6gm8w==} - engines: {node: '>=8'} - '@sentry/node@8.9.2': resolution: {integrity: sha512-Q+JBpR4yx3eUyyhwgugucfRtPg65gYvzJGEmjzcnDJXJqX8ms4HPpNv9o2Om7A4014JxIibUdrQ+p5idcT7SZA==} engines: {node: '>=14.18'} @@ -1406,18 +1094,10 @@ packages: '@opentelemetry/sdk-trace-base': ^1.25.0 '@opentelemetry/semantic-conventions': ^1.25.0 - '@sentry/types@7.116.0': - resolution: {integrity: sha512-QCCvG5QuQrwgKzV11lolNQPP2k67Q6HHD9vllZ/C4dkxkjoIym8Gy+1OgAN3wjsR0f/kG9o5iZyglgNpUVRapQ==} - engines: {node: '>=8'} - '@sentry/types@8.9.2': resolution: {integrity: sha512-+LFOyQGl+zk5SZRGZD2MEURf7i5RHgP/mt3s85Rza+vz8M211WJ0YsjkIGUJFSY842nged5QLx4JysLaBlLymg==} engines: {node: '>=14.18'} - '@sentry/utils@7.116.0': - resolution: {integrity: sha512-Vn9fcvwTq91wJvCd7WTMWozimqMi+dEZ3ie3EICELC2diONcN16ADFdzn65CQQbYwmUzRjN9EjDN2k41pKZWhQ==} - engines: {node: '>=8'} - '@sentry/utils@8.9.2': resolution: {integrity: sha512-A4srR9mEBFdVXwSEKjQ94msUbVkMr8JeFiEj9ouOFORw/Y/ux/WV2bWVD/ZI9wq0TcTNK8L1wBgU8UMS5lIq3A==} engines: {node: '>=14.18'} @@ -1521,9 +1201,6 @@ packages: '@types/http-errors@2.0.4': resolution: {integrity: sha512-D0CFMMtydbJAegzOyHjtiKPLlvnm3iTZyZRSZoLq2mRhDdmLfIWOCYPfQJ4cu2erKghU++QvjcUjp/5h7hESpA==} - '@types/json-schema@7.0.15': - resolution: {integrity: sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==} - '@types/keygrip@1.0.6': resolution: {integrity: sha512-lZuNAY9xeJt7Bx4t4dx0rYCDqGPW8RXhQZK1td7d4H6E9zYbLoOtjBvfwdTKpsyxQI/2jv+armjX/RW+ZNpXOQ==} @@ -1636,10 +1313,6 @@ packages: abstract-logging@2.0.1: resolution: {integrity: sha512-2BjRTZxTPvheOvGbBslFSYOUkr+SjPtOnrLP33f+VIWLzezQpZcqVg7ja3L4dBXmzzgwT+a029jRx5PCi3JuiA==} - accepts@1.3.8: - resolution: {integrity: sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==} - engines: {node: '>= 0.6'} - acorn-import-assertions@1.9.0: resolution: {integrity: sha512-cmMwop9x+8KFhxvKrKfPYmN6/pKTYYHBqLa0DfvVZcKMJWNyWLnaqND7dx/qn66R7ewM1UX5XMaDVP5wlVTaVA==} peerDependencies: @@ -1693,9 +1366,6 @@ packages: ansi-align@3.0.1: resolution: {integrity: sha512-IOfwwBF5iczOjp/WeY4YxyjqAFMQoZufdQWDd19SEExbVLNXqvpzSJ/M7Za4/sCPmQ0+GRquoA7bGcINcxew6w==} - 
ansi-color@0.2.1: - resolution: {integrity: sha512-bF6xLaZBLpOQzgYUtYEhJx090nPSZk1BQ/q2oyBK9aMMcJHzx9uXGCjI2Y+LebsN4Jwoykr0V9whbPiogdyHoQ==} - ansi-colors@4.1.3: resolution: {integrity: sha512-/6w/C21Pm1A7aZitlI5Ni/2J6FFQN8i1Cvz3kHABAAbw93v/NlvKdVOqz7CCWz/3iv/JplRSEEZ83XION15ovw==} engines: {node: '>=6'} @@ -1753,9 +1423,6 @@ packages: resolution: {integrity: sha512-ahC5W1xgou+KTXix4sAO8Ki12Q+jf4i0+tmk3sC+zgcynshkHxzpXdImBehiUYKKKDwvfFiJl1tZt6ewscS1Mg==} engines: {node: '>= 0.4'} - array-flatten@1.1.1: - resolution: {integrity: sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==} - array-union@2.1.0: resolution: {integrity: sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==} engines: {node: '>=8'} @@ -1815,10 +1482,6 @@ packages: bl@4.1.0: resolution: {integrity: sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==} - body-parser@1.20.2: - resolution: {integrity: sha512-ml9pReCu3M61kGlqoTm2umSXTlRTuGTx0bfYj+uIUKKYycG5NtSbeetV3faSU6R7ajOPw0g/J1PvK4qNy7s5bA==} - engines: {node: '>= 0.8', npm: 1.2.8000 || >= 1.4.16} - boxen@7.1.1: resolution: {integrity: sha512-2hCgjEmP8YLWQ130n2FerGv7rYpfBmnmp9Uy2Le1vge6X3gZIfSmEzP5QTDElFxcvVcXlEn8Aq6MU/PZygIOog==} engines: {node: '>=14.16'} @@ -1849,14 +1512,6 @@ packages: buffer@6.0.3: resolution: {integrity: sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==} - bufrw@1.4.0: - resolution: {integrity: sha512-sWm8iPbqvL9+5SiYxXH73UOkyEbGQg7kyHQmReF89WJHQJw2eV4P/yZ0E+b71cczJ4pPobVhXxgQcmfSTgGHxQ==} - engines: {node: '>= 0.10.x'} - - bytes@3.1.2: - resolution: {integrity: sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==} - engines: {node: '>= 0.8'} - cac@6.7.14: resolution: {integrity: sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==} engines: {node: '>=8'} @@ -1881,9 +1536,6 @@ packages: resolution: {integrity: sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==} engines: {node: '>= 0.4'} - call-me-maybe@1.0.2: - resolution: {integrity: sha512-HpX65o1Hnr9HH25ojC1YGs7HCQLq0GCOibSaWER0eNpgJ/Z1MZv2mTc7+xh6WOPxbRVcmgbv4hGU+uSQ/2xFZQ==} - camelcase-keys@6.2.2: resolution: {integrity: sha512-YrwaA0vEKazPBkn0ipTiMpSajYDSe+KjQfrjhcBMxJt/znbvlHd8Pw/Vamaz5EB4Wfhs3SUR3Z9mwRu/P3s3Yg==} engines: {node: '>=8'} @@ -2029,17 +1681,6 @@ packages: console-control-strings@1.1.0: resolution: {integrity: sha512-ty/fTekppD2fIwRvnZAVdeOiGd1c7YXEixbgJTNzqcxJWKQnjJ/V1bNEEE6hygpM3WjwHFUVK6HTjWSzV4a8sQ==} - content-disposition@0.5.4: - resolution: {integrity: sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==} - engines: {node: '>= 0.6'} - - content-type@1.0.5: - resolution: {integrity: sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==} - engines: {node: '>= 0.6'} - - cookie-signature@1.0.6: - resolution: {integrity: sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==} - cookie@0.6.0: resolution: {integrity: sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==} engines: {node: '>= 0.6'} @@ -2102,14 +1743,6 @@ packages: resolution: {integrity: sha512-fnULvOpxnC5/Vg3NCiWelDsLiUc9bRwAPs/+LfTLNvetFCtCTN+yQz15C/fs4AwX1R9K5GLtLfn8QW+dWisaAw==} engines: {node: '>=0.11'} - debug@2.6.9: - resolution: {integrity: 
sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==} - peerDependencies: - supports-color: '*' - peerDependenciesMeta: - supports-color: - optional: true - debug@4.3.4: resolution: {integrity: sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==} engines: {node: '>=6.0'} @@ -2157,14 +1790,6 @@ packages: delegates@1.0.0: resolution: {integrity: sha512-bd2L678uiWATM6m5Z1VzNCErI3jiGzt6HGY8OVICs40JQq/HALfbyNJmp0UDakEY4pMMaN0Ly5om/B1VI/+xfQ==} - depd@2.0.0: - resolution: {integrity: sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==} - engines: {node: '>= 0.8'} - - destroy@1.2.0: - resolution: {integrity: sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==} - engines: {node: '>= 0.8', npm: 1.2.8000 || >= 1.4.16} - detect-indent@6.1.0: resolution: {integrity: sha512-reYkTUJAZb9gUuZ2RvVCNhVHdg62RHnJ7WJl8ftMi4diZ6NWlciOzQN88pUhSELEwflJht4oQDv0F0BMlwaYtA==} engines: {node: '>=8'} @@ -2201,9 +1826,6 @@ packages: eastasianwidth@0.2.0: resolution: {integrity: sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==} - ee-first@1.1.1: - resolution: {integrity: sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==} - emoji-regex@8.0.0: resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==} @@ -2213,10 +1835,6 @@ packages: enabled@2.0.0: resolution: {integrity: sha512-AKrN98kuwOzMIdAizXGI86UFBoo26CL21UM763y1h/GMSJ4/OHU9k2YlsmBpyScFo/wbLzWQJBMCW4+IO3/+OQ==} - encodeurl@1.0.2: - resolution: {integrity: sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==} - engines: {node: '>= 0.8'} - encoding@0.1.13: resolution: {integrity: sha512-ETBauow1T35Y/WZMkio9jiM0Z5xjHHmJ4XmjZOq1l/dXz3lr2sRn87nJy20RupqSh1F2m3HHPSp8ShIPQJrJ3A==} @@ -2234,9 +1852,6 @@ packages: error-ex@1.3.2: resolution: {integrity: sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==} - error@7.0.2: - resolution: {integrity: sha512-UtVv4l5MhijsYUxPJo4390gzfZvAnTHreNnDjnTZaKIiZ/SemXxAhBkYSKtWa5RtBXbLP8tMgn/n0RUa/H7jXw==} - es-abstract@1.23.3: resolution: {integrity: sha512-e+HfNH61Bj1X9/jLc5v1owaLYuHdeHHSQlkhCBiTK8rBvKaULl/beGMxwrMXjpYrv4pz22BlY570vVePA2ho4A==} engines: {node: '>= 0.4'} @@ -2277,9 +1892,6 @@ packages: resolution: {integrity: sha512-2Sd4ShcWxbx6OY1IHyla/CVNwvg7XwZVoXZHcSu9w9SReNP1EzzD5T8NWKIR38fIqEns9kDWKUQTXXAmlDrdPg==} engines: {node: '>=12'} - escape-html@1.0.3: - resolution: {integrity: sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==} - escape-string-regexp@1.0.5: resolution: {integrity: sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==} engines: {node: '>=0.8.0'} @@ -2289,10 +1901,6 @@ packages: engines: {node: '>=4'} hasBin: true - etag@1.8.1: - resolution: {integrity: sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==} - engines: {node: '>= 0.6'} - event-stream@3.3.4: resolution: {integrity: sha512-QHpkERcGsR0T7Qm3HNJSyXKEEj8AHNxkY3PK8TS2KJvQ7NiSHe3DDpwVKKtoYprL/AreyzFBeIkBIWChAqn60g==} @@ -2307,10 +1915,6 @@ packages: exponential-backoff@3.1.1: resolution: {integrity: sha512-dX7e/LHVJ6W3DE1MHWi9S1EYzDESENfLrYohG2G++ovZrYOkm4Knwa0mc1cn84xJOR4KEU0WSchhLbd0UklbHw==} - express@4.19.2: - 
resolution: {integrity: sha512-5T6nhjsT+EOMzuck8JjBHARTHfMht0POzlA60WV2pMD3gyXw2LZnZ+ueGdNxG+0calOJcWKbpFcuzLZ91YWq9Q==} - engines: {node: '>= 0.10.0'} - extendable-error@0.1.7: resolution: {integrity: sha512-UOiS2in6/Q0FK0R0q6UY9vYpQ21mr/Qn1KOnte7vsACuNJf514WvCCUHSRCPcgjPT2bAhNIJdlE6bVap1GKmeg==} @@ -2367,10 +1971,6 @@ packages: resolution: {integrity: sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==} engines: {node: '>=8'} - finalhandler@1.2.0: - resolution: {integrity: sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg==} - engines: {node: '>= 0.8'} - find-my-way@7.7.0: resolution: {integrity: sha512-+SrHpvQ52Q6W9f3wJoJBbAQULJuNEEQwBvlvYwACDhBTLOTMiQ0HYWh4+vC3OivGP2ENcTI1oKlFA2OepJNjhQ==} engines: {node: '>=14'} @@ -2412,10 +2012,6 @@ packages: resolution: {integrity: sha512-+yRYRhpnFPWXSly/6V4Lw9IfOV26uu30kynGJ03PW+MnjOEQe45RZ141QcS0aJehYBYA50GfCDnsRbFJdhssRw==} engines: {node: '>=10'} - fresh@0.5.2: - resolution: {integrity: sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==} - engines: {node: '>= 0.6'} - from@0.1.7: resolution: {integrity: sha512-twe20eF1OxVxp/ML/kq2p1uc6KvFK/+vs8WjEbeKmV2He22MKm7YF2ANIt+EOqhJ5L3K/SuuPhk0hWQDjOM23g==} @@ -2571,11 +2167,6 @@ packages: resolution: {integrity: sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==} engines: {node: '>= 0.4'} - hexer@1.5.0: - resolution: {integrity: sha512-dyrPC8KzBzUJ19QTIo1gXNqIISRXQ0NwteW6OeQHRN4ZuZeHkdODfj0zHBdOlHbRY8GqbqK57C9oWSvQZizFsg==} - engines: {node: '>= 0.10.x'} - hasBin: true - hosted-git-info@2.8.9: resolution: {integrity: sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==} @@ -2590,10 +2181,6 @@ packages: http-cache-semantics@4.1.1: resolution: {integrity: sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ==} - http-errors@2.0.0: - resolution: {integrity: sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==} - engines: {node: '>= 0.8'} - http-proxy-agent@5.0.0: resolution: {integrity: sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==} engines: {node: '>= 6'} @@ -2634,9 +2221,6 @@ packages: resolution: {integrity: sha512-5Fytz/IraMjqpwfd34ke28PTVMjZjJG2MPn5t7OE4eUCUNf8BAa7b5WUS9/Qvr6mwOQS7Mk6vdsMno5he+T8Xw==} engines: {node: '>= 4'} - immediate@3.0.6: - resolution: {integrity: sha512-XXOFtyqDjNDAQxVfYxuF7g9Il/IbWmmlQg2MYKOH8ExIT1qg6xc4zyS3HaEEATgs1btfzxq15ciUiY7gjSXRGQ==} - import-in-the-middle@1.4.2: resolution: {integrity: sha512-9WOz1Yh/cvO/p69sxRmhyQwrIGGSp7EIdcb+fFNVi7CzQGQB8U1/1XrKVSbEd/GNOAeM0peJtmi7+qphe7NvAw==} @@ -2855,10 +2439,6 @@ packages: resolution: {integrity: sha512-kWmLKn2tRtfYMF/BakihVVRzBKOxz4gJMiL2Rj91WnAB5TPZumSH99R/Yf1qE1u4uRimvCSJfm6hnxohXeEXjQ==} engines: {node: '>=14'} - jaeger-client@3.19.0: - resolution: {integrity: sha512-M0c7cKHmdyEUtjemnJyx/y9uX16XHocL46yQvyqDlPdvAcwPDbHrIbKjQdBqtiE4apQ/9dmr+ZLJYYPGnurgpw==} - engines: {node: '>=10'} - jju@1.4.0: resolution: {integrity: sha512-8wb9Yw966OSxApiCt0K3yNJL8pnNeIv+OEq2YMidz4FKP6nonSRoOXc80iXY4JaN2FC11B9qsNmDsm+ZOfMROA==} @@ -2917,10 +2497,6 @@ packages: resolution: {integrity: sha512-p/nXbhSEcu3pZRdkW1OfJhpsVtW1gd4Wa1fnQc9YLiTfAjn0312eMKimbdIQzuZl9aa9xUGaRlP9T/CJE/ditQ==} engines: {node: '>=0.10.0'} - kafkajs@2.2.4: - resolution: {integrity: 
sha512-j/YeapB1vfPT2iOIUn/vxdyKEuhuY2PxMBvf5JWux6iSaukAccrMtXEY/Lb7OvavDhOWME589bpLrEdnVHjfjA==} - engines: {node: '>=14.0.0'} - keyv@4.5.4: resolution: {integrity: sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==} @@ -2943,9 +2519,6 @@ packages: resolution: {integrity: sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==} engines: {node: '>=6'} - lie@3.1.1: - resolution: {integrity: sha512-RiNhHysUjhrDQntfYSfY4MU24coXXdEOgw9WGcKHNeEwffDYbF//u87M1EWaMGzuFoSbqW0C9C6lEEhDOAswfw==} - light-my-request@5.13.0: resolution: {integrity: sha512-9IjUN9ZyCS9pTG+KqTDEQo68Sui2lHsYBrfMyVUTTZ3XhH8PMZq7xO94Kr+eP9dhi/kcKsx4N41p2IXEBil1pQ==} @@ -2960,9 +2533,6 @@ packages: resolution: {integrity: sha512-SFppqq5p42fe2qcZQqqEOiVRXl+WCP1MdT6k7BDEW1j++sp5fIY+/fdRQitvKgB5BrBcmrs5m/L0v2FrU5MY1g==} engines: {node: '>=14'} - localforage@1.10.0: - resolution: {integrity: sha512-14/H1aX7hzBBmmh7sGPd+AOMkkIrHM3Z1PAyGgZigA1H1p5O5ANnMyWzvpAETtG68/dC4pC0ncy3+PPGzXZHPg==} - locate-path@5.0.0: resolution: {integrity: sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==} engines: {node: '>=8'} @@ -2971,9 +2541,6 @@ packages: resolution: {integrity: sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==} engines: {node: '>=10'} - lodash.camelcase@4.3.0: - resolution: {integrity: sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==} - lodash.merge@4.6.2: resolution: {integrity: sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==} @@ -2987,21 +2554,10 @@ packages: resolution: {integrity: sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg==} engines: {node: '>=10'} - logfmt@1.4.0: - resolution: {integrity: sha512-p1Ow0C2dDJYaQBhRHt+HVMP6ELuBm4jYSYNHPMfz0J5wJ9qA6/7oBOlBZBfT1InqguTYcvJzNea5FItDxTcbyw==} - hasBin: true - logform@2.6.0: resolution: {integrity: sha512-1ulHeNPp6k/LD8H91o7VYFBng5i1BDE7HoKxVbZiGFidS1Rj65qcywLxX+pVfAPoQJEjRdvKcusKwOupHCVOVQ==} engines: {node: '>= 12.0.0'} - long@2.4.0: - resolution: {integrity: sha512-ijUtjmO/n2A5PaosNG9ZGDsQ3vxJg7ZW8vsY8Kp0f2yIZWhSJvjmegV7t+9RPQKxKrvj8yKGehhS+po14hPLGQ==} - engines: {node: '>=0.6'} - - long@5.2.3: - resolution: {integrity: sha512-lcHwpNoggQTObv5apGNCTdJrO69eHOZMi4BNC+rTLER8iHAqGrUVeLh/irVIM7zTw2bOXA8T6uNPeujwOLg/2Q==} - lossless-json@2.0.11: resolution: {integrity: sha512-BP0vn+NGYvzDielvBZaFain/wgeJ1hTvURCqtKvhr1SCPePdaaTanmmcplrHfEJSJOUql7hk4FHwToNJjWRY3g==} @@ -3048,10 +2604,6 @@ packages: map-stream@0.1.0: resolution: {integrity: sha512-CkYQrPYZfWnu/DAmVCpTSX/xHpKZ80eKh2lAkyA6AJTef6bW+6JpbQZN5rofum7da+SyN1bi5ctTm+lTfcCW3g==} - media-typer@0.3.0: - resolution: {integrity: sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==} - engines: {node: '>= 0.6'} - memory-pager@1.5.0: resolution: {integrity: sha512-ZS4Bp4r/Zoeq6+NLJpP+0Zzm0pR8whtGPf1XExKLJBAczGMnSi3It14OiNCStjQjM6NU1okjQGSxgEZN8eBYKg==} @@ -3059,34 +2611,14 @@ packages: resolution: {integrity: sha512-3YffViIt2QWgTy6Pale5QpopX/IvU3LPL03jOTqp6pGj3VjesdO/U8CuHMKpnQr4shCNCM5fd5XFFvIIl6JBHg==} engines: {node: '>=8'} - merge-descriptors@1.0.1: - resolution: {integrity: sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w==} - merge2@1.4.1: resolution: {integrity: 
sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==} engines: {node: '>= 8'} - methods@1.1.2: - resolution: {integrity: sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==} - engines: {node: '>= 0.6'} - micromatch@4.0.7: resolution: {integrity: sha512-LPP/3KorzCwBxfeUuZmaR6bG2kdeHSbe0P2tY3FLRU4vYrjYz5hI4QZwV0njUx3jeuKe67YukQ1LSPZBKDqO/Q==} engines: {node: '>=8.6'} - mime-db@1.52.0: - resolution: {integrity: sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==} - engines: {node: '>= 0.6'} - - mime-types@2.1.35: - resolution: {integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==} - engines: {node: '>= 0.6'} - - mime@1.6.0: - resolution: {integrity: sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==} - engines: {node: '>=4'} - hasBin: true - mimic-fn@2.1.0: resolution: {integrity: sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==} engines: {node: '>=6'} @@ -3215,9 +2747,6 @@ packages: moo@0.5.2: resolution: {integrity: sha512-iSAJLHYKnX41mKcJKjqvnAN9sf0LMDTXDEvFv+ffuRR9a1MIuXLjMNL6EsnDHSkKLTWNqQQ5uo61P4EbU4NU+Q==} - ms@2.0.0: - resolution: {integrity: sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==} - ms@2.1.2: resolution: {integrity: sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==} @@ -3248,15 +2777,6 @@ packages: resolution: {integrity: sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==} engines: {node: '>=10.5.0'} - node-fetch@2.7.0: - resolution: {integrity: sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==} - engines: {node: 4.x || >=6.0.0} - peerDependencies: - encoding: ^0.1.0 - peerDependenciesMeta: - encoding: - optional: true - node-fetch@3.3.2: resolution: {integrity: sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==} engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} @@ -3266,9 +2786,6 @@ packages: engines: {node: ^12.13 || ^14.13 || >=16} hasBin: true - node-int64@0.4.0: - resolution: {integrity: sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw==} - nodemon@3.1.1: resolution: {integrity: sha512-k43xGaDtaDIcufn0Fc6fTtsdKSkV/hQzoQFigNH//GaKta28yoKVYXCnV+KXRqfT/YzsFaQU9VdeEG+HEyxr6A==} engines: {node: '>=10'} @@ -3357,10 +2874,6 @@ packages: resolution: {integrity: sha512-0eJJY6hXLGf1udHwfNftBqH+g73EU4B504nZeKpz1sYRKafAghwxEJunB2O7rDZkL4PGfsMVnTXZ2EjibbqcsA==} engines: {node: '>=14.0.0'} - on-finished@2.4.1: - resolution: {integrity: sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==} - engines: {node: '>= 0.8'} - once@1.4.0: resolution: {integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==} @@ -3375,10 +2888,6 @@ packages: resolution: {integrity: sha512-aiSt/4ubOTyb1N5C2ZbGrBvaJOXIZhZvpRPYuUVxQJe27wJZqf/o65iPrqgLcgfeOLaQ8cS2Q+762jrYvniTrA==} engines: {node: '>18.0.0'} - opentracing@0.14.7: - resolution: {integrity: sha512-vz9iS7MJ5+Bp1URw8Khvdyw1H/hGvzHWlKQ7eRrQojSCDL1/SrWfrY9QebLw97n2deyRtzHRC3MkQfVNUCo91Q==} - engines: {node: '>=0.10'} - ora@5.4.1: resolution: {integrity: 
sha512-5b6Y85tPxZZ7QytO+BQzysW31HJku27cRIlkbAXaNx+BdcVi+LlRFmVXzeF6a7JCwJpyw5c4b+YSVImQIrBpuQ==} engines: {node: '>=10'} @@ -3448,10 +2957,6 @@ packages: resolution: {integrity: sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==} engines: {node: '>=8'} - parseurl@1.3.3: - resolution: {integrity: sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==} - engines: {node: '>= 0.8'} - path-exists@4.0.0: resolution: {integrity: sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==} engines: {node: '>=8'} @@ -3471,9 +2976,6 @@ packages: resolution: {integrity: sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==} engines: {node: '>=16 || 14 >=14.18'} - path-to-regexp@0.1.7: - resolution: {integrity: sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ==} - path-type@4.0.0: resolution: {integrity: sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==} engines: {node: '>=8'} @@ -3584,10 +3086,6 @@ packages: process-warning@3.0.0: resolution: {integrity: sha512-mqn0kFRl0EoqhnL0GQ0veqFHyIN1yig9RHh/InzORTUiZHFRAur+aMtRkELNwGs9aNwKS6tg/An4NYBPGwvtzQ==} - process@0.10.1: - resolution: {integrity: sha512-dyIett8dgGIZ/TXKUzeYExt7WA6ldDzys9vTDU/cCA9L17Ypme+KzS+NjQCjpn9xsvi/shbMC+yP/BcFMBz0NA==} - engines: {node: '>= 0.6.0'} - process@0.11.10: resolution: {integrity: sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==} engines: {node: '>= 0.6.0'} @@ -3615,14 +3113,6 @@ packages: proto-list@1.2.4: resolution: {integrity: sha512-vtK/94akxsTMhe0/cbfpR+syPuszcuwhqVjJq26CuNDgFGj682oRBXOP5MJpv2r7JtE8MsiepGIqvvOTBwn2vA==} - protobufjs@7.1.1: - resolution: {integrity: sha512-d0nMQqS/aT3lfV8bKi9Gbg73vPd2LcDdTDOu6RE/M+h9DY8g1EmDzk3ADPccthEWfTBjkR2oxNdx9Gs8YubT+g==} - engines: {node: '>=12.0.0'} - - protobufjs@7.3.0: - resolution: {integrity: sha512-YWD03n3shzV9ImZRX3ccbjqLxj7NokGN0V/ESiBV5xWqrommYHYiihuIyavq03pWSGqlyvYUFmfoMKd+1rPA/g==} - engines: {node: '>=12.0.0'} - proxy-addr@2.0.7: resolution: {integrity: sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==} engines: {node: '>= 0.10'} @@ -3646,10 +3136,6 @@ packages: resolution: {integrity: sha512-FLpr4flz5xZTSJxSeaheeMKN/EDzMdK7b8PTOC6a5PYFKTucWbdqjgqaEyH0shFiSJrVB1+Qqi4Tk19ccU6Aug==} engines: {node: '>=12.20'} - qs@6.11.0: - resolution: {integrity: sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==} - engines: {node: '>=0.6'} - queue-microtask@1.2.3: resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==} @@ -3671,14 +3157,6 @@ packages: resolution: {integrity: sha512-80WNmd9DA0tmZrw9qQa62GPPWfuXJknrmVmLcxvq4uZBdYqb1wYoKTmnlGUchvVWe0XiLupYkBoXVOxz3C8DYQ==} engines: {node: '>=0.12'} - range-parser@1.2.1: - resolution: {integrity: sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==} - engines: {node: '>= 0.6'} - - raw-body@2.5.2: - resolution: {integrity: sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==} - engines: {node: '>= 0.8'} - rc-config-loader@4.1.3: resolution: {integrity: sha512-kD7FqML7l800i6pS6pvLyIE2ncbk9Du8Q0gp/4hMPhJU6ZxApkoLcGD8ZeqgiAlfwZ6BlETq6qqe+12DUL207w==} @@ -3763,10 +3241,6 @@ packages: resolution: {integrity: 
sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==} engines: {node: '>=0.10.0'} - require-in-the-middle@5.2.0: - resolution: {integrity: sha512-efCx3b+0Z69/LGJmm9Yvi4cqEdxnoGnxYxGxBghkkTTFeXRtTCmmhO0AnAfHz59k957uTSuy8WaHqOs8wbYUWg==} - engines: {node: '>=6'} - require-in-the-middle@7.3.0: resolution: {integrity: sha512-nQFEv9gRw6SJAwWD2LrL0NmQvAcO7FBwJbwmr2ttPAacfy0xuiOjE5zt+zM4xDyuyvUaxBi/9gb2SoCyNEVJcw==} engines: {node: '>=8.6.0'} @@ -3891,14 +3365,6 @@ packages: engines: {node: '>=10'} hasBin: true - send@0.18.0: - resolution: {integrity: sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg==} - engines: {node: '>= 0.8.0'} - - serve-static@1.15.0: - resolution: {integrity: sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g==} - engines: {node: '>= 0.8.0'} - set-blocking@2.0.0: resolution: {integrity: sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==} @@ -3913,9 +3379,6 @@ packages: resolution: {integrity: sha512-7PGFlmtwsEADb0WYyvCMa1t+yke6daIG4Wirafur5kcf+MhUnPms1UeR0CKQdTZD81yESwMHbtn+TR+dMviakQ==} engines: {node: '>= 0.4'} - setprototypeof@1.2.0: - resolution: {integrity: sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==} - shebang-command@1.2.0: resolution: {integrity: sha512-EV3L1+UQWGor21OmnvojK36mhg+TyIKDh3iFBKBohr5xeXIhNBcx8oWdgkTEEQ+BEFFYdLRuqMfd5L84N1V5Vg==} engines: {node: '>=0.10.0'} @@ -4031,9 +3494,6 @@ packages: resolution: {integrity: sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==} engines: {node: '>= 10.x'} - split@0.2.10: - resolution: {integrity: sha512-e0pKq+UUH2Xq/sXbYpZBZc3BawsfDZ7dgv+JtRTUPNcvF5CMR4Y9cvJqkMY0MoxWzTHvZuz1beg6pNEKlszPiQ==} - split@0.3.3: resolution: {integrity: sha512-wD2AeVmxXRBoX44wAycgjVpMhvbwdI2aZjCkvfNcH1YqHQvJVa1duWc73OyVGJUc05fhFaTZeQ/PYsrmyH0JVA==} @@ -4057,10 +3517,6 @@ packages: stackback@0.0.2: resolution: {integrity: sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==} - statuses@2.0.1: - resolution: {integrity: sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==} - engines: {node: '>= 0.8'} - std-env@3.7.0: resolution: {integrity: sha512-JPbdCEQLj1w5GilpiHAx3qJvFndqybBysA3qUOnznweH4QbNYUsW/ea8QzSrnh0vNsezMMw5bcVool8lM0gwzg==} @@ -4074,9 +3530,6 @@ packages: resolution: {integrity: sha512-aqD2Q0144Z+/RqG52NeHEkZauTAUWJO8c6yTftGJKO3Tja5tUgIfmIl6kExvhtxSDP7fXB6DvzkfMpCd/F3G+Q==} engines: {node: '>=0.6.19'} - string-template@0.2.1: - resolution: {integrity: sha512-Yptehjogou2xm4UJbxJ4CxgZx12HBfeystp0y3x7s4Dj32ltVVG1Gg8YhKjHZkHicuKpZX/ffilA8505VbUbpw==} - string-width@4.2.3: resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==} engines: {node: '>=8'} @@ -4162,11 +3615,6 @@ packages: thread-stream@2.7.0: resolution: {integrity: sha512-qQiRWsU/wvNolI6tbbCKd9iKaTnCXsTwVxhhKM6nctPdujTyztjlbUkUTUymidWcMnZ5pWR0ej4a0tjsW021vw==} - thriftrw@3.11.4: - resolution: {integrity: sha512-UcuBd3eanB3T10nXWRRMwfwoaC6VMk7qe3/5YIWP2Jtw+EbHqJ0p1/K3x8ixiR5dozKSSfcg1W+0e33G1Di3XA==} - engines: {node: '>= 0.10.x'} - hasBin: true - through2@2.0.5: resolution: {integrity: sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==} @@ -4196,17 +3644,10 @@ packages: resolution: {integrity: 
sha512-/m8M+2BJUpoJdgAHoG+baCwBT+tf2VraSfkBgl0Y00qIWt41DJ8R5B8nsEw0I58YwF5IZH6z24/2TobDKnqSWw==} engines: {node: '>=12'} - toidentifier@1.0.1: - resolution: {integrity: sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==} - engines: {node: '>=0.6'} - touch@3.1.1: resolution: {integrity: sha512-r0eojU4bI8MnHr8c5bNo7lJDdI2qXlWWJk6a9EAFG7vbhTjElYhBVS3/miuE0uOuoLdb8Mc/rVfsmm6eo5o9GA==} hasBin: true - tr46@0.0.3: - resolution: {integrity: sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==} - tr46@4.1.1: resolution: {integrity: sha512-2lv/66T7e5yNyhAAC4NaKe5nVavzuGJQVVtRYLyQ2OI8tsJ61PMLlelehb0wi2Hx6+hT/OJUWZcw8MjlSRnxvw==} engines: {node: '>=14'} @@ -4311,10 +3752,6 @@ packages: resolution: {integrity: sha512-RAH822pAdBgcNMAfWnCBU3CFZcfZ/i1eZjwFU/dsLKumyuuP3niueg2UAukXYF0E2AAoc82ZSSf9J0WQBinzHA==} engines: {node: '>=12.20'} - type-is@1.6.18: - resolution: {integrity: sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==} - engines: {node: '>= 0.6'} - typed-array-buffer@1.0.2: resolution: {integrity: sha512-gEymJYKZtKXzzBzM4jqa9w6Q1Jjm7x2d+sh19AdsD4wqnMPDYyvwpsIc2Q/835kHuo3BEQ7CjelGhfTsoBb2MQ==} engines: {node: '>= 0.4'} @@ -4377,10 +3814,6 @@ packages: resolution: {integrity: sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==} engines: {node: '>= 4.0.0'} - unpipe@1.0.0: - resolution: {integrity: sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==} - engines: {node: '>= 0.8'} - untildify@4.0.0: resolution: {integrity: sha512-KK8xQ1mkzZeg9inewmFVDNkg3l5LUhoq9kN6iWYB/CC9YMG8HA+c1Q8HwDe6dEX7kErrEVNVBO3fWsVq5iDgtw==} engines: {node: '>=8'} @@ -4395,14 +3828,6 @@ packages: util-deprecate@1.0.2: resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==} - utils-merge@1.0.1: - resolution: {integrity: sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==} - engines: {node: '>= 0.4.0'} - - uuid@8.3.2: - resolution: {integrity: sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==} - hasBin: true - uuid@9.0.1: resolution: {integrity: sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==} hasBin: true @@ -4500,9 +3925,6 @@ packages: resolution: {integrity: sha512-d2JWLCivmZYTSIoge9MsgFCZrt571BikcWGYkjC1khllbTeDlGqZ2D8vD8E/lJa8WGWbb7Plm8/XJYV7IJHZZw==} engines: {node: '>= 8'} - webidl-conversions@3.0.1: - resolution: {integrity: sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==} - webidl-conversions@7.0.0: resolution: {integrity: sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==} engines: {node: '>=12'} @@ -4511,9 +3933,6 @@ packages: resolution: {integrity: sha512-9WWbymnqj57+XEuqADHrCJ2eSXzn8WXIW/YSGaZtb2WKAInQ6CHfaUUcTyyver0p8BDg5StLQq8h1vtZuwmOig==} engines: {node: '>=16'} - whatwg-url@5.0.0: - resolution: {integrity: sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==} - which-boxed-primitive@1.0.2: resolution: {integrity: sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==} @@ -4608,9 +4027,6 @@ packages: resolution: {integrity: sha512-GCPAHLvrIH13+c0SuacwvRYj2SxJXQ4kaVTT5xgL3kPrz56XxkF21IGhjSE1+W0aw7gpBWRGXLCPnPby6lSpmQ==} 
engines: {node: '>=12'} - xorshift@1.2.0: - resolution: {integrity: sha512-iYgNnGyeeJ4t6U11NpA/QiKy+PXn5Aa3Azg5qkwIFz1tBLllQrjjsk9yzD7IAK0naNU4JxdeDgqW9ov4u/hc4g==} - xtend@4.0.2: resolution: {integrity: sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==} engines: {node: '>=0.4'} @@ -4674,13 +4090,6 @@ packages: snapshots: - '@apidevtools/json-schema-ref-parser@9.1.2': - dependencies: - '@jsdevtools/ono': 7.1.3 - '@types/json-schema': 7.0.15 - call-me-maybe: 1.0.2 - js-yaml: 4.1.0 - '@babel/code-frame@7.24.6': dependencies: '@babel/highlight': 7.24.6 @@ -4933,354 +4342,43 @@ snapshots: '@fastify/ajv-compiler@3.5.0': dependencies: - ajv: 8.14.0 - ajv-formats: 2.1.1(ajv@8.14.0) - fast-uri: 2.3.0 - - '@fastify/cors@8.4.1': - dependencies: - fastify-plugin: 4.5.1 - mnemonist: 0.39.5 - - '@fastify/error@3.4.1': {} - - '@fastify/fast-json-stringify-compiler@4.3.0': - dependencies: - fast-json-stringify: 5.16.0 - - '@fastify/merge-json-schemas@0.1.1': - dependencies: - fast-deep-equal: 3.1.3 - - '@gar/promisify@1.1.3': {} - - '@grpc/grpc-js@1.10.8': - dependencies: - '@grpc/proto-loader': 0.7.13 - '@js-sdsl/ordered-map': 4.4.2 - - '@grpc/proto-loader@0.7.13': - dependencies: - lodash.camelcase: 4.3.0 - long: 5.2.3 - protobufjs: 7.3.0 - yargs: 17.7.2 - - '@humanwhocodes/momoa@2.0.4': {} - - '@inquirer/figures@1.0.2': {} - - '@isaacs/cliui@8.0.2': - dependencies: - string-width: 5.1.2 - string-width-cjs: string-width@4.2.3 - strip-ansi: 7.1.0 - strip-ansi-cjs: strip-ansi@6.0.1 - wrap-ansi: 8.1.0 - wrap-ansi-cjs: wrap-ansi@7.0.0 - - '@jest/schemas@29.6.3': - dependencies: - '@sinclair/typebox': 0.27.8 - - '@journeyapps-platform/micro-alerts@4.0.0': - dependencies: - '@journeyapps-platform/micro-async-hooks': 2.0.0 - '@journeyapps-platform/micro-errors': 3.0.0 - '@journeyapps-platform/micro-logger': 3.0.0 - '@journeyapps-platform/micro-tracing': 4.0.0 - '@sentry/node': 7.116.0 - lodash: 4.17.21 - - '@journeyapps-platform/micro-async-hooks@2.0.0': - dependencies: - lodash: 4.17.21 - - '@journeyapps-platform/micro-authorizers@6.0.0(encoding@0.1.13)': - dependencies: - '@journeyapps-platform/micro-errors': 3.0.0 - '@journeyapps-platform/micro-logger': 3.0.0 - '@journeyapps-platform/micro-schema': 6.0.1 - jose: 4.15.5 - lru-cache: 7.18.3 - node-fetch: 2.7.0(encoding@0.1.13) - transitivePeerDependencies: - - encoding - - '@journeyapps-platform/micro-codecs@3.0.1': - dependencies: - bson: 6.7.0 - ts-codec: 1.2.2 - - '@journeyapps-platform/micro-db@3.0.2': - dependencies: - '@journeyapps-platform/micro-utils': 5.0.0 - bson: 6.7.0 - - '@journeyapps-platform/micro-errors@3.0.0': {} - - '@journeyapps-platform/micro-events@2.0.1(socks@2.8.3)': - dependencies: - '@journeyapps-platform/micro-alerts': 4.0.0 - '@journeyapps-platform/micro-errors': 3.0.0 - '@journeyapps-platform/micro-kafka': 8.0.1 - '@journeyapps-platform/micro-locks': 2.0.1(socks@2.8.3) - '@journeyapps-platform/micro-logger': 3.0.0 - '@journeyapps-platform/micro-schema': 6.0.1 - '@journeyapps-platform/micro-streaming': 5.0.1 - '@journeyapps-platform/micro-tracing': 4.0.0 - '@opentelemetry/api': 1.3.0 - bson: 6.7.0 - kafkajs: 2.2.4 - lodash: 4.17.21 - mongodb: 6.7.0(socks@2.8.3) - transitivePeerDependencies: - - '@aws-sdk/credential-providers' - - '@mongodb-js/zstd' - - gcp-metadata - - kerberos - - mongodb-client-encryption - - snappy - - socks - - '@journeyapps-platform/micro-kafka@8.0.1': - dependencies: - '@journeyapps-platform/micro-alerts': 4.0.0 - '@journeyapps-platform/micro-errors': 3.0.0 - 
'@journeyapps-platform/micro-logger': 3.0.0 - '@journeyapps-platform/micro-metrics': 5.0.0 - '@journeyapps-platform/micro-schema': 6.0.1 - '@journeyapps-platform/micro-tracing': 4.0.0 - '@opentelemetry/api': 1.3.0 - bson: 6.7.0 - kafkajs: 2.2.4 - lodash: 4.17.21 - uuid: 9.0.1 - - '@journeyapps-platform/micro-locks@2.0.1(socks@2.8.3)': - dependencies: - bson: 6.7.0 - mongodb: 6.7.0(socks@2.8.3) - transitivePeerDependencies: - - '@aws-sdk/credential-providers' - - '@mongodb-js/zstd' - - gcp-metadata - - kerberos - - mongodb-client-encryption - - snappy - - socks - - '@journeyapps-platform/micro-logger@3.0.0': - dependencies: - '@journeyapps-platform/micro-async-hooks': 2.0.0 - '@journeyapps-platform/micro-errors': 3.0.0 - '@opentelemetry/api': 1.3.0 - chalk: 4.1.2 - logfmt: 1.4.0 - - '@journeyapps-platform/micro-metrics@5.0.0': - dependencies: - '@opentelemetry/api': 1.3.0 - - '@journeyapps-platform/micro-migrate@4.0.1(socks@2.8.3)': - dependencies: - '@journeyapps-platform/micro-async-hooks': 2.0.0 - '@journeyapps-platform/micro-logger': 3.0.0 - '@journeyapps-platform/micro-mongo': 5.0.1(socks@2.8.3) - yargs: 17.7.2 - transitivePeerDependencies: - - '@aws-sdk/credential-providers' - - '@mongodb-js/zstd' - - gcp-metadata - - kerberos - - mongodb-client-encryption - - snappy - - socks - - '@journeyapps-platform/micro-mongo@5.0.1(socks@2.8.3)': - dependencies: - '@journeyapps-platform/micro-db': 3.0.2 - bson: 6.7.0 - mongodb: 6.7.0(socks@2.8.3) - transitivePeerDependencies: - - '@aws-sdk/credential-providers' - - '@mongodb-js/zstd' - - gcp-metadata - - kerberos - - mongodb-client-encryption - - snappy - - socks - - '@journeyapps-platform/micro-repl@4.0.0': - dependencies: - '@journeyapps-platform/micro-logger': 3.0.0 - '@journeyapps-platform/micro-system': 5.0.0 - - '@journeyapps-platform/micro-router-docs@2.0.0': - dependencies: - '@journeyapps-platform/micro-router': 7.0.1 - '@journeyapps-platform/micro-schema': 6.0.1 - - '@journeyapps-platform/micro-router-express@9.0.0': - dependencies: - '@journeyapps-platform/micro-alerts': 4.0.0 - '@journeyapps-platform/micro-errors': 3.0.0 - '@journeyapps-platform/micro-logger': 3.0.0 - '@journeyapps-platform/micro-metrics': 5.0.0 - '@journeyapps-platform/micro-router': 7.0.1 - '@journeyapps-platform/micro-schema': 6.0.1 - '@journeyapps-platform/micro-tracing': 4.0.0 - '@opentelemetry/api': 1.3.0 - '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.3.0) - body-parser: 1.20.2 - cors: 2.8.5 - express: 4.19.2 - lodash: 4.17.21 - transitivePeerDependencies: - - supports-color + ajv: 8.14.0 + ajv-formats: 2.1.1(ajv@8.14.0) + fast-uri: 2.3.0 - '@journeyapps-platform/micro-router-fastify@3.0.0': - dependencies: - '@fastify/cors': 8.4.1 - '@journeyapps-platform/micro-alerts': 4.0.0 - '@journeyapps-platform/micro-errors': 3.0.0 - '@journeyapps-platform/micro-logger': 3.0.0 - '@journeyapps-platform/micro-metrics': 5.0.0 - '@journeyapps-platform/micro-router': 7.0.1 - '@journeyapps-platform/micro-signals': 3.0.0 - '@journeyapps-platform/micro-tracing': 4.0.0 - '@opentelemetry/api': 1.3.0 - '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.3.0) - fastify: 4.23.2 + '@fastify/cors@8.4.1': + dependencies: fastify-plugin: 4.5.1 + mnemonist: 0.39.5 - '@journeyapps-platform/micro-router-ws@8.0.0': - dependencies: - '@journeyapps-platform/micro-alerts': 4.0.0 - '@journeyapps-platform/micro-errors': 3.0.0 - '@journeyapps-platform/micro-logger': 3.0.0 - '@journeyapps-platform/micro-metrics': 5.0.0 - '@journeyapps-platform/micro-router': 7.0.1 - 
'@journeyapps-platform/micro-schema': 6.0.1 - '@journeyapps-platform/micro-tracing': 4.0.0 - '@journeyapps-platform/types-micro-ws': 2.0.0 - '@opentelemetry/api': 1.3.0 - async: 3.2.5 - ws: 8.17.0 - transitivePeerDependencies: - - bufferutil - - utf-8-validate - - '@journeyapps-platform/micro-router@7.0.1': - dependencies: - '@journeyapps-platform/micro-errors': 3.0.0 - '@journeyapps-platform/micro-logger': 3.0.0 - '@journeyapps-platform/micro-schema': 6.0.1 - '@journeyapps-platform/micro-signals': 3.0.0 - '@journeyapps-platform/micro-streaming': 5.0.1 - '@journeyapps-platform/micro-tracing': 4.0.0 - '@opentelemetry/api': 1.3.0 - async: 3.2.5 + '@fastify/error@3.4.1': {} - '@journeyapps-platform/micro-schema@6.0.1': + '@fastify/fast-json-stringify-compiler@4.3.0': dependencies: - '@apidevtools/json-schema-ref-parser': 9.1.2 - '@journeyapps-platform/micro-codecs': 3.0.1 - '@journeyapps-platform/micro-errors': 3.0.0 - ajv: 8.14.0 - better-ajv-errors: 1.2.0(ajv@8.14.0) - ts-codec: 1.2.2 - zod: 3.23.8 + fast-json-stringify: 5.16.0 - '@journeyapps-platform/micro-signals@3.0.0': + '@fastify/merge-json-schemas@0.1.1': dependencies: - '@journeyapps-platform/micro-async-hooks': 2.0.0 - '@journeyapps-platform/micro-logger': 3.0.0 - lodash: 4.17.21 + fast-deep-equal: 3.1.3 - '@journeyapps-platform/micro-streaming@5.0.1': - dependencies: - '@journeyapps-platform/micro-errors': 3.0.0 - '@journeyapps-platform/micro-schema': 6.0.1 - '@types/express': 4.17.21 - bson: 6.7.0 + '@gar/promisify@1.1.3': {} - '@journeyapps-platform/micro-system@5.0.0': - dependencies: - '@journeyapps-platform/micro-signals': 3.0.0 - '@opentelemetry/api': 1.3.0 - - '@journeyapps-platform/micro-telemetry@2.0.0': - dependencies: - '@journeyapps-platform/micro-async-hooks': 2.0.0 - '@journeyapps-platform/micro-signals': 3.0.0 - '@opentelemetry/api': 1.3.0 - '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.3.0) - '@opentelemetry/exporter-metrics-otlp-grpc': 0.34.0(@opentelemetry/api@1.3.0) - '@opentelemetry/exporter-trace-otlp-grpc': 0.34.0(@opentelemetry/api@1.3.0) - '@opentelemetry/instrumentation-http': 0.34.0(@opentelemetry/api@1.3.0) - '@opentelemetry/instrumentation-mongodb': 0.33.0(@opentelemetry/api@1.3.0) - '@opentelemetry/sdk-metrics': 1.8.0(@opentelemetry/api@1.3.0) - '@opentelemetry/sdk-node': 0.34.0(@opentelemetry/api@1.3.0) - '@opentelemetry/sdk-trace-base': 1.8.0(@opentelemetry/api@1.3.0) - '@opentelemetry/semantic-conventions': 1.8.0 - transitivePeerDependencies: - - supports-color + '@humanwhocodes/momoa@2.0.4': {} - '@journeyapps-platform/micro-tracing@4.0.0': - dependencies: - '@journeyapps-platform/micro-errors': 3.0.0 - '@opentelemetry/api': 1.3.0 - lodash: 4.17.21 + '@inquirer/figures@1.0.2': {} - '@journeyapps-platform/micro-utils@5.0.0': + '@isaacs/cliui@8.0.2': dependencies: - dotenv: 16.4.5 - lodash: 4.17.21 - zod: 3.23.8 - - '@journeyapps-platform/micro@17.0.1(encoding@0.1.13)(socks@2.8.3)': - dependencies: - '@journeyapps-platform/micro-alerts': 4.0.0 - '@journeyapps-platform/micro-async-hooks': 2.0.0 - '@journeyapps-platform/micro-authorizers': 6.0.0(encoding@0.1.13) - '@journeyapps-platform/micro-codecs': 3.0.1 - '@journeyapps-platform/micro-db': 3.0.2 - '@journeyapps-platform/micro-errors': 3.0.0 - '@journeyapps-platform/micro-events': 2.0.1(socks@2.8.3) - '@journeyapps-platform/micro-kafka': 8.0.1 - '@journeyapps-platform/micro-locks': 2.0.1(socks@2.8.3) - '@journeyapps-platform/micro-logger': 3.0.0 - '@journeyapps-platform/micro-metrics': 5.0.0 - '@journeyapps-platform/micro-mongo': 
5.0.1(socks@2.8.3) - '@journeyapps-platform/micro-repl': 4.0.0 - '@journeyapps-platform/micro-router': 7.0.1 - '@journeyapps-platform/micro-router-docs': 2.0.0 - '@journeyapps-platform/micro-router-express': 9.0.0 - '@journeyapps-platform/micro-router-fastify': 3.0.0 - '@journeyapps-platform/micro-router-ws': 8.0.0 - '@journeyapps-platform/micro-schema': 6.0.1 - '@journeyapps-platform/micro-signals': 3.0.0 - '@journeyapps-platform/micro-streaming': 5.0.1 - '@journeyapps-platform/micro-system': 5.0.0 - '@journeyapps-platform/micro-telemetry': 2.0.0 - '@journeyapps-platform/micro-tracing': 4.0.0 - '@journeyapps-platform/micro-utils': 5.0.0 - dotenv: 16.4.5 - transitivePeerDependencies: - - '@aws-sdk/credential-providers' - - '@mongodb-js/zstd' - - bufferutil - - encoding - - gcp-metadata - - kerberos - - mongodb-client-encryption - - snappy - - socks - - supports-color - - utf-8-validate + string-width: 5.1.2 + string-width-cjs: string-width@4.2.3 + strip-ansi: 7.1.0 + strip-ansi-cjs: strip-ansi@6.0.1 + wrap-ansi: 8.1.0 + wrap-ansi-cjs: wrap-ansi@7.0.0 - '@journeyapps-platform/types-micro-ws@2.0.0': {} + '@jest/schemas@29.6.3': + dependencies: + '@sinclair/typebox': 0.27.8 '@jridgewell/resolve-uri@3.1.2': {} @@ -5291,10 +4389,6 @@ snapshots: '@jridgewell/resolve-uri': 3.1.2 '@jridgewell/sourcemap-codec': 1.4.15 - '@js-sdsl/ordered-map@4.4.2': {} - - '@jsdevtools/ono@7.1.3': {} - '@ljharb/through@2.3.13': dependencies: call-bind: 1.0.7 @@ -5388,8 +4482,6 @@ snapshots: dependencies: '@opentelemetry/api': 1.8.0 - '@opentelemetry/api@1.3.0': {} - '@opentelemetry/api@1.6.0': {} '@opentelemetry/api@1.8.0': {} @@ -5400,20 +4492,11 @@ snapshots: dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/context-async-hooks@1.8.0(@opentelemetry/api@1.3.0)': - dependencies: - '@opentelemetry/api': 1.3.0 - '@opentelemetry/core@1.17.0(@opentelemetry/api@1.6.0)': dependencies: '@opentelemetry/api': 1.6.0 '@opentelemetry/semantic-conventions': 1.17.0 - '@opentelemetry/core@1.24.1(@opentelemetry/api@1.3.0)': - dependencies: - '@opentelemetry/api': 1.3.0 - '@opentelemetry/semantic-conventions': 1.24.1 - '@opentelemetry/core@1.24.1(@opentelemetry/api@1.6.0)': dependencies: '@opentelemetry/api': 1.6.0 @@ -5439,39 +4522,6 @@ snapshots: '@opentelemetry/api': 1.9.0 '@opentelemetry/semantic-conventions': 1.25.0 - '@opentelemetry/core@1.8.0(@opentelemetry/api@1.3.0)': - dependencies: - '@opentelemetry/api': 1.3.0 - '@opentelemetry/semantic-conventions': 1.8.0 - - '@opentelemetry/exporter-jaeger@1.8.0(@opentelemetry/api@1.3.0)': - dependencies: - '@opentelemetry/api': 1.3.0 - '@opentelemetry/core': 1.8.0(@opentelemetry/api@1.3.0) - '@opentelemetry/sdk-trace-base': 1.8.0(@opentelemetry/api@1.3.0) - '@opentelemetry/semantic-conventions': 1.8.0 - jaeger-client: 3.19.0 - - '@opentelemetry/exporter-metrics-otlp-grpc@0.34.0(@opentelemetry/api@1.3.0)': - dependencies: - '@grpc/grpc-js': 1.10.8 - '@opentelemetry/api': 1.3.0 - '@opentelemetry/core': 1.8.0(@opentelemetry/api@1.3.0) - '@opentelemetry/exporter-metrics-otlp-http': 0.34.0(@opentelemetry/api@1.3.0) - '@opentelemetry/otlp-grpc-exporter-base': 0.34.0(@opentelemetry/api@1.3.0) - '@opentelemetry/otlp-transformer': 0.34.0(@opentelemetry/api@1.3.0) - '@opentelemetry/resources': 1.8.0(@opentelemetry/api@1.3.0) - '@opentelemetry/sdk-metrics': 1.8.0(@opentelemetry/api@1.3.0) - - '@opentelemetry/exporter-metrics-otlp-http@0.34.0(@opentelemetry/api@1.3.0)': - dependencies: - '@opentelemetry/api': 1.3.0 - '@opentelemetry/core': 1.8.0(@opentelemetry/api@1.3.0) - 
'@opentelemetry/otlp-exporter-base': 0.34.0(@opentelemetry/api@1.3.0) - '@opentelemetry/otlp-transformer': 0.34.0(@opentelemetry/api@1.3.0) - '@opentelemetry/resources': 1.8.0(@opentelemetry/api@1.3.0) - '@opentelemetry/sdk-metrics': 1.8.0(@opentelemetry/api@1.3.0) - '@opentelemetry/exporter-metrics-otlp-http@0.51.1(@opentelemetry/api@1.8.0)': dependencies: '@opentelemetry/api': 1.8.0 @@ -5495,43 +4545,6 @@ snapshots: '@opentelemetry/resources': 1.24.1(@opentelemetry/api@1.8.0) '@opentelemetry/sdk-metrics': 1.24.1(@opentelemetry/api@1.8.0) - '@opentelemetry/exporter-trace-otlp-grpc@0.34.0(@opentelemetry/api@1.3.0)': - dependencies: - '@grpc/grpc-js': 1.10.8 - '@opentelemetry/api': 1.3.0 - '@opentelemetry/core': 1.8.0(@opentelemetry/api@1.3.0) - '@opentelemetry/otlp-grpc-exporter-base': 0.34.0(@opentelemetry/api@1.3.0) - '@opentelemetry/otlp-transformer': 0.34.0(@opentelemetry/api@1.3.0) - '@opentelemetry/resources': 1.8.0(@opentelemetry/api@1.3.0) - '@opentelemetry/sdk-trace-base': 1.8.0(@opentelemetry/api@1.3.0) - - '@opentelemetry/exporter-trace-otlp-http@0.34.0(@opentelemetry/api@1.3.0)': - dependencies: - '@opentelemetry/api': 1.3.0 - '@opentelemetry/core': 1.8.0(@opentelemetry/api@1.3.0) - '@opentelemetry/otlp-exporter-base': 0.34.0(@opentelemetry/api@1.3.0) - '@opentelemetry/otlp-transformer': 0.34.0(@opentelemetry/api@1.3.0) - '@opentelemetry/resources': 1.8.0(@opentelemetry/api@1.3.0) - '@opentelemetry/sdk-trace-base': 1.8.0(@opentelemetry/api@1.3.0) - - '@opentelemetry/exporter-trace-otlp-proto@0.34.0(@opentelemetry/api@1.3.0)': - dependencies: - '@opentelemetry/api': 1.3.0 - '@opentelemetry/core': 1.8.0(@opentelemetry/api@1.3.0) - '@opentelemetry/otlp-exporter-base': 0.34.0(@opentelemetry/api@1.3.0) - '@opentelemetry/otlp-proto-exporter-base': 0.34.0(@opentelemetry/api@1.3.0) - '@opentelemetry/otlp-transformer': 0.34.0(@opentelemetry/api@1.3.0) - '@opentelemetry/resources': 1.8.0(@opentelemetry/api@1.3.0) - '@opentelemetry/sdk-trace-base': 1.8.0(@opentelemetry/api@1.3.0) - - '@opentelemetry/exporter-zipkin@1.8.0(@opentelemetry/api@1.3.0)': - dependencies: - '@opentelemetry/api': 1.3.0 - '@opentelemetry/core': 1.8.0(@opentelemetry/api@1.3.0) - '@opentelemetry/resources': 1.8.0(@opentelemetry/api@1.3.0) - '@opentelemetry/sdk-trace-base': 1.8.0(@opentelemetry/api@1.3.0) - '@opentelemetry/semantic-conventions': 1.8.0 - '@opentelemetry/instrumentation-connect@0.37.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 @@ -5576,16 +4589,6 @@ snapshots: transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-http@0.34.0(@opentelemetry/api@1.3.0)': - dependencies: - '@opentelemetry/api': 1.3.0 - '@opentelemetry/core': 1.8.0(@opentelemetry/api@1.3.0) - '@opentelemetry/instrumentation': 0.34.0(@opentelemetry/api@1.3.0) - '@opentelemetry/semantic-conventions': 1.8.0 - semver: 7.6.2 - transitivePeerDependencies: - - supports-color - '@opentelemetry/instrumentation-http@0.52.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 @@ -5616,14 +4619,6 @@ snapshots: transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-mongodb@0.33.0(@opentelemetry/api@1.3.0)': - dependencies: - '@opentelemetry/api': 1.3.0 - '@opentelemetry/instrumentation': 0.34.0(@opentelemetry/api@1.3.0) - '@opentelemetry/semantic-conventions': 1.24.1 - transitivePeerDependencies: - - supports-color - '@opentelemetry/instrumentation-mongodb@0.45.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 @@ -5688,15 +4683,6 @@ 
snapshots: transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation@0.34.0(@opentelemetry/api@1.3.0)': - dependencies: - '@opentelemetry/api': 1.3.0 - require-in-the-middle: 5.2.0 - semver: 7.6.2 - shimmer: 1.2.1 - transitivePeerDependencies: - - supports-color - '@opentelemetry/instrumentation@0.43.0(@opentelemetry/api@1.8.0)': dependencies: '@opentelemetry/api': 1.8.0 @@ -5733,39 +4719,11 @@ snapshots: transitivePeerDependencies: - supports-color - '@opentelemetry/otlp-exporter-base@0.34.0(@opentelemetry/api@1.3.0)': - dependencies: - '@opentelemetry/api': 1.3.0 - '@opentelemetry/core': 1.8.0(@opentelemetry/api@1.3.0) - '@opentelemetry/otlp-exporter-base@0.51.1(@opentelemetry/api@1.8.0)': dependencies: '@opentelemetry/api': 1.8.0 '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0) - '@opentelemetry/otlp-grpc-exporter-base@0.34.0(@opentelemetry/api@1.3.0)': - dependencies: - '@grpc/grpc-js': 1.10.8 - '@grpc/proto-loader': 0.7.13 - '@opentelemetry/api': 1.3.0 - '@opentelemetry/core': 1.8.0(@opentelemetry/api@1.3.0) - '@opentelemetry/otlp-exporter-base': 0.34.0(@opentelemetry/api@1.3.0) - - '@opentelemetry/otlp-proto-exporter-base@0.34.0(@opentelemetry/api@1.3.0)': - dependencies: - '@opentelemetry/api': 1.3.0 - '@opentelemetry/core': 1.8.0(@opentelemetry/api@1.3.0) - '@opentelemetry/otlp-exporter-base': 0.34.0(@opentelemetry/api@1.3.0) - protobufjs: 7.1.1 - - '@opentelemetry/otlp-transformer@0.34.0(@opentelemetry/api@1.3.0)': - dependencies: - '@opentelemetry/api': 1.3.0 - '@opentelemetry/core': 1.8.0(@opentelemetry/api@1.3.0) - '@opentelemetry/resources': 1.8.0(@opentelemetry/api@1.3.0) - '@opentelemetry/sdk-metrics': 1.8.0(@opentelemetry/api@1.3.0) - '@opentelemetry/sdk-trace-base': 1.8.0(@opentelemetry/api@1.3.0) - '@opentelemetry/otlp-transformer@0.51.1(@opentelemetry/api@1.8.0)': dependencies: '@opentelemetry/api': 1.8.0 @@ -5776,16 +4734,6 @@ snapshots: '@opentelemetry/sdk-metrics': 1.24.1(@opentelemetry/api@1.8.0) '@opentelemetry/sdk-trace-base': 1.24.1(@opentelemetry/api@1.8.0) - '@opentelemetry/propagator-b3@1.8.0(@opentelemetry/api@1.3.0)': - dependencies: - '@opentelemetry/api': 1.3.0 - '@opentelemetry/core': 1.8.0(@opentelemetry/api@1.3.0) - - '@opentelemetry/propagator-jaeger@1.8.0(@opentelemetry/api@1.3.0)': - dependencies: - '@opentelemetry/api': 1.3.0 - '@opentelemetry/core': 1.8.0(@opentelemetry/api@1.3.0) - '@opentelemetry/redis-common@0.36.2': {} '@opentelemetry/resources@1.17.0(@opentelemetry/api@1.6.0)': @@ -5824,12 +4772,6 @@ snapshots: '@opentelemetry/core': 1.25.0(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.25.0 - '@opentelemetry/resources@1.8.0(@opentelemetry/api@1.3.0)': - dependencies: - '@opentelemetry/api': 1.3.0 - '@opentelemetry/core': 1.8.0(@opentelemetry/api@1.3.0) - '@opentelemetry/semantic-conventions': 1.8.0 - '@opentelemetry/sdk-logs@0.51.1(@opentelemetry/api-logs@0.51.1)(@opentelemetry/api@1.8.0)': dependencies: '@opentelemetry/api': 1.8.0 @@ -5865,31 +4807,6 @@ snapshots: '@opentelemetry/resources': 1.24.1(@opentelemetry/api@1.9.0) lodash.merge: 4.6.2 - '@opentelemetry/sdk-metrics@1.8.0(@opentelemetry/api@1.3.0)': - dependencies: - '@opentelemetry/api': 1.3.0 - '@opentelemetry/core': 1.8.0(@opentelemetry/api@1.3.0) - '@opentelemetry/resources': 1.8.0(@opentelemetry/api@1.3.0) - lodash.merge: 4.6.2 - - '@opentelemetry/sdk-node@0.34.0(@opentelemetry/api@1.3.0)': - dependencies: - '@opentelemetry/api': 1.3.0 - '@opentelemetry/core': 1.8.0(@opentelemetry/api@1.3.0) - '@opentelemetry/exporter-jaeger': 
1.8.0(@opentelemetry/api@1.3.0) - '@opentelemetry/exporter-trace-otlp-grpc': 0.34.0(@opentelemetry/api@1.3.0) - '@opentelemetry/exporter-trace-otlp-http': 0.34.0(@opentelemetry/api@1.3.0) - '@opentelemetry/exporter-trace-otlp-proto': 0.34.0(@opentelemetry/api@1.3.0) - '@opentelemetry/exporter-zipkin': 1.8.0(@opentelemetry/api@1.3.0) - '@opentelemetry/instrumentation': 0.34.0(@opentelemetry/api@1.3.0) - '@opentelemetry/resources': 1.8.0(@opentelemetry/api@1.3.0) - '@opentelemetry/sdk-metrics': 1.8.0(@opentelemetry/api@1.3.0) - '@opentelemetry/sdk-trace-base': 1.8.0(@opentelemetry/api@1.3.0) - '@opentelemetry/sdk-trace-node': 1.8.0(@opentelemetry/api@1.3.0) - '@opentelemetry/semantic-conventions': 1.8.0 - transitivePeerDependencies: - - supports-color - '@opentelemetry/sdk-trace-base@1.24.1(@opentelemetry/api@1.8.0)': dependencies: '@opentelemetry/api': 1.8.0 @@ -5911,31 +4828,12 @@ snapshots: '@opentelemetry/resources': 1.25.0(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.25.0 - '@opentelemetry/sdk-trace-base@1.8.0(@opentelemetry/api@1.3.0)': - dependencies: - '@opentelemetry/api': 1.3.0 - '@opentelemetry/core': 1.8.0(@opentelemetry/api@1.3.0) - '@opentelemetry/resources': 1.8.0(@opentelemetry/api@1.3.0) - '@opentelemetry/semantic-conventions': 1.8.0 - - '@opentelemetry/sdk-trace-node@1.8.0(@opentelemetry/api@1.3.0)': - dependencies: - '@opentelemetry/api': 1.3.0 - '@opentelemetry/context-async-hooks': 1.8.0(@opentelemetry/api@1.3.0) - '@opentelemetry/core': 1.8.0(@opentelemetry/api@1.3.0) - '@opentelemetry/propagator-b3': 1.8.0(@opentelemetry/api@1.3.0) - '@opentelemetry/propagator-jaeger': 1.8.0(@opentelemetry/api@1.3.0) - '@opentelemetry/sdk-trace-base': 1.8.0(@opentelemetry/api@1.3.0) - semver: 7.6.2 - '@opentelemetry/semantic-conventions@1.17.0': {} '@opentelemetry/semantic-conventions@1.24.1': {} '@opentelemetry/semantic-conventions@1.25.0': {} - '@opentelemetry/semantic-conventions@1.8.0': {} - '@opentelemetry/sql-common@0.40.1(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 @@ -5964,29 +4862,6 @@ snapshots: transitivePeerDependencies: - supports-color - '@protobufjs/aspromise@1.1.2': {} - - '@protobufjs/base64@1.1.2': {} - - '@protobufjs/codegen@2.0.4': {} - - '@protobufjs/eventemitter@1.1.0': {} - - '@protobufjs/fetch@1.1.0': - dependencies: - '@protobufjs/aspromise': 1.1.2 - '@protobufjs/inquire': 1.1.0 - - '@protobufjs/float@1.0.2': {} - - '@protobufjs/inquire@1.1.0': {} - - '@protobufjs/path@1.1.2': {} - - '@protobufjs/pool@1.1.0': {} - - '@protobufjs/utf8@1.1.0': {} - '@rollup/rollup-android-arm-eabi@4.18.0': optional: true @@ -6035,37 +4910,11 @@ snapshots: '@rollup/rollup-win32-x64-msvc@4.18.0': optional: true - '@sentry-internal/tracing@7.116.0': - dependencies: - '@sentry/core': 7.116.0 - '@sentry/types': 7.116.0 - '@sentry/utils': 7.116.0 - - '@sentry/core@7.116.0': - dependencies: - '@sentry/types': 7.116.0 - '@sentry/utils': 7.116.0 - '@sentry/core@8.9.2': dependencies: '@sentry/types': 8.9.2 '@sentry/utils': 8.9.2 - '@sentry/integrations@7.116.0': - dependencies: - '@sentry/core': 7.116.0 - '@sentry/types': 7.116.0 - '@sentry/utils': 7.116.0 - localforage: 1.10.0 - - '@sentry/node@7.116.0': - dependencies: - '@sentry-internal/tracing': 7.116.0 - '@sentry/core': 7.116.0 - '@sentry/integrations': 7.116.0 - '@sentry/types': 7.116.0 - '@sentry/utils': 7.116.0 - '@sentry/node@8.9.2': dependencies: '@opentelemetry/api': 1.9.0 @@ -6092,7 +4941,7 @@ snapshots: '@opentelemetry/semantic-conventions': 1.25.0 '@prisma/instrumentation': 
5.15.0 '@sentry/core': 8.9.2 - '@sentry/opentelemetry': 8.9.2(@opentelemetry/api@1.9.0)(@opentelemetry/core@1.25.0(@opentelemetry/api@1.6.0))(@opentelemetry/instrumentation@0.52.0(@opentelemetry/api@1.6.0))(@opentelemetry/sdk-trace-base@1.25.0(@opentelemetry/api@1.6.0))(@opentelemetry/semantic-conventions@1.25.0) + '@sentry/opentelemetry': 8.9.2(@opentelemetry/api@1.9.0)(@opentelemetry/core@1.25.0(@opentelemetry/api@1.6.0))(@opentelemetry/instrumentation@0.52.0(@opentelemetry/api@1.6.0))(@opentelemetry/sdk-trace-base@1.25.0(@opentelemetry/api@1.9.0))(@opentelemetry/semantic-conventions@1.25.0) '@sentry/types': 8.9.2 '@sentry/utils': 8.9.2 optionalDependencies: @@ -6100,7 +4949,7 @@ snapshots: transitivePeerDependencies: - supports-color - '@sentry/opentelemetry@8.9.2(@opentelemetry/api@1.9.0)(@opentelemetry/core@1.25.0(@opentelemetry/api@1.6.0))(@opentelemetry/instrumentation@0.52.0(@opentelemetry/api@1.6.0))(@opentelemetry/sdk-trace-base@1.25.0(@opentelemetry/api@1.6.0))(@opentelemetry/semantic-conventions@1.25.0)': + '@sentry/opentelemetry@8.9.2(@opentelemetry/api@1.9.0)(@opentelemetry/core@1.25.0(@opentelemetry/api@1.6.0))(@opentelemetry/instrumentation@0.52.0(@opentelemetry/api@1.6.0))(@opentelemetry/sdk-trace-base@1.25.0(@opentelemetry/api@1.9.0))(@opentelemetry/semantic-conventions@1.25.0)': dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/core': 1.25.0(@opentelemetry/api@1.9.0) @@ -6111,14 +4960,8 @@ snapshots: '@sentry/types': 8.9.2 '@sentry/utils': 8.9.2 - '@sentry/types@7.116.0': {} - '@sentry/types@8.9.2': {} - '@sentry/utils@7.116.0': - dependencies: - '@sentry/types': 7.116.0 - '@sentry/utils@8.9.2': dependencies: '@sentry/types': 8.9.2 @@ -6229,8 +5072,6 @@ snapshots: '@types/http-errors@2.0.4': {} - '@types/json-schema@7.0.15': {} - '@types/keygrip@1.0.6': {} '@types/koa-compose@3.2.8': @@ -6357,11 +5198,6 @@ snapshots: abstract-logging@2.0.1: {} - accepts@1.3.8: - dependencies: - mime-types: 2.1.35 - negotiator: 0.6.3 - acorn-import-assertions@1.9.0(acorn@8.11.3): dependencies: acorn: 8.11.3 @@ -6409,8 +5245,6 @@ snapshots: dependencies: string-width: 4.2.3 - ansi-color@0.2.1: {} - ansi-colors@4.1.3: {} ansi-escapes@4.3.2: @@ -6458,8 +5292,6 @@ snapshots: call-bind: 1.0.7 is-array-buffer: 3.0.4 - array-flatten@1.1.1: {} - array-union@2.1.0: {} array.prototype.flat@1.3.2: @@ -6526,23 +5358,6 @@ snapshots: inherits: 2.0.4 readable-stream: 3.6.2 - body-parser@1.20.2: - dependencies: - bytes: 3.1.2 - content-type: 1.0.5 - debug: 2.6.9 - depd: 2.0.0 - destroy: 1.2.0 - http-errors: 2.0.0 - iconv-lite: 0.4.24 - on-finished: 2.4.1 - qs: 6.11.0 - raw-body: 2.5.2 - type-is: 1.6.18 - unpipe: 1.0.0 - transitivePeerDependencies: - - supports-color - boxen@7.1.1: dependencies: ansi-align: 3.0.1 @@ -6585,15 +5400,6 @@ snapshots: base64-js: 1.5.1 ieee754: 1.2.1 - bufrw@1.4.0: - dependencies: - ansi-color: 0.2.1 - error: 7.0.2 - hexer: 1.5.0 - xtend: 4.0.2 - - bytes@3.1.2: {} - cac@6.7.14: {} cacache@16.1.3: @@ -6654,8 +5460,6 @@ snapshots: get-intrinsic: 1.2.4 set-function-length: 1.2.2 - call-me-maybe@1.0.2: {} - camelcase-keys@6.2.2: dependencies: camelcase: 5.3.1 @@ -6817,14 +5621,6 @@ snapshots: console-control-strings@1.1.0: {} - content-disposition@0.5.4: - dependencies: - safe-buffer: 5.2.1 - - content-type@1.0.5: {} - - cookie-signature@1.0.6: {} - cookie@0.6.0: {} copyfiles@2.4.1: @@ -6899,10 +5695,6 @@ snapshots: dependencies: '@babel/runtime': 7.24.6 - debug@2.6.9: - dependencies: - ms: 2.0.0 - debug@4.3.4(supports-color@5.5.0): dependencies: ms: 2.1.2 @@ -6946,10 
+5738,6 @@ snapshots: delegates@1.0.0: {} - depd@2.0.0: {} - - destroy@1.2.0: {} - detect-indent@6.1.0: {} diff-sequences@29.6.3: {} @@ -6976,16 +5764,12 @@ snapshots: eastasianwidth@0.2.0: {} - ee-first@1.1.1: {} - emoji-regex@8.0.0: {} emoji-regex@9.2.2: {} enabled@2.0.0: {} - encodeurl@1.0.2: {} - encoding@0.1.13: dependencies: iconv-lite: 0.6.3 @@ -7004,11 +5788,6 @@ snapshots: dependencies: is-arrayish: 0.2.1 - error@7.0.2: - dependencies: - string-template: 0.2.1 - xtend: 4.0.2 - es-abstract@1.23.3: dependencies: array-buffer-byte-length: 1.0.1 @@ -7114,14 +5893,10 @@ snapshots: escape-goat@4.0.0: {} - escape-html@1.0.3: {} - escape-string-regexp@1.0.5: {} esprima@4.0.1: {} - etag@1.8.1: {} - event-stream@3.3.4: dependencies: duplexer: 0.1.2 @@ -7138,42 +5913,6 @@ snapshots: exponential-backoff@3.1.1: {} - express@4.19.2: - dependencies: - accepts: 1.3.8 - array-flatten: 1.1.1 - body-parser: 1.20.2 - content-disposition: 0.5.4 - content-type: 1.0.5 - cookie: 0.6.0 - cookie-signature: 1.0.6 - debug: 2.6.9 - depd: 2.0.0 - encodeurl: 1.0.2 - escape-html: 1.0.3 - etag: 1.8.1 - finalhandler: 1.2.0 - fresh: 0.5.2 - http-errors: 2.0.0 - merge-descriptors: 1.0.1 - methods: 1.1.2 - on-finished: 2.4.1 - parseurl: 1.3.3 - path-to-regexp: 0.1.7 - proxy-addr: 2.0.7 - qs: 6.11.0 - range-parser: 1.2.1 - safe-buffer: 5.2.1 - send: 0.18.0 - serve-static: 1.15.0 - setprototypeof: 1.2.0 - statuses: 2.0.1 - type-is: 1.6.18 - utils-merge: 1.0.1 - vary: 1.1.2 - transitivePeerDependencies: - - supports-color - extendable-error@0.1.7: {} external-editor@3.1.0: @@ -7252,18 +5991,6 @@ snapshots: dependencies: to-regex-range: 5.0.1 - finalhandler@1.2.0: - dependencies: - debug: 2.6.9 - encodeurl: 1.0.2 - escape-html: 1.0.3 - on-finished: 2.4.1 - parseurl: 1.3.3 - statuses: 2.0.1 - unpipe: 1.0.0 - transitivePeerDependencies: - - supports-color - find-my-way@7.7.0: dependencies: fast-deep-equal: 3.1.3 @@ -7306,8 +6033,6 @@ snapshots: fp-and-or@0.1.4: {} - fresh@0.5.2: {} - from@0.1.7: {} fs-extra@7.0.1: @@ -7480,13 +6205,6 @@ snapshots: dependencies: function-bind: 1.1.2 - hexer@1.5.0: - dependencies: - ansi-color: 0.2.1 - minimist: 1.2.8 - process: 0.10.1 - xtend: 4.0.2 - hosted-git-info@2.8.9: {} hosted-git-info@5.2.1: @@ -7499,14 +6217,6 @@ snapshots: http-cache-semantics@4.1.1: {} - http-errors@2.0.0: - dependencies: - depd: 2.0.0 - inherits: 2.0.4 - setprototypeof: 1.2.0 - statuses: 2.0.1 - toidentifier: 1.0.1 - http-proxy-agent@5.0.0: dependencies: '@tootallnate/once': 2.0.0 @@ -7552,8 +6262,6 @@ snapshots: ignore@5.3.1: {} - immediate@3.0.6: {} - import-in-the-middle@1.4.2: dependencies: acorn: 8.11.3 @@ -7761,14 +6469,6 @@ snapshots: optionalDependencies: '@pkgjs/parseargs': 0.11.0 - jaeger-client@3.19.0: - dependencies: - node-int64: 0.4.0 - opentracing: 0.14.7 - thriftrw: 3.11.4 - uuid: 8.3.2 - xorshift: 1.2.0 - jju@1.4.0: {} jose@4.15.5: {} @@ -7814,8 +6514,6 @@ snapshots: jsonpointer@5.0.1: {} - kafkajs@2.2.4: {} - keyv@4.5.4: dependencies: json-buffer: 3.0.1 @@ -7832,10 +6530,6 @@ snapshots: leven@3.1.0: {} - lie@3.1.1: - dependencies: - immediate: 3.0.6 - light-my-request@5.13.0: dependencies: cookie: 0.6.0 @@ -7853,10 +6547,6 @@ snapshots: local-pkg@0.4.3: {} - localforage@1.10.0: - dependencies: - lie: 3.1.1 - locate-path@5.0.0: dependencies: p-locate: 4.1.0 @@ -7865,8 +6555,6 @@ snapshots: dependencies: p-locate: 5.0.0 - lodash.camelcase@4.3.0: {} - lodash.merge@4.6.2: {} lodash.startcase@4.4.0: {} @@ -7878,11 +6566,6 @@ snapshots: chalk: 4.1.2 is-unicode-supported: 0.1.0 - logfmt@1.4.0: - 
dependencies: - split: 0.2.10 - through: 2.3.8 - logform@2.6.0: dependencies: '@colors/colors': 1.6.0 @@ -7892,10 +6575,6 @@ snapshots: safe-stable-stringify: 2.4.3 triple-beam: 1.4.1 - long@2.4.0: {} - - long@5.2.3: {} - lossless-json@2.0.11: {} loupe@2.3.7: @@ -7967,8 +6646,6 @@ snapshots: map-stream@0.1.0: {} - media-typer@0.3.0: {} - memory-pager@1.5.0: {} meow@6.1.1: @@ -7985,25 +6662,13 @@ snapshots: type-fest: 0.13.1 yargs-parser: 18.1.3 - merge-descriptors@1.0.1: {} - merge2@1.4.1: {} - methods@1.1.2: {} - micromatch@4.0.7: dependencies: braces: 3.0.3 picomatch: 2.3.1 - mime-db@1.52.0: {} - - mime-types@2.1.35: - dependencies: - mime-db: 1.52.0 - - mime@1.6.0: {} - mimic-fn@2.1.0: {} mimic-response@3.1.0: {} @@ -8114,8 +6779,6 @@ snapshots: moo@0.5.2: {} - ms@2.0.0: {} - ms@2.1.2: {} ms@2.1.3: {} @@ -8137,12 +6800,6 @@ snapshots: node-domexception@1.0.0: {} - node-fetch@2.7.0(encoding@0.1.13): - dependencies: - whatwg-url: 5.0.0 - optionalDependencies: - encoding: 0.1.13 - node-fetch@3.3.2: dependencies: data-uri-to-buffer: 4.0.1 @@ -8166,8 +6823,6 @@ snapshots: - bluebird - supports-color - node-int64@0.4.0: {} - nodemon@3.1.1: dependencies: chokidar: 3.6.0 @@ -8311,10 +6966,6 @@ snapshots: on-exit-leak-free@2.1.2: {} - on-finished@2.4.1: - dependencies: - ee-first: 1.1.1 - once@1.4.0: dependencies: wrappy: 1.0.2 @@ -8336,8 +6987,6 @@ snapshots: - supports-color optional: true - opentracing@0.14.7: {} - ora@5.4.1: dependencies: bl: 4.1.0 @@ -8428,8 +7077,6 @@ snapshots: json-parse-even-better-errors: 2.3.1 lines-and-columns: 1.2.4 - parseurl@1.3.3: {} - path-exists@4.0.0: {} path-is-absolute@1.0.1: {} @@ -8443,8 +7090,6 @@ snapshots: lru-cache: 10.2.2 minipass: 7.1.2 - path-to-regexp@0.1.7: {} - path-type@4.0.0: {} pathe@1.1.2: {} @@ -8552,8 +7197,6 @@ snapshots: process-warning@3.0.0: {} - process@0.10.1: {} - process@0.11.10: {} progress@2.0.3: {} @@ -8572,36 +7215,6 @@ snapshots: proto-list@1.2.4: {} - protobufjs@7.1.1: - dependencies: - '@protobufjs/aspromise': 1.1.2 - '@protobufjs/base64': 1.1.2 - '@protobufjs/codegen': 2.0.4 - '@protobufjs/eventemitter': 1.1.0 - '@protobufjs/fetch': 1.1.0 - '@protobufjs/float': 1.0.2 - '@protobufjs/inquire': 1.1.0 - '@protobufjs/path': 1.1.2 - '@protobufjs/pool': 1.1.0 - '@protobufjs/utf8': 1.1.0 - '@types/node': 18.11.11 - long: 5.2.3 - - protobufjs@7.3.0: - dependencies: - '@protobufjs/aspromise': 1.1.2 - '@protobufjs/base64': 1.1.2 - '@protobufjs/codegen': 2.0.4 - '@protobufjs/eventemitter': 1.1.0 - '@protobufjs/fetch': 1.1.0 - '@protobufjs/float': 1.0.2 - '@protobufjs/inquire': 1.1.0 - '@protobufjs/path': 1.1.2 - '@protobufjs/pool': 1.1.0 - '@protobufjs/utf8': 1.1.0 - '@types/node': 18.11.11 - long: 5.2.3 - proxy-addr@2.0.7: dependencies: forwarded: 0.2.0 @@ -8621,10 +7234,6 @@ snapshots: dependencies: escape-goat: 4.0.0 - qs@6.11.0: - dependencies: - side-channel: 1.0.6 - queue-microtask@1.2.3: {} quick-format-unescaped@4.0.4: {} @@ -8640,15 +7249,6 @@ snapshots: discontinuous-range: 1.0.0 ret: 0.1.15 - range-parser@1.2.1: {} - - raw-body@2.5.2: - dependencies: - bytes: 3.1.2 - http-errors: 2.0.0 - iconv-lite: 0.4.24 - unpipe: 1.0.0 - rc-config-loader@4.1.3: dependencies: debug: 4.3.4(supports-color@5.5.0) @@ -8764,14 +7364,6 @@ snapshots: require-from-string@2.0.2: {} - require-in-the-middle@5.2.0: - dependencies: - debug: 4.3.4(supports-color@5.5.0) - module-details-from-path: 1.0.3 - resolve: 1.22.8 - transitivePeerDependencies: - - supports-color - require-in-the-middle@7.3.0: dependencies: debug: 4.3.4(supports-color@5.5.0) @@ 
-8898,33 +7490,6 @@ snapshots: semver@7.6.2: {} - send@0.18.0: - dependencies: - debug: 2.6.9 - depd: 2.0.0 - destroy: 1.2.0 - encodeurl: 1.0.2 - escape-html: 1.0.3 - etag: 1.8.1 - fresh: 0.5.2 - http-errors: 2.0.0 - mime: 1.6.0 - ms: 2.1.3 - on-finished: 2.4.1 - range-parser: 1.2.1 - statuses: 2.0.1 - transitivePeerDependencies: - - supports-color - - serve-static@1.15.0: - dependencies: - encodeurl: 1.0.2 - escape-html: 1.0.3 - parseurl: 1.3.3 - send: 0.18.0 - transitivePeerDependencies: - - supports-color - set-blocking@2.0.0: {} set-cookie-parser@2.6.0: {} @@ -8945,8 +7510,6 @@ snapshots: functions-have-names: 1.2.3 has-property-descriptors: 1.0.2 - setprototypeof@1.2.0: {} - shebang-command@1.2.0: dependencies: shebang-regex: 1.0.0 @@ -9066,10 +7629,6 @@ snapshots: split2@4.2.0: {} - split@0.2.10: - dependencies: - through: 2.3.8 - split@0.3.3: dependencies: through: 2.3.8 @@ -9090,8 +7649,6 @@ snapshots: stackback@0.0.2: {} - statuses@2.0.1: {} - std-env@3.7.0: {} stream-combiner@0.0.4: @@ -9104,8 +7661,6 @@ snapshots: string-argv@0.3.2: {} - string-template@0.2.1: {} - string-width@4.2.3: dependencies: emoji-regex: 8.0.0 @@ -9200,12 +7755,6 @@ snapshots: dependencies: real-require: 0.2.0 - thriftrw@3.11.4: - dependencies: - bufrw: 1.4.0 - error: 7.0.2 - long: 2.4.0 - through2@2.0.5: dependencies: readable-stream: 2.3.8 @@ -9229,12 +7778,8 @@ snapshots: toad-cache@3.7.0: {} - toidentifier@1.0.1: {} - touch@3.1.1: {} - tr46@0.0.3: {} - tr46@4.1.1: dependencies: punycode: 2.3.1 @@ -9336,11 +7881,6 @@ snapshots: type-fest@2.19.0: {} - type-is@1.6.18: - dependencies: - media-typer: 0.3.0 - mime-types: 2.1.35 - typed-array-buffer@1.0.2: dependencies: call-bind: 1.0.7 @@ -9414,8 +7954,6 @@ snapshots: universalify@0.1.2: {} - unpipe@1.0.0: {} - untildify@4.0.0: {} update-notifier@6.0.2: @@ -9441,10 +7979,6 @@ snapshots: util-deprecate@1.0.2: {} - utils-merge@1.0.1: {} - - uuid@8.3.2: {} - uuid@9.0.1: {} v8-compile-cache-lib@3.0.1: {} @@ -9537,8 +8071,6 @@ snapshots: web-streams-polyfill@3.3.3: {} - webidl-conversions@3.0.1: {} - webidl-conversions@7.0.0: {} whatwg-url@13.0.0: @@ -9546,11 +8078,6 @@ snapshots: tr46: 4.1.1 webidl-conversions: 7.0.0 - whatwg-url@5.0.0: - dependencies: - tr46: 0.0.3 - webidl-conversions: 3.0.1 - which-boxed-primitive@1.0.2: dependencies: is-bigint: 1.0.4 @@ -9652,8 +8179,6 @@ snapshots: xdg-basedir@5.1.0: {} - xorshift@1.2.0: {} - xtend@4.0.2: {} y18n@4.0.3: {} diff --git a/service/package.json b/service/package.json index 9ff37aace..3f36a1b8f 100644 --- a/service/package.json +++ b/service/package.json @@ -11,8 +11,6 @@ }, "dependencies": { "@fastify/cors": "8.4.1", - "@journeyapps-platform/micro": "^17.0.1", - "@journeyapps-platform/micro-migrate": "^4.0.1", "@opentelemetry/api": "~1.6.0", "@opentelemetry/exporter-prometheus": "^0.43.0", "@opentelemetry/sdk-metrics": "^1.17.0", diff --git a/service/src/runners/stream-worker.ts b/service/src/runners/stream-worker.ts index 6bb23d488..faba8463f 100644 --- a/service/src/runners/stream-worker.ts +++ b/service/src/runners/stream-worker.ts @@ -1,4 +1,3 @@ -import { Direction } from '@journeyapps-platform/micro-migrate'; import { migrations, replication, utils, Metrics } from '@powersync/service-core'; import { logger } from '@powersync/service-framework'; @@ -12,7 +11,7 @@ export async function startStreamWorker(runnerConfig: utils.RunnerConfig) { // Self hosted version allows for automatic migrations if (!config.migrations?.disable_auto_migration) { await migrations.migrate({ - direction: Direction.Up, + direction: 
migrations.Direction.Up, runner_config: runnerConfig }); } From 29b71cbe336e49ffc6591c682734375591f63cfe Mon Sep 17 00:00:00 2001 From: Steven Ontong Date: Thu, 20 Jun 2024 09:46:13 +0200 Subject: [PATCH 17/36] remove requirement for npm credentials --- .../workflows/development_image_release.yaml | 9 -- .../development_packages_release.yaml | 7 -- .github/workflows/image_release.yaml | 105 +++++++++--------- .github/workflows/packages_release.yaml | 7 -- .github/workflows/test.yml | 7 -- packages/rsocket-router/README.md | 2 +- .../src/router/ReactiveSocketRouter.ts | 2 +- service/Dockerfile | 7 +- service/src/util/alerting.ts | 2 +- 9 files changed, 55 insertions(+), 93 deletions(-) diff --git a/.github/workflows/development_image_release.yaml b/.github/workflows/development_image_release.yaml index f492e8e6a..cc4ff2a75 100644 --- a/.github/workflows/development_image_release.yaml +++ b/.github/workflows/development_image_release.yaml @@ -41,12 +41,6 @@ jobs: version: 9 run_install: false - - name: Add NPM auth - run: | - echo //npm.pkg.github.com/:_authToken=\${{secrets.RESTRICTED_PACKAGES_TOKEN}} >> ~/.npmrc - echo "//npm.pkg.github.com/journeyapps-platform/:_authToken=${{secrets.RESTRICTED_PACKAGES_TOKEN}}" >> ~/.npmrc - echo "@journeyapps-platform:registry=https://npm.pkg.github.com/journeyapps-platform/" >> ~/.npmrc - - name: Get pnpm store directory shell: bash run: | @@ -86,6 +80,3 @@ jobs: tags: ${{vars.DOCKER_REGISTRY}}:${{steps.get_version.outputs.SERVICE_VERSION}} push: true file: ./service/Dockerfile - # TODO remove this when removing Journey Micro - build-args: | - GITHUB_TOKEN=${{secrets.RESTRICTED_PACKAGES_TOKEN}} diff --git a/.github/workflows/development_packages_release.yaml b/.github/workflows/development_packages_release.yaml index 34b54033b..ccafb969d 100644 --- a/.github/workflows/development_packages_release.yaml +++ b/.github/workflows/development_packages_release.yaml @@ -27,13 +27,6 @@ jobs: version: 9 run_install: false - - name: Add NPM auth - run: | - echo "//registry.npmjs.org/:_authToken=${{secrets.NPM_TOKEN}}" >> ~/.npmrc - echo //npm.pkg.github.com/:_authToken=\${{secrets.RESTRICTED_PACKAGES_TOKEN}} >> ~/.npmrc - echo "//npm.pkg.github.com/journeyapps-platform/:_authToken=${{secrets.RESTRICTED_PACKAGES_TOKEN}}" >> ~/.npmrc - echo "@journeyapps-platform:registry=https://npm.pkg.github.com/journeyapps-platform/" >> ~/.npmrc - - name: Get pnpm store directory shell: bash run: | diff --git a/.github/workflows/image_release.yaml b/.github/workflows/image_release.yaml index 1f5749c98..81c03e168 100644 --- a/.github/workflows/image_release.yaml +++ b/.github/workflows/image_release.yaml @@ -2,60 +2,57 @@ name: Docker Image Release on: workflow_dispatch - + concurrency: ${{ github.workflow }}-${{ github.ref }} jobs: - release-docker-image: - name: Build and Release powersync-service Docker Image - runs-on: ubuntu-latest - if: github.ref == 'refs/heads/main' - steps: - - name: Checkout - uses: actions/checkout@v4 - with: - # check out full history - # Temporarily needed for changesets - fetch-depth: 0 - - - name: Set up QEMU - uses: docker/setup-qemu-action@v3 - - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 - - - name: Login to Docker Hub - uses: docker/login-action@v3 - with: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} - - # This uses the service's package.json version for the Docker Image tag - - name: Get Service Version from package.json - id: get_version - run: echo "SERVICE_VERSION=$(node -p 
"require('./service/package.json').version")" >> $GITHUB_OUTPUT - - - name: Build Image and Push - uses: docker/build-push-action@v5 - with: - platforms: linux/arm64,linux/amd64 - cache-from: type=registry,ref=${{vars.DOCKER_REGISTRY}}:latest - context: . - tags: ${{vars.DOCKER_REGISTRY}}:latest,${{vars.DOCKER_REGISTRY}}:${{steps.get_version.outputs.SERVICE_VERSION}} - push: true - file: ./service/Dockerfile - # TODO remove this when removing Journey Micro - build-args: | - GITHUB_TOKEN=${{secrets.RESTRICTED_PACKAGES_TOKEN}} - - # # Updates the README section on the DockerHub page - - name: Update repo description - # Note that this 3rd party extention is recommended in the DockerHub docs: - # https://docs.docker.com/build/ci/github-actions/update-dockerhub-desc/ - uses: peter-evans/dockerhub-description@e98e4d1628a5f3be2be7c231e50981aee98723ae # v4.0.0 - with: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} - repository: ${{vars.DOCKER_REGISTRY}} - # This is the contents of what will be shown on DockerHub - readme-filepath: ./service/README.md \ No newline at end of file + release-docker-image: + name: Build and Release powersync-service Docker Image + runs-on: ubuntu-latest + if: github.ref == 'refs/heads/main' + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + # check out full history + # Temporarily needed for changesets + fetch-depth: 0 + + - name: Set up QEMU + uses: docker/setup-qemu-action@v3 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Login to Docker Hub + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + + # This uses the service's package.json version for the Docker Image tag + - name: Get Service Version from package.json + id: get_version + run: echo "SERVICE_VERSION=$(node -p "require('./service/package.json').version")" >> $GITHUB_OUTPUT + + - name: Build Image and Push + uses: docker/build-push-action@v5 + with: + platforms: linux/arm64,linux/amd64 + cache-from: type=registry,ref=${{vars.DOCKER_REGISTRY}}:latest + context: . 
+ tags: ${{vars.DOCKER_REGISTRY}}:latest,${{vars.DOCKER_REGISTRY}}:${{steps.get_version.outputs.SERVICE_VERSION}} + push: true + file: ./service/Dockerfile + + # # Updates the README section on the DockerHub page + - name: Update repo description + # Note that this 3rd party extention is recommended in the DockerHub docs: + # https://docs.docker.com/build/ci/github-actions/update-dockerhub-desc/ + uses: peter-evans/dockerhub-description@e98e4d1628a5f3be2be7c231e50981aee98723ae # v4.0.0 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + repository: ${{vars.DOCKER_REGISTRY}} + # This is the contents of what will be shown on DockerHub + readme-filepath: ./service/README.md diff --git a/.github/workflows/packages_release.yaml b/.github/workflows/packages_release.yaml index 64f38c3d6..01339799c 100644 --- a/.github/workflows/packages_release.yaml +++ b/.github/workflows/packages_release.yaml @@ -39,13 +39,6 @@ jobs: restore-keys: | ${{ runner.os }}-pnpm-store- - # TODO remove this when no longer needed - - name: Temporary Package Credentials - run: | - echo //npm.pkg.github.com/:_authToken=\${{secrets.RESTRICTED_PACKAGES_TOKEN}} >> ~/.npmrc - echo "//npm.pkg.github.com/journeyapps-platform/:_authToken=${{secrets.RESTRICTED_PACKAGES_TOKEN}}" >> ~/.npmrc - echo "@journeyapps-platform:registry=https://npm.pkg.github.com/journeyapps-platform/" >> ~/.npmrc - - name: Install dependencies run: pnpm install diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 30ecdcd78..271200510 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -70,13 +70,6 @@ jobs: with: node-version-file: '.nvmrc' - # TODO remove this when no longer needed - - name: Temporary Package Credentials - run: | - echo //npm.pkg.github.com/:_authToken=\${{secrets.RESTRICTED_PACKAGES_TOKEN}} >> ~/.npmrc - echo "//npm.pkg.github.com/journeyapps-platform/:_authToken=${{secrets.RESTRICTED_PACKAGES_TOKEN}}" >> ~/.npmrc - echo "@journeyapps-platform:registry=https://npm.pkg.github.com/journeyapps-platform/" >> ~/.npmrc - - uses: pnpm/action-setup@v2 name: Install pnpm with: diff --git a/packages/rsocket-router/README.md b/packages/rsocket-router/README.md index 74b538e8a..1144d00cb 100644 --- a/packages/rsocket-router/README.md +++ b/packages/rsocket-router/README.md @@ -1,3 +1,3 @@ # RSocket Micro Router -This package wraps the RSocket library to a router format similar to the Journey Micro router. +This package wraps the RSocket library to a router format compatible with endpoint definitions. diff --git a/packages/rsocket-router/src/router/ReactiveSocketRouter.ts b/packages/rsocket-router/src/router/ReactiveSocketRouter.ts index eb4836898..961a14c43 100644 --- a/packages/rsocket-router/src/router/ReactiveSocketRouter.ts +++ b/packages/rsocket-router/src/router/ReactiveSocketRouter.ts @@ -1,7 +1,7 @@ /** * This is a small Router wrapper which uses the RSocket lib * to expose reactive websocket stream in an interface similar to - * other journey micro routers. + * other Journey micro routers. 
*/ import * as http from 'http'; import { Payload, RSocketServer } from 'rsocket-core'; diff --git a/service/Dockerfile b/service/Dockerfile index dbb1a33e6..f942246df 100644 --- a/service/Dockerfile +++ b/service/Dockerfile @@ -5,10 +5,6 @@ RUN npm i -g pnpm@9 ARG GITHUB_TOKEN -RUN echo //npm.pkg.github.com/:_authToken=\${GITHUB_TOKEN} >> ~/.npmrc && \ - echo //npm.pkg.github.com/journeyapps-platform/:_authToken=\${GITHUB_TOKEN} >> ~/.npmrc && \ - echo @journeyapps-platform:registry=https://npm.pkg.github.com/journeyapps-platform/ >> ~/.npmrc - COPY package.json pnpm-workspace.yaml pnpm-lock.yaml tsconfig.base.json ./ COPY service/package.json service/tsconfig.json service/ @@ -43,11 +39,10 @@ WORKDIR /app ARG GITHUB_TOKEN COPY --from=builder /app/ ./ -COPY --from=builder /root/.npmrc /root/.npmrc RUN npm i -g pnpm@9 -RUN pnpm install --frozen-lockfile && rm /root/.npmrc +RUN pnpm install --frozen-lockfile CMD pnpm test diff --git a/service/src/util/alerting.ts b/service/src/util/alerting.ts index f2c579eb5..766348885 100644 --- a/service/src/util/alerting.ts +++ b/service/src/util/alerting.ts @@ -32,7 +32,7 @@ export const createSentryReporter = (opts?: { }); } else { framework.logger.debug( - 'micro-alerts configured with sentry reporter but no SENTRY_DSN environment variable has been set' + 'Alerts configured with sentry reporter but no SENTRY_DSN environment variable has been set' ); } From 8a9472732f4fc4d33b6ec1bff43517b19ac08aac Mon Sep 17 00:00:00 2001 From: Steven Ontong Date: Thu, 20 Jun 2024 11:41:48 +0200 Subject: [PATCH 18/36] neaten imports --- .../src/router/ReactiveSocketRouter.ts | 21 +++--- .../transport/WebSocketServerTransport.ts | 4 +- .../transport/WebsocketDuplexConnection.ts | 8 +-- packages/rsocket-router/src/router/types.ts | 11 ++- packages/service-core/src/api/diagnostics.ts | 4 +- packages/service-core/src/auth/KeyStore.ts | 4 +- packages/service-core/src/entry/cli-entry.ts | 4 +- packages/service-core/src/metrics/Metrics.ts | 12 ++-- .../service-core/src/migrations/executor.ts | 8 +-- .../service-core/src/replication/WalStream.ts | 66 ++++++++---------- .../src/replication/WalStreamManager.ts | 32 ++++----- .../src/replication/WalStreamRunner.ts | 22 +++--- .../src/routes/endpoints/admin.ts | 28 ++++---- .../src/routes/endpoints/checkpointing.ts | 10 +-- .../service-core/src/routes/endpoints/dev.ts | 26 +++---- .../src/routes/endpoints/socket-route.ts | 12 ++-- .../src/routes/endpoints/sync-rules.ts | 26 +++---- .../src/routes/endpoints/sync-stream.ts | 14 ++-- packages/service-core/src/routes/hooks.ts | 4 +- .../service-core/src/routes/route-register.ts | 24 +++---- packages/service-core/src/routes/router.ts | 6 +- packages/service-core/src/runner/teardown.ts | 33 ++++----- .../src/storage/MongoBucketStorage.ts | 14 ++-- .../src/storage/mongo/MongoBucketBatch.ts | 35 +++++----- .../storage/mongo/MongoSyncBucketStorage.ts | 7 +- .../src/storage/mongo/MongoSyncRulesLock.ts | 6 +- .../src/storage/mongo/PersistedBatch.ts | 4 +- packages/service-core/src/sync/sync.ts | 8 +-- .../src/system/CorePowerSyncSystem.ts | 22 ++---- .../config/collectors/config-collector.ts | 4 +- .../impl/filesystem-config-collector.ts | 4 +- .../util/config/compound-config-collector.ts | 6 +- .../service-core/src/util/memory-tracking.ts | 4 +- .../service-core/src/util/pgwire_utils.ts | 4 +- packages/service-core/src/util/utils.ts | 8 +-- packages/service-core/test/src/env.ts | 12 ++-- packages/service-framework/src/container.ts | 69 +++++++++++++++++++ 
packages/service-framework/src/index.ts | 2 + .../service-framework/src/logger/Logger.ts | 8 --- .../src/signals/termination-handler.ts | 14 ++-- .../src/system/LifeCycledSystem.ts | 26 +------ service/src/entry.ts | 6 +- service/src/runners/server.ts | 24 ++++--- service/src/runners/stream-worker.ts | 8 ++- service/src/system/PowerSyncSystem.ts | 7 +- service/src/util/alerting.ts | 6 +- 46 files changed, 347 insertions(+), 340 deletions(-) create mode 100644 packages/service-framework/src/container.ts diff --git a/packages/rsocket-router/src/router/ReactiveSocketRouter.ts b/packages/rsocket-router/src/router/ReactiveSocketRouter.ts index 961a14c43..fc4496e24 100644 --- a/packages/rsocket-router/src/router/ReactiveSocketRouter.ts +++ b/packages/rsocket-router/src/router/ReactiveSocketRouter.ts @@ -16,7 +16,7 @@ import { SocketResponder } from './types.js'; import { WebsocketServerTransport } from './transport/WebSocketServerTransport.js'; -import * as framework from '@powersync/service-framework'; +import { container, errors } from '@powersync/service-framework'; export class ReactiveSocketRouter { protected activeConnections: number; @@ -55,13 +55,15 @@ export class ReactiveSocketRouter { wsCreator: () => wss }); + const { logger } = container; + const rSocketServer = new RSocketServer({ transport, acceptor: { accept: async (payload) => { const { max_concurrent_connections } = this.options ?? {}; if (max_concurrent_connections && this.activeConnections >= max_concurrent_connections) { - throw new framework.errors.JourneyError({ + throw new errors.JourneyError({ code: '429', description: `Maximum active concurrent connections limit has been reached` }); @@ -70,7 +72,7 @@ export class ReactiveSocketRouter { // Throwing an exception in this context will be returned to the client side request if (!payload.metadata) { // Meta data is required for endpoint handler path matching - throw new framework.errors.AuthorizationError('No context meta data provided'); + throw new errors.AuthorizationError('No context meta data provided'); } const context = await params.contextProvider(payload.metadata!); @@ -81,7 +83,7 @@ export class ReactiveSocketRouter { const observer = new SocketRouterObserver(); handleReactiveStream(context, { payload, initialN, responder }, observer, params).catch((ex) => { - framework.logger.error(ex); + logger.error(ex); responder.onError(ex); responder.onComplete(); }); @@ -122,6 +124,7 @@ export async function handleReactiveStream( ) { const { payload, responder, initialN } = request; const { metadata } = payload; + const { logger } = container; const exitWithError = (error: any) => { responder.onError(error); @@ -129,7 +132,7 @@ export async function handleReactiveStream( }; if (!metadata) { - return exitWithError(new framework.errors.ValidationError('Metadata is not provided')); + return exitWithError(new errors.ValidationError('Metadata is not provided')); } const meta = await params.metaDecoder(metadata); @@ -139,7 +142,7 @@ export async function handleReactiveStream( const route = params.endpoints.find((e) => e.path == path && e.type == RS_ENDPOINT_TYPE.STREAM); if (!route) { - return exitWithError(new framework.errors.ResourceNotFound('route', `No route for ${path} is configured`)); + return exitWithError(new errors.ResourceNotFound('route', `No route for ${path} is configured`)); } const { handler, authorize, validator, decoder = params.payloadDecoder } = route; @@ -148,14 +151,14 @@ export async function handleReactiveStream( if (validator) { const isValid = 
validator.validate(requestPayload); if (!isValid.valid) { - return exitWithError(new framework.errors.ValidationError(isValid.errors)); + return exitWithError(new errors.ValidationError(isValid.errors)); } } if (authorize) { const isAuthorized = await authorize({ params: requestPayload, context, observer, responder }); if (!isAuthorized.authorized) { - return exitWithError(new framework.errors.AuthorizationError(isAuthorized.errors)); + return exitWithError(new errors.AuthorizationError(isAuthorized.errors)); } } @@ -168,7 +171,7 @@ export async function handleReactiveStream( initialN }); } catch (ex) { - framework.logger.error(ex); + logger.error(ex); responder.onError(ex); responder.onComplete(); } diff --git a/packages/rsocket-router/src/router/transport/WebSocketServerTransport.ts b/packages/rsocket-router/src/router/transport/WebSocketServerTransport.ts index a1bc5cf1b..bceefa2c6 100644 --- a/packages/rsocket-router/src/router/transport/WebSocketServerTransport.ts +++ b/packages/rsocket-router/src/router/transport/WebSocketServerTransport.ts @@ -28,7 +28,7 @@ import { } from 'rsocket-core'; import * as WebSocket from 'ws'; import { WebsocketDuplexConnection } from './WebsocketDuplexConnection.js'; -import * as framework from '@powersync/service-framework'; +import { container } from '@powersync/service-framework'; export type SocketFactory = (options: SocketOptions) => WebSocket.WebSocketServer; @@ -76,7 +76,7 @@ export class WebsocketServerTransport implements ServerTransport { const duplex = WebSocket.createWebSocketStream(websocket); WebsocketDuplexConnection.create(duplex, connectionAcceptor, multiplexerDemultiplexerFactory, websocket); } catch (ex) { - framework.logger.error(`Could not create duplex connection`, ex); + container.logger.error(`Could not create duplex connection`, ex); if (websocket.readyState == websocket.OPEN) { websocket.close(); } diff --git a/packages/rsocket-router/src/router/transport/WebsocketDuplexConnection.ts b/packages/rsocket-router/src/router/transport/WebsocketDuplexConnection.ts index 287e4bbc6..1e6db83b2 100644 --- a/packages/rsocket-router/src/router/transport/WebsocketDuplexConnection.ts +++ b/packages/rsocket-router/src/router/transport/WebsocketDuplexConnection.ts @@ -15,7 +15,7 @@ * limitations under the License. */ -import * as framework from '@powersync/service-framework'; +import { container } from '@powersync/service-framework'; import { Closeable, Deferred, @@ -93,7 +93,7 @@ export class WebsocketDuplexConnection extends Deferred implements DuplexConnect }; private handleError = (e: WebSocket.ErrorEvent): void => { - framework.logger.error(`Error in WebSocket duplex connection: ${e}`); + container.logger.error(`Error in WebSocket duplex connection: ${e}`); this.close(e.error); }; @@ -123,7 +123,7 @@ export class WebsocketDuplexConnection extends Deferred implements DuplexConnect throw new Error(`Unable to deserialize frame`); } } catch (ex) { - framework.logger.info(`Received error deserializing initial frame buffer. Skipping connection request.`, ex); + container.logger.info(`Received error deserializing initial frame buffer. 
Skipping connection request.`, ex); // The initial frame should always be parsable return socket.end(); } @@ -137,7 +137,7 @@ export class WebsocketDuplexConnection extends Deferred implements DuplexConnect await connectionAcceptor(frame, connection); socket.resume(); } catch (error) { - framework.logger.info(`Error accepting connection:`, error); + container.logger.info(`Error accepting connection:`, error); connection.close(error); } }); diff --git a/packages/rsocket-router/src/router/types.ts b/packages/rsocket-router/src/router/types.ts index 4c581d82e..cee12ac00 100644 --- a/packages/rsocket-router/src/router/types.ts +++ b/packages/rsocket-router/src/router/types.ts @@ -1,5 +1,5 @@ import * as t from 'ts-codec'; -import * as framework from '@powersync/service-framework'; +import { router } from '@powersync/service-framework'; import { OnExtensionSubscriber, OnNextSubscriber, OnTerminalSubscriber } from 'rsocket-core'; @@ -32,15 +32,12 @@ export type ReactiveStreamPayload = CommonStreamPayload & { }; export type IReactiveStream = Omit< - framework.router.Endpoint< + router.Endpoint< I, O, C, - framework.router.EndpointHandlerPayload & CommonStreamPayload, - framework.router.EndpointHandler< - framework.router.EndpointHandlerPayload & ReactiveStreamPayload, - undefined - > + router.EndpointHandlerPayload & CommonStreamPayload, + router.EndpointHandler & ReactiveStreamPayload, undefined> >, 'method' > & { diff --git a/packages/service-core/src/api/diagnostics.ts b/packages/service-core/src/api/diagnostics.ts index 08febe8f2..91fefd121 100644 --- a/packages/service-core/src/api/diagnostics.ts +++ b/packages/service-core/src/api/diagnostics.ts @@ -7,7 +7,7 @@ import * as storage from '../storage/storage-index.js'; import * as util from '../util/util-index.js'; import { CorePowerSyncSystem } from '../system/CorePowerSyncSystem.js'; -import { logger } from '@powersync/service-framework'; +import { container } from '@powersync/service-framework'; export async function getConnectionStatus(system: CorePowerSyncSystem): Promise { if (system.pgwire_pool == null) { @@ -134,7 +134,7 @@ export async function getSyncRulesStatus( } } catch (e) { // Ignore - logger.warn(`Unable to get replication lag`, e); + container.logger.warn(`Unable to get replication lag`, e); } } } else { diff --git a/packages/service-core/src/auth/KeyStore.ts b/packages/service-core/src/auth/KeyStore.ts index d0da68141..3e05a21f3 100644 --- a/packages/service-core/src/auth/KeyStore.ts +++ b/packages/service-core/src/auth/KeyStore.ts @@ -3,7 +3,7 @@ import secs from '../util/secs.js'; import { KeyOptions, KeySpec, SUPPORTED_ALGORITHMS } from './KeySpec.js'; import { KeyCollector } from './KeyCollector.js'; import { JwtPayload } from './JwtPayload.js'; -import { logger } from '@powersync/service-framework'; +import { container } from '@powersync/service-framework'; /** * KeyStore to get keys and verify tokens. @@ -145,7 +145,7 @@ export class KeyStore { this.collector.noKeyFound?.().catch((e) => { // Typically this error would be stored on the collector. // This is just a last resort error handling. 
- logger.error(`Failed to refresh keys`, e); + container.logger.error(`Failed to refresh keys`, e); }); throw new jose.errors.JOSEError( diff --git a/packages/service-core/src/entry/cli-entry.ts b/packages/service-core/src/entry/cli-entry.ts index 2df3f2b68..93b053d25 100644 --- a/packages/service-core/src/entry/cli-entry.ts +++ b/packages/service-core/src/entry/cli-entry.ts @@ -4,7 +4,7 @@ import * as utils from '../util/util-index.js'; import { registerMigrationAction } from './commands/migrate-action.js'; import { registerTearDownAction } from './commands/teardown-action.js'; import { registerStartAction } from './entry-index.js'; -import { logger } from '@powersync/service-framework'; +import { container } from '@powersync/service-framework'; /** * Generates a Commander program which serves as the entry point @@ -32,7 +32,7 @@ export function generateEntryProgram(startHandlers?: Record { @@ -212,7 +212,7 @@ Anonymous telemetry is currently: ${options.disable_telemetry_sharing ? 'disable function getMetrics() { if (cachedRequest == null || Date.now() - cacheTimestamp > MINIMUM_INTERVAL) { cachedRequest = system.storage.getStorageMetrics().catch((e) => { - logger.error(`Failed to get storage metrics`, e); + container.logger.error(`Failed to get storage metrics`, e); return null; }); cacheTimestamp = Date.now(); diff --git a/packages/service-core/src/migrations/executor.ts b/packages/service-core/src/migrations/executor.ts index 9b13f4146..a281ec8c4 100644 --- a/packages/service-core/src/migrations/executor.ts +++ b/packages/service-core/src/migrations/executor.ts @@ -1,6 +1,6 @@ -import * as framework from '@powersync/service-framework'; import * as defs from './definitions.js'; import { MigrationStore } from './store/migration-store.js'; +import { container } from '@powersync/service-framework'; type ExecuteParams = { migrations: defs.Migration[]; @@ -42,7 +42,7 @@ export async function* execute(params: ExecuteParams): AsyncGenerator { // Failures here are okay - this only speeds up stopping the process. 
- logger.warn('Failed to ping connection', e); + container.logger.warn('Failed to ping connection', e); }); } else { // If we haven't started streaming yet, it could be due to something like @@ -165,7 +157,9 @@ export class WalStream { ] }); if (rs.rows.length == 0) { - logger.info(`Skipping ${tablePattern.schema}.${name} - not part of ${this.publication_name} publication`); + container.logger.info( + `Skipping ${tablePattern.schema}.${name} - not part of ${this.publication_name} publication` + ); continue; } @@ -195,17 +189,17 @@ export class WalStream { const status = await this.storage.getStatus(); if (status.snapshot_done && status.checkpoint_lsn) { - logger.info(`${slotName} Initial replication already done`); + container.logger.info(`${slotName} Initial replication already done`); let last_error = null; // Check that replication slot exists for (let i = 120; i >= 0; i--) { - await this.touch(); + await touch(); if (i == 0) { - this.errorReporter.captureException(last_error, { - level: framework.errors.ErrorSeverity.ERROR, + container.reporter.captureException(last_error, { + level: errors.ErrorSeverity.ERROR, metadata: { replication_slot: slotName } @@ -227,11 +221,11 @@ export class WalStream { ] }); // Success - logger.info(`Slot ${slotName} appears healthy`); + container.logger.info(`Slot ${slotName} appears healthy`); return { needsInitialSync: false }; } catch (e) { last_error = e; - logger.warn(`${slotName} Replication slot error`, e); + container.logger.warn(`${slotName} Replication slot error`, e); if (this.stopped) { throw e; @@ -245,8 +239,8 @@ export class WalStream { /replication slot.*does not exist/.test(e.message) || /publication.*does not exist/.test(e.message) ) { - this.errorReporter.captureException(e, { - level: framework.errors.ErrorSeverity.WARNING, + container.reporter.captureException(e, { + level: errors.ErrorSeverity.WARNING, metadata: { try_index: i, replication_slot: slotName @@ -258,7 +252,7 @@ export class WalStream { // Sample: publication "powersync" does not exist // Happens when publication deleted or never created. // Slot must be re-created in this case. - logger.info(`${slotName} does not exist anymore, will create new slot`); + container.logger.info(`${slotName} does not exist anymore, will create new slot`); throw new MissingReplicationSlotError(`Replication slot ${slotName} does not exist anymore`); } @@ -321,7 +315,7 @@ WHERE oid = $1::regclass`, // with streaming replication. const lsn = pgwire.lsnMakeComparable(row[1]); const snapshot = row[2]; - logger.info(`Created replication slot ${slotName} at ${lsn} with snapshot ${snapshot}`); + container.logger.info(`Created replication slot ${slotName} at ${lsn} with snapshot ${snapshot}`); // https://stackoverflow.com/questions/70160769/postgres-logical-replication-starting-from-given-lsn await db.query('BEGIN'); @@ -343,9 +337,9 @@ WHERE oid = $1::regclass`, // On Supabase, the default is 2 minutes. 
await db.query(`set local statement_timeout = 0`); - logger.info(`${slotName} Starting initial replication`); + container.logger.info(`${slotName} Starting initial replication`); await this.initialReplication(db, lsn); - logger.info(`${slotName} Initial replication done`); + container.logger.info(`${slotName} Initial replication done`); await db.query('COMMIT'); } catch (e) { await db.query('ROLLBACK'); @@ -362,7 +356,7 @@ WHERE oid = $1::regclass`, await this.snapshotTable(batch, db, table); await batch.markSnapshotDone([table], lsn); - await this.touch(); + await touch(); } } await batch.commit(lsn); @@ -376,7 +370,7 @@ WHERE oid = $1::regclass`, } private async snapshotTable(batch: storage.BucketStorageBatch, db: pgwire.PgConnection, table: storage.SourceTable) { - logger.info(`${this.slot_name} Replicating ${table.qualifiedName}`); + container.logger.info(`${this.slot_name} Replicating ${table.qualifiedName}`); const estimatedCount = await this.estimatedCount(db, table); let at = 0; let lastLogIndex = 0; @@ -402,7 +396,7 @@ WHERE oid = $1::regclass`, return q; }); if (rows.length > 0 && at - lastLogIndex >= 5000) { - logger.info(`${this.slot_name} Replicating ${table.qualifiedName} ${at}/${estimatedCount}`); + container.logger.info(`${this.slot_name} Replicating ${table.qualifiedName} ${at}/${estimatedCount}`); lastLogIndex = at; } if (this.abort_signal.aborted) { @@ -416,7 +410,7 @@ WHERE oid = $1::regclass`, at += rows.length; Metrics.getInstance().rows_replicated_total.add(rows.length); - await this.touch(); + await touch(); } await batch.flush(); @@ -496,7 +490,7 @@ WHERE oid = $1::regclass`, if (msg.tag == 'insert' || msg.tag == 'update' || msg.tag == 'delete') { const table = this.getTable(getRelId(msg.relation)); if (!table.syncAny) { - logger.debug(`Table ${table.qualifiedName} not used in sync rules - skipping`); + container.logger.debug(`Table ${table.qualifiedName} not used in sync rules - skipping`); return null; } @@ -569,7 +563,7 @@ WHERE oid = $1::regclass`, let count = 0; for await (const chunk of replicationStream.pgoutputDecode()) { - await this.touch(); + await touch(); if (this.abort_signal.aborted) { break; @@ -591,7 +585,7 @@ WHERE oid = $1::regclass`, await this.ack(msg.lsn!, replicationStream); } else { if (count % 100 == 0) { - logger.info(`${this.slot_name} replicating op ${count} ${msg.lsn}`); + container.logger.info(`${this.slot_name} replicating op ${count} ${msg.lsn}`); } count += 1; @@ -622,11 +616,11 @@ WHERE oid = $1::regclass`, replicationStream.ack(lsn); } +} - async touch() { - // FIXME: The hosted Kubernetes probe does not actually check the timestamp on this. - // FIXME: We need a timeout of around 5+ minutes in Kubernetes if we do start checking the timestamp, - // or reduce PING_INTERVAL here. - return this.probe.touch(); - } +async function touch() { + // FIXME: The hosted Kubernetes probe does not actually check the timestamp on this. + // FIXME: We need a timeout of around 5+ minutes in Kubernetes if we do start checking the timestamp, + // or reduce PING_INTERVAL here. 
+ return container.probes.touch(); } diff --git a/packages/service-core/src/replication/WalStreamManager.ts b/packages/service-core/src/replication/WalStreamManager.ts index 67357206c..7f4b19bb6 100644 --- a/packages/service-core/src/replication/WalStreamManager.ts +++ b/packages/service-core/src/replication/WalStreamManager.ts @@ -7,7 +7,7 @@ import * as util from '../util/util-index.js'; import { DefaultErrorRateLimiter } from './ErrorRateLimiter.js'; import { WalStreamRunner } from './WalStreamRunner.js'; import { CorePowerSyncSystem } from '../system/CorePowerSyncSystem.js'; -import { logger } from '@powersync/service-framework'; +import { container } from '@powersync/service-framework'; // 5 minutes const PING_INTERVAL = 1_000_000_000n * 300n; @@ -36,8 +36,8 @@ export class WalStreamManager { start() { this.runLoop().catch((e) => { - logger.error(`Fatal WalStream error`, e); - this.system.errorReporter.captureException(e); + container.logger.error(`Fatal WalStream error`, e); + container.reporter.captureException(e); setTimeout(() => { process.exit(1); }, 1000); @@ -57,7 +57,7 @@ export class WalStreamManager { const configured_sync_rules = await util.loadSyncRules(this.system.config); let configured_lock: storage.ReplicationLock | undefined = undefined; if (configured_sync_rules != null) { - logger.info('Loading sync rules from configuration'); + container.logger.info('Loading sync rules from configuration'); try { // Configure new sync rules, if it has changed. // In that case, also immediately take out a lock, so that another process doesn't start replication on it. @@ -69,13 +69,13 @@ export class WalStreamManager { } } catch (e) { // Log, but continue with previous sync rules - logger.error(`Failed to load sync rules from configuration`, e); + container.logger.error(`Failed to load sync rules from configuration`, e); } } else { - logger.info('No sync rules configured - configure via API'); + container.logger.info('No sync rules configured - configure via API'); } while (!this.stopped) { - await this.system.probe.touch(); + await container.probes.touch(); try { const pool = this.system.pgwire_pool; if (pool) { @@ -92,7 +92,7 @@ export class WalStreamManager { } } } catch (e) { - logger.error(`Failed to refresh wal streams`, e); + container.logger.error(`Failed to refresh wal streams`, e); } await new Promise((resolve) => setTimeout(resolve, 5000)); } @@ -116,7 +116,7 @@ export class WalStreamManager { try { await db.query(`SELECT * FROM pg_logical_emit_message(false, 'powersync', 'ping')`); } catch (e) { - logger.warn(`Failed to ping`, e); + container.logger.warn(`Failed to ping`, e); } this.lastPing = now; } @@ -158,9 +158,7 @@ export class WalStreamManager { storage: storage, source_db: this.system.config.connection!, lock, - rateLimiter: this.rateLimiter, - probe: this.system.probe, - errorReporter: this.system.errorReporter + rateLimiter: this.rateLimiter }); newStreams.set(syncRules.id, stream); stream.start(); @@ -169,7 +167,7 @@ export class WalStreamManager { // for example from stricter validation that was added. // This will be retried every couple of seconds. // When new (valid) sync rules are deployed and processed, this one be disabled. 
- logger.error(`Failed to start replication for ${syncRules.slot_name}`, e); + container.logger.error(`Failed to start replication for ${syncRules.slot_name}`, e); } } } @@ -185,7 +183,7 @@ export class WalStreamManager { await stream.terminate(); } catch (e) { // This will be retried - logger.warn(`Failed to terminate ${stream.slot_name}`, e); + container.logger.warn(`Failed to terminate ${stream.slot_name}`, e); } } @@ -201,16 +199,14 @@ export class WalStreamManager { factory: this.storage, storage: storage, source_db: this.system.config.connection!, - lock, - probe: this.system.probe, - errorReporter: this.system.errorReporter + lock }); await stream.terminate(); } finally { await lock.release(); } } catch (e) { - logger.warn(`Failed to terminate ${syncRules.slot_name}`, e); + container.logger.warn(`Failed to terminate ${syncRules.slot_name}`, e); } } } diff --git a/packages/service-core/src/replication/WalStreamRunner.ts b/packages/service-core/src/replication/WalStreamRunner.ts index 6b2857386..5b11fbace 100644 --- a/packages/service-core/src/replication/WalStreamRunner.ts +++ b/packages/service-core/src/replication/WalStreamRunner.ts @@ -6,7 +6,7 @@ import * as util from '../util/util-index.js'; import { ErrorRateLimiter } from './ErrorRateLimiter.js'; import { MissingReplicationSlotError, WalStream } from './WalStream.js'; import { ResolvedConnection } from '../util/config/types.js'; -import { ErrorReporter, logger, ProbeModule } from '@powersync/service-framework'; +import { container } from '@powersync/service-framework'; export interface WalStreamRunnerOptions { factory: storage.BucketStorageFactory; @@ -14,8 +14,6 @@ export interface WalStreamRunnerOptions { source_db: ResolvedConnection; lock: storage.ReplicationLock; rateLimiter?: ErrorRateLimiter; - probe: ProbeModule; - errorReporter: ErrorReporter; } export class WalStreamRunner { @@ -48,12 +46,12 @@ export class WalStreamRunner { await this.replicateLoop(); } catch (e) { // Fatal exception - this.options.errorReporter.captureException(e, { + container.reporter.captureException(e, { metadata: { replication_slot: this.slot_name } }); - logger.error(`Replication failed on ${this.slot_name}`, e); + container.logger.error(`Replication failed on ${this.slot_name}`, e); if (e instanceof MissingReplicationSlotError) { // This stops replication on this slot, and creates a new slot @@ -94,13 +92,11 @@ export class WalStreamRunner { abort_signal: this.abortController.signal, factory: this.options.factory, storage: this.options.storage, - connections, - probe: this.options.probe, - errorReporter: this.options.errorReporter + connections }); await stream.replicate(); } catch (e) { - logger.error(`Replication error`, e); + container.logger.error(`Replication error`, e); if (e.cause != null) { // Example: // PgError.conn_ended: Unable to do postgres query on ended connection @@ -122,13 +118,13 @@ export class WalStreamRunner { // [Symbol(pg.ErrorResponse)]: undefined // } // Without this additional log, the cause would not be visible in the logs. - logger.error(`cause`, e.cause); + container.logger.error(`cause`, e.cause); } if (e instanceof MissingReplicationSlotError) { throw e; } else { // Report the error if relevant, before retrying - this.options.errorReporter.captureException(e, { + container.reporter.captureException(e, { metadata: { replication_slot: this.slot_name } @@ -148,7 +144,7 @@ export class WalStreamRunner { * This will also release the lock if start() was called earlier. 
*/ async stop(options?: { force?: boolean }) { - logger.info(`${this.slot_name} Stopping replication`); + container.logger.info(`${this.slot_name} Stopping replication`); // End gracefully this.abortController.abort(); @@ -165,7 +161,7 @@ export class WalStreamRunner { * Stops replication if needed. */ async terminate(options?: { force?: boolean }) { - logger.info(`${this.slot_name} Terminating replication`); + container.logger.info(`${this.slot_name} Terminating replication`); await this.stop(options); const slotName = this.slot_name; diff --git a/packages/service-core/src/routes/endpoints/admin.ts b/packages/service-core/src/routes/endpoints/admin.ts index b0b06e5f4..b2f044352 100644 --- a/packages/service-core/src/routes/endpoints/admin.ts +++ b/packages/service-core/src/routes/endpoints/admin.ts @@ -1,4 +1,4 @@ -import * as framework from '@powersync/service-framework'; +import { errors, router, schema } from '@powersync/service-framework'; import { SqlSyncRules, SqliteValue, StaticSchema, isJsonValue, toSyncRulesValue } from '@powersync/service-sync-rules'; import { internal_routes } from '@powersync/service-types'; @@ -11,9 +11,9 @@ import { authApi } from '../auth.js'; const demoCredentials = routeDefinition({ path: '/api/admin/v1/demo-credentials', - method: framework.router.HTTPMethod.POST, + method: router.HTTPMethod.POST, authorize: authApi, - validator: framework.schema.createTsCodecValidator(internal_routes.DemoCredentialsRequest, { + validator: schema.createTsCodecValidator(internal_routes.DemoCredentialsRequest, { allowAdditional: true }), handler: async (payload) => { @@ -33,9 +33,9 @@ const demoCredentials = routeDefinition({ export const executeSql = routeDefinition({ path: '/api/admin/v1/execute-sql', - method: framework.router.HTTPMethod.POST, + method: router.HTTPMethod.POST, authorize: authApi, - validator: framework.schema.createTsCodecValidator(internal_routes.ExecuteSqlRequest, { allowAdditional: true }), + validator: schema.createTsCodecValidator(internal_routes.ExecuteSqlRequest, { allowAdditional: true }), handler: async (payload) => { const connection = payload.context.system.config.connection; if (connection == null || !connection.debug_api) { @@ -83,9 +83,9 @@ export const executeSql = routeDefinition({ export const diagnostics = routeDefinition({ path: '/api/admin/v1/diagnostics', - method: framework.HTTPMethod.POST, + method: router.HTTPMethod.POST, authorize: authApi, - validator: framework.schema.createTsCodecValidator(internal_routes.DiagnosticsRequest, { allowAdditional: true }), + validator: schema.createTsCodecValidator(internal_routes.DiagnosticsRequest, { allowAdditional: true }), handler: async (payload) => { const include_content = payload.params.sync_rules_content ?? 
false; const system = payload.context.system; @@ -123,9 +123,9 @@ export const diagnostics = routeDefinition({ export const getSchema = routeDefinition({ path: '/api/admin/v1/schema', - method: framework.router.HTTPMethod.POST, + method: router.HTTPMethod.POST, authorize: authApi, - validator: framework.schema.createTsCodecValidator(internal_routes.GetSchemaRequest, { allowAdditional: true }), + validator: schema.createTsCodecValidator(internal_routes.GetSchemaRequest, { allowAdditional: true }), handler: async (payload) => { const system = payload.context.system; @@ -135,9 +135,9 @@ export const getSchema = routeDefinition({ export const reprocess = routeDefinition({ path: '/api/admin/v1/reprocess', - method: framework.router.HTTPMethod.POST, + method: router.HTTPMethod.POST, authorize: authApi, - validator: framework.schema.createTsCodecValidator(internal_routes.ReprocessRequest, { allowAdditional: true }), + validator: schema.createTsCodecValidator(internal_routes.ReprocessRequest, { allowAdditional: true }), handler: async (payload) => { const system = payload.context.system; @@ -149,7 +149,7 @@ export const reprocess = routeDefinition({ const active = await storage.getActiveSyncRules(); if (active == null) { - throw new framework.errors.JourneyError({ + throw new errors.JourneyError({ status: 422, code: 'NO_SYNC_RULES', description: 'No active sync rules' @@ -174,9 +174,9 @@ export const reprocess = routeDefinition({ export const validate = routeDefinition({ path: '/api/admin/v1/validate', - method: framework.router.HTTPMethod.POST, + method: router.HTTPMethod.POST, authorize: authApi, - validator: framework.schema.createTsCodecValidator(internal_routes.ValidateRequest, { allowAdditional: true }), + validator: schema.createTsCodecValidator(internal_routes.ValidateRequest, { allowAdditional: true }), handler: async (payload) => { const system = payload.context.system; diff --git a/packages/service-core/src/routes/endpoints/checkpointing.ts b/packages/service-core/src/routes/endpoints/checkpointing.ts index 44d7d59ff..14d6aebb2 100644 --- a/packages/service-core/src/routes/endpoints/checkpointing.ts +++ b/packages/service-core/src/routes/endpoints/checkpointing.ts @@ -1,5 +1,5 @@ import * as t from 'ts-codec'; -import * as framework from '@powersync/service-framework'; +import { router, schema } from '@powersync/service-framework'; import * as util from '../../util/util-index.js'; import { authUser } from '../auth.js'; @@ -9,9 +9,9 @@ const WriteCheckpointRequest = t.object({}); export const writeCheckpoint = routeDefinition({ path: '/write-checkpoint.json', - method: framework.router.HTTPMethod.GET, + method: router.HTTPMethod.GET, authorize: authUser, - validator: framework.schema.createTsCodecValidator(WriteCheckpointRequest, { allowAdditional: true }), + validator: schema.createTsCodecValidator(WriteCheckpointRequest, { allowAdditional: true }), handler: async (payload) => { const system = payload.context.system; const storage = system.storage; @@ -25,9 +25,9 @@ export const writeCheckpoint = routeDefinition({ export const writeCheckpoint2 = routeDefinition({ path: '/write-checkpoint2.json', - method: framework.router.HTTPMethod.GET, + method: router.HTTPMethod.GET, authorize: authUser, - validator: framework.schema.createTsCodecValidator(WriteCheckpointRequest, { allowAdditional: true }), + validator: schema.createTsCodecValidator(WriteCheckpointRequest, { allowAdditional: true }), handler: async (payload) => { const { user_id, system } = payload.context; const storage = 
system.storage; diff --git a/packages/service-core/src/routes/endpoints/dev.ts b/packages/service-core/src/routes/endpoints/dev.ts index 799d45ce4..b7f28c968 100644 --- a/packages/service-core/src/routes/endpoints/dev.ts +++ b/packages/service-core/src/routes/endpoints/dev.ts @@ -1,6 +1,6 @@ import * as t from 'ts-codec'; -import * as framework from '@powersync/service-framework'; import * as pgwire from '@powersync/service-jpgwire'; +import { errors, router, schema } from '@powersync/service-framework'; import * as util from '../../util/util-index.js'; import { authDevUser, authUser, endpoint, issueDevToken, issueLegacyDevToken, issuePowerSyncToken } from '../auth.js'; @@ -14,42 +14,42 @@ const AuthParams = t.object({ // For legacy web client only. Remove soon. export const auth = routeDefinition({ path: '/auth.json', - method: framework.router.HTTPMethod.POST, - validator: framework.schema.createTsCodecValidator(AuthParams, { allowAdditional: true }), + method: router.HTTPMethod.POST, + validator: schema.createTsCodecValidator(AuthParams, { allowAdditional: true }), handler: async (payload) => { const { user, password } = payload.params; const config = payload.context.system.config; if (config.dev.demo_auth == false || config.dev.demo_password == null) { - throw new framework.errors.AuthorizationError(['Demo auth disabled']); + throw new errors.AuthorizationError(['Demo auth disabled']); } if (password == config.dev.demo_password) { const token = await issueLegacyDevToken(payload.request, user, payload.context.system.config); return { token, user_id: user, endpoint: endpoint(payload.request) }; } else { - throw new framework.errors.AuthorizationError(['Authentication failed']); + throw new errors.AuthorizationError(['Authentication failed']); } } }); export const auth2 = routeDefinition({ path: '/dev/auth.json', - method: framework.router.HTTPMethod.POST, - validator: framework.schema.createTsCodecValidator(AuthParams, { allowAdditional: true }), + method: router.HTTPMethod.POST, + validator: schema.createTsCodecValidator(AuthParams, { allowAdditional: true }), handler: async (payload) => { const { user, password } = payload.params; const config = payload.context.system.config; if (config.dev.demo_auth == false || config.dev.demo_password == null) { - throw new framework.errors.AuthorizationError(['Demo auth disabled']); + throw new errors.AuthorizationError(['Demo auth disabled']); } if (password == config.dev.demo_password) { const token = await issueDevToken(payload.request, user, payload.context.system.config); return { token, user_id: user }; } else { - throw new framework.errors.AuthorizationError(['Authentication failed']); + throw new errors.AuthorizationError(['Authentication failed']); } } }); @@ -58,8 +58,8 @@ const TokenParams = t.object({}); export const token = routeDefinition({ path: '/dev/token.json', - method: framework.router.HTTPMethod.POST, - validator: framework.schema.createTsCodecValidator(TokenParams, { allowAdditional: true }), + method: router.HTTPMethod.POST, + validator: schema.createTsCodecValidator(TokenParams, { allowAdditional: true }), authorize: authDevUser, handler: async (payload) => { const { user_id } = payload.context; @@ -89,8 +89,8 @@ const CrudRequest = t.object({ export const crud = routeDefinition({ path: '/crud.json', - method: framework.router.HTTPMethod.POST, - validator: framework.schema.createTsCodecValidator(CrudRequest, { allowAdditional: true }), + method: router.HTTPMethod.POST, + validator: schema.createTsCodecValidator(CrudRequest, { 
allowAdditional: true }), authorize: authUser, handler: async (payload) => { diff --git a/packages/service-core/src/routes/endpoints/socket-route.ts b/packages/service-core/src/routes/endpoints/socket-route.ts index f4096ae99..3aa785b55 100644 --- a/packages/service-core/src/routes/endpoints/socket-route.ts +++ b/packages/service-core/src/routes/endpoints/socket-route.ts @@ -1,6 +1,6 @@ import { serialize } from 'bson'; import { SyncParameters, normalizeTokenParameters } from '@powersync/service-sync-rules'; -import * as framework from '@powersync/service-framework'; +import { container, errors, schema } from '@powersync/service-framework'; import * as util from '../../util/util-index.js'; import { streamResponse } from '../../sync/sync.js'; @@ -16,13 +16,13 @@ export const syncStreamReactive: SocketRouteGenerator = (router) => errors: ['Authentication required'].concat(context.token_errors ?? []) }; }, - validator: framework.schema.createTsCodecValidator(util.StreamingSyncRequest, { allowAdditional: true }), + validator: schema.createTsCodecValidator(util.StreamingSyncRequest, { allowAdditional: true }), handler: async ({ context, params, responder, observer, initialN }) => { const { system } = context; if (system.closed) { responder.onError( - new framework.errors.JourneyError({ + new errors.JourneyError({ status: 503, code: 'SERVICE_UNAVAILABLE', description: 'Service temporarily unavailable' @@ -44,7 +44,7 @@ export const syncStreamReactive: SocketRouteGenerator = (router) => const cp = await storage.getActiveCheckpoint(); if (!cp.hasSyncRules()) { responder.onError( - new framework.errors.JourneyError({ + new errors.JourneyError({ status: 500, code: 'NO_SYNC_RULES', description: 'No sync rules available' @@ -122,8 +122,8 @@ export const syncStreamReactive: SocketRouteGenerator = (router) => } catch (ex) { // Convert to our standard form before responding. // This ensures the error can be serialized. 
- const error = new framework.errors.InternalServerError(ex); - framework.logger.error('Sync stream error', error); + const error = new errors.InternalServerError(ex); + container.logger.error('Sync stream error', error); responder.onError(error); } finally { responder.onComplete(); diff --git a/packages/service-core/src/routes/endpoints/sync-rules.ts b/packages/service-core/src/routes/endpoints/sync-rules.ts index 77e052639..e6d786a6d 100644 --- a/packages/service-core/src/routes/endpoints/sync-rules.ts +++ b/packages/service-core/src/routes/endpoints/sync-rules.ts @@ -1,7 +1,7 @@ import * as t from 'ts-codec'; import { FastifyPluginAsync } from 'fastify'; -import * as framework from '@powersync/service-framework'; import * as pgwire from '@powersync/service-jpgwire'; +import { errors, router, schema } from '@powersync/service-framework'; import { SqlSyncRules, SyncRulesErrors } from '@powersync/service-sync-rules'; import * as replication from '../../replication/replication-index.js'; @@ -33,15 +33,15 @@ yamlPlugin[Symbol.for('skip-override')] = true; export const deploySyncRules = routeDefinition({ path: '/api/sync-rules/v1/deploy', - method: framework.router.HTTPMethod.POST, + method: router.HTTPMethod.POST, authorize: authApi, parse: true, plugins: [yamlPlugin], - validator: framework.schema.createTsCodecValidator(DeploySyncRulesRequest, { allowAdditional: true }), + validator: schema.createTsCodecValidator(DeploySyncRulesRequest, { allowAdditional: true }), handler: async (payload) => { if (payload.context.system.config.sync_rules.present) { // If sync rules are configured via the config, disable deploy via the API. - throw new framework.errors.JourneyError({ + throw new errors.JourneyError({ status: 422, code: 'API_DISABLED', description: 'Sync rules API disabled', @@ -53,7 +53,7 @@ export const deploySyncRules = routeDefinition({ try { SqlSyncRules.fromYaml(payload.params.content); } catch (e) { - throw new framework.errors.JourneyError({ + throw new errors.JourneyError({ status: 422, code: 'INVALID_SYNC_RULES', description: 'Sync rules parsing failed', @@ -77,11 +77,11 @@ const ValidateSyncRulesRequest = t.object({ export const validateSyncRules = routeDefinition({ path: '/api/sync-rules/v1/validate', - method: framework.router.HTTPMethod.POST, + method: router.HTTPMethod.POST, authorize: authApi, parse: true, plugins: [yamlPlugin], - validator: framework.schema.createTsCodecValidator(ValidateSyncRulesRequest, { allowAdditional: true }), + validator: schema.createTsCodecValidator(ValidateSyncRulesRequest, { allowAdditional: true }), handler: async (payload) => { const content = payload.params.content; @@ -93,13 +93,13 @@ export const validateSyncRules = routeDefinition({ export const currentSyncRules = routeDefinition({ path: '/api/sync-rules/v1/current', - method: framework.router.HTTPMethod.GET, + method: router.HTTPMethod.GET, authorize: authApi, handler: async (payload) => { const storage = payload.context.system.storage; const sync_rules = await storage.getActiveSyncRulesContent(); if (!sync_rules) { - throw new framework.errors.JourneyError({ + throw new errors.JourneyError({ status: 422, code: 'NO_SYNC_RULES', description: 'No active sync rules' @@ -136,14 +136,14 @@ const ReprocessSyncRulesRequest = t.object({}); export const reprocessSyncRules = routeDefinition({ path: '/api/sync-rules/v1/reprocess', - method: framework.router.HTTPMethod.POST, + method: router.HTTPMethod.POST, authorize: authApi, - validator: 
framework.schema.createTsCodecValidator(ReprocessSyncRulesRequest), + validator: schema.createTsCodecValidator(ReprocessSyncRulesRequest), handler: async (payload) => { const storage = payload.context.system.storage; const sync_rules = await storage.getActiveSyncRules(); if (sync_rules == null) { - throw new framework.errors.JourneyError({ + throw new errors.JourneyError({ status: 422, code: 'NO_SYNC_RULES', description: 'No active sync rules' @@ -162,7 +162,7 @@ export const reprocessSyncRules = routeDefinition({ export const SYNC_RULES_ROUTES = [validateSyncRules, deploySyncRules, reprocessSyncRules, currentSyncRules]; function replyPrettyJson(payload: any) { - return new framework.router.RouterResponse({ + return new router.RouterResponse({ status: 200, data: JSON.stringify(payload, null, 2) + '\n', headers: { 'Content-Type': 'application/json' } diff --git a/packages/service-core/src/routes/endpoints/sync-stream.ts b/packages/service-core/src/routes/endpoints/sync-stream.ts index af216e0e6..2ecf194d9 100644 --- a/packages/service-core/src/routes/endpoints/sync-stream.ts +++ b/packages/service-core/src/routes/endpoints/sync-stream.ts @@ -1,6 +1,6 @@ import { Readable } from 'stream'; import { SyncParameters, normalizeTokenParameters } from '@powersync/service-sync-rules'; -import * as framework from '@powersync/service-framework'; +import { container, errors, router, schema } from '@powersync/service-framework'; import * as sync from '../../sync/sync-index.js'; import * as util from '../../util/util-index.js'; @@ -15,14 +15,14 @@ export enum SyncRoutes { export const syncStreamed = routeDefinition({ path: SyncRoutes.STREAM, - method: framework.router.HTTPMethod.POST, + method: router.HTTPMethod.POST, authorize: authUser, - validator: framework.schema.createTsCodecValidator(util.StreamingSyncRequest, { allowAdditional: true }), + validator: schema.createTsCodecValidator(util.StreamingSyncRequest, { allowAdditional: true }), handler: async (payload) => { const system = payload.context.system; if (system.closed) { - throw new framework.errors.JourneyError({ + throw new errors.JourneyError({ status: 503, code: 'SERVICE_UNAVAILABLE', description: 'Service temporarily unavailable' @@ -39,7 +39,7 @@ export const syncStreamed = routeDefinition({ // Sanity check before we start the stream const cp = await storage.getActiveCheckpoint(); if (!cp.hasSyncRules()) { - throw new framework.errors.JourneyError({ + throw new errors.JourneyError({ status: 500, code: 'NO_SYNC_RULES', description: 'No sync rules available' @@ -76,11 +76,11 @@ export const syncStreamed = routeDefinition({ controller.abort(); // Note: This appears as a 200 response in the logs. 
if (error.message != 'Shutting down system') { - framework.logger.error('Streaming sync request failed', error); + container.logger.error('Streaming sync request failed', error); } }); - return new framework.router.RouterResponse({ + return new router.RouterResponse({ status: 200, headers: { 'Content-Type': 'application/x-ndjson' diff --git a/packages/service-core/src/routes/hooks.ts b/packages/service-core/src/routes/hooks.ts index e2ddae4f1..984d56a05 100644 --- a/packages/service-core/src/routes/hooks.ts +++ b/packages/service-core/src/routes/hooks.ts @@ -1,7 +1,7 @@ import type fastify from 'fastify'; import a from 'async'; -import * as framework from '@powersync/service-framework'; +import { container } from '@powersync/service-framework'; export type CreateRequestQueueParams = { max_queue_depth: number; @@ -22,7 +22,7 @@ export const createRequestQueueHook = (params: CreateRequestQueueParams): fastif (params.max_queue_depth == 0 && request_queue.running() == params.concurrency) || (params.max_queue_depth > 0 && request_queue.length() >= params.max_queue_depth) ) { - framework.logger.warn(`${request.method} ${request.url}`, { + container.logger.warn(`${request.method} ${request.url}`, { status: 429, method: request.method, path: request.url, diff --git a/packages/service-core/src/routes/route-register.ts b/packages/service-core/src/routes/route-register.ts index ccc5fba98..ac2011105 100644 --- a/packages/service-core/src/routes/route-register.ts +++ b/packages/service-core/src/routes/route-register.ts @@ -1,6 +1,6 @@ import fastify from 'fastify'; -import * as framework from '@powersync/service-framework'; +import { container, errors, router, HTTPMethod } from '@powersync/service-framework'; import { Context, ContextProvider, RequestEndpoint, RequestEndpointHandlerPayload } from './router.js'; export type FastifyEndpoint = RequestEndpoint & { @@ -21,10 +21,10 @@ export function registerFastifyRoutes( app.register(async function (fastify) { fastify.route({ url: e.path, - method: e.method as framework.HTTPMethod, + method: e.method as HTTPMethod, handler: async (request, reply) => { const startTime = new Date(); - let response: framework.router.RouterResponse; + let response: router.RouterResponse; try { const context = await contextProvider(request); @@ -46,27 +46,25 @@ export function registerFastifyRoutes( request }; - const endpointResponse = await framework.router.executeEndpoint(e, payload); + const endpointResponse = await router.executeEndpoint(e, payload); - if (framework.RouterResponse.isRouterResponse(endpointResponse)) { + if (router.RouterResponse.isRouterResponse(endpointResponse)) { response = endpointResponse; - } else if (framework.router.isAsyncIterable(endpointResponse) || Buffer.isBuffer(endpointResponse)) { - response = new framework.router.RouterResponse({ + } else if (router.isAsyncIterable(endpointResponse) || Buffer.isBuffer(endpointResponse)) { + response = new router.RouterResponse({ status: 200, data: endpointResponse }); } else { - response = new framework.router.RouterResponse({ + response = new router.RouterResponse({ status: 200, data: { data: endpointResponse } }); } } catch (ex) { - const journeyError = framework.errors.JourneyError.isJourneyError(ex) - ? ex - : new framework.errors.InternalServerError(ex); + const journeyError = errors.JourneyError.isJourneyError(ex) ? 
ex : new errors.InternalServerError(ex); - response = new framework.RouterResponse({ + response = new router.RouterResponse({ status: journeyError.errorData.status || 500, headers: { 'Content-Type': 'application/json' @@ -85,7 +83,7 @@ export function registerFastifyRoutes( await reply.send(response.data); } finally { await response.afterSend?.(); - framework.logger.info(`${e.method} ${request.url}`, { + container.logger.info(`${e.method} ${request.url}`, { duration_ms: Math.round(new Date().valueOf() - startTime.valueOf() + Number.EPSILON), status: response.status, method: e.method, diff --git a/packages/service-core/src/routes/router.ts b/packages/service-core/src/routes/router.ts index c83cffaf4..6972fff1a 100644 --- a/packages/service-core/src/routes/router.ts +++ b/packages/service-core/src/routes/router.ts @@ -1,4 +1,4 @@ -import * as framework from '@powersync/service-framework'; +import { router } from '@powersync/service-framework'; import * as auth from '../auth/auth-index.js'; import { CorePowerSyncSystem } from '../system/CorePowerSyncSystem.js'; @@ -26,13 +26,13 @@ export type RequestEndpoint< O, C = Context, Payload = RequestEndpointHandlerPayload -> = framework.router.Endpoint & {}; +> = router.Endpoint & {}; export type RequestEndpointHandlerPayload< I = any, C = Context, Request = BasicRouterRequest -> = framework.router.EndpointHandlerPayload & { +> = router.EndpointHandlerPayload & { request: Request; }; diff --git a/packages/service-core/src/runner/teardown.ts b/packages/service-core/src/runner/teardown.ts index 8c070e502..0cf8a9495 100644 --- a/packages/service-core/src/runner/teardown.ts +++ b/packages/service-core/src/runner/teardown.ts @@ -5,11 +5,12 @@ import * as timers from 'timers/promises'; +import { container } from '@powersync/service-framework'; + import * as db from '../db/db-index.js'; import * as storage from '../storage/storage-index.js'; import * as utils from '../util/util-index.js'; import * as replication from '../replication/replication-index.js'; -import { logger, createFSProbe, ErrorReporter } from '@powersync/service-framework'; /** * Attempt to terminate a single sync rules instance. @@ -19,8 +20,7 @@ import { logger, createFSProbe, ErrorReporter } from '@powersync/service-framewo async function terminateReplicator( storageFactory: storage.BucketStorageFactory, connection: utils.ResolvedConnection, - syncRules: storage.PersistedSyncRulesContent, - errorReporter: ErrorReporter + syncRules: storage.PersistedSyncRulesContent ) { // The lock may still be active if the current replication instance // hasn't stopped yet. 
@@ -32,14 +32,12 @@ async function terminateReplicator( factory: storageFactory, storage: storage, source_db: connection, - lock, - probe: createFSProbe(), - errorReporter + lock }); - logger.info(`Terminating replication slot ${stream.slot_name}`); + container.logger.info(`Terminating replication slot ${stream.slot_name}`); await stream.terminate(); - logger.info(`Terminated replication slot ${stream.slot_name}`); + container.logger.info(`Terminated replication slot ${stream.slot_name}`); } finally { await lock.release(); } @@ -55,8 +53,7 @@ async function terminateReplicator( */ async function terminateReplicators( storageFactory: storage.BucketStorageFactory, - connection: utils.ResolvedConnection, - errorReporter: ErrorReporter + connection: utils.ResolvedConnection ) { const start = Date.now(); while (Date.now() - start < 12_000) { @@ -64,11 +61,11 @@ async function terminateReplicators( const replicationRules = await storageFactory.getReplicatingSyncRules(); for (let syncRules of replicationRules) { try { - await terminateReplicator(storageFactory, connection, syncRules, errorReporter); + await terminateReplicator(storageFactory, connection, syncRules); } catch (e) { retry = true; console.error(e); - logger.warn(`Failed to terminate ${syncRules.slot_name}`, e); + container.logger.warn(`Failed to terminate ${syncRules.slot_name}`, e); } } if (!retry) { @@ -80,26 +77,26 @@ async function terminateReplicators( // TODO should there be a global context for things like alerting? -export async function teardown(runnerConfig: utils.RunnerConfig, errorReporter?: ErrorReporter) { +export async function teardown(runnerConfig: utils.RunnerConfig) { const config = await utils.loadConfig(runnerConfig); const mongoDB = storage.createPowerSyncMongo(config.storage); try { - logger.info(`Waiting for auth`); + container.logger.info(`Waiting for auth`); await db.mongo.waitForAuth(mongoDB.db); const bucketStorage = new storage.MongoBucketStorage(mongoDB, { slot_name_prefix: config.slot_name_prefix }); const connection = config.connection; - logger.info(`Terminating replication slots`); + container.logger.info(`Terminating replication slots`); if (connection) { await terminateReplicators(bucketStorage, connection); } const database = mongoDB.db; - logger.info(`Dropping database ${database.namespace}`); + container.logger.info(`Dropping database ${database.namespace}`); await database.dropDatabase(); - logger.info(`Done`); + container.logger.info(`Done`); await mongoDB.client.close(); // If there was an error connecting to postgress, the process may stay open indefinitely. @@ -107,7 +104,7 @@ export async function teardown(runnerConfig: utils.RunnerConfig, errorReporter?: // We do not consider those errors a teardown failure. 
process.exit(0); } catch (e) { - logger.error(`Teardown failure`, e); + container.logger.error(`Teardown failure`, e); await mongoDB.client.close(); process.exit(1); } diff --git a/packages/service-core/src/storage/MongoBucketStorage.ts b/packages/service-core/src/storage/MongoBucketStorage.ts index 39357b72d..b3fde19b7 100644 --- a/packages/service-core/src/storage/MongoBucketStorage.ts +++ b/packages/service-core/src/storage/MongoBucketStorage.ts @@ -24,7 +24,7 @@ import { PowerSyncMongo, PowerSyncMongoOptions } from './mongo/db.js'; import { SyncRuleDocument, SyncRuleState } from './mongo/models.js'; import { generateSlotName } from './mongo/util.js'; import { v4 as uuid } from 'uuid'; -import { ErrorReporter, logger } from '@powersync/service-framework'; +import { container } from '@powersync/service-framework'; export interface MongoBucketStorageOptions extends PowerSyncMongoOptions {} @@ -32,7 +32,6 @@ export class MongoBucketStorage implements BucketStorageFactory { private readonly client: mongo.MongoClient; private readonly session: mongo.ClientSession; public readonly slot_name_prefix: string; - protected errorReporter: ErrorReporter; private readonly storageCache = new LRUCache({ max: 3, @@ -55,11 +54,10 @@ export class MongoBucketStorage implements BucketStorageFactory { public readonly db: PowerSyncMongo; - constructor(db: PowerSyncMongo, options: { slot_name_prefix: string; errorReporter: ErrorReporter }) { + constructor(db: PowerSyncMongo, options: { slot_name_prefix: string }) { this.client = db.client; this.db = db; this.session = this.client.startSession(); - this.errorReporter = options.errorReporter; this.slot_name_prefix = options.slot_name_prefix; } @@ -68,7 +66,7 @@ export class MongoBucketStorage implements BucketStorageFactory { if ((typeof id as any) == 'bigint') { id = Number(id); } - return new MongoSyncBucketStorage(this, id, sync_rules, slot_name, this.errorReporter); + return new MongoSyncBucketStorage(this, id, sync_rules, slot_name); } async configureSyncRules(sync_rules: string, options?: { lock?: boolean }) { @@ -76,13 +74,13 @@ export class MongoBucketStorage implements BucketStorageFactory { const active = await this.getActiveSyncRulesContent(); if (next?.sync_rules_content == sync_rules) { - logger.info('Sync rules from configuration unchanged'); + container.logger.info('Sync rules from configuration unchanged'); return { updated: false }; } else if (next == null && active?.sync_rules_content == sync_rules) { - logger.info('Sync rules from configuration unchanged'); + container.logger.info('Sync rules from configuration unchanged'); return { updated: false }; } else { - logger.info('Sync rules updated from configuration'); + container.logger.info('Sync rules updated from configuration'); const persisted_sync_rules = await this.updateSyncRules({ content: sync_rules, lock: options?.lock diff --git a/packages/service-core/src/storage/mongo/MongoBucketBatch.ts b/packages/service-core/src/storage/mongo/MongoBucketBatch.ts index 9ca151847..325c619d7 100644 --- a/packages/service-core/src/storage/mongo/MongoBucketBatch.ts +++ b/packages/service-core/src/storage/mongo/MongoBucketBatch.ts @@ -2,9 +2,9 @@ import { SqliteRow, SqlSyncRules } from '@powersync/service-sync-rules'; import * as bson from 'bson'; import * as mongo from 'mongodb'; -import * as framework from '@powersync/service-framework'; import * as util from '../../util/util-index.js'; import * as replication from '../../replication/replication-index.js'; +import { container, errors } from 
'@powersync/service-framework'; import { BucketStorageBatch, FlushedResult, mergeToast, SaveOptions } from '../BucketStorage.js'; import { SourceTable } from '../SourceTable.js'; import { PowerSyncMongo } from './db.js'; @@ -61,8 +61,7 @@ export class MongoBucketBatch implements BucketStorageBatch { group_id: number, slot_name: string, last_checkpoint_lsn: string | null, - no_checkpoint_before_lsn: string | null, - protected errorReporter: framework.ErrorReporter + no_checkpoint_before_lsn: string | null ) { this.db = db; this.client = db.client; @@ -279,10 +278,10 @@ export class MongoBucketBatch implements BucketStorageBatch { ); afterData = new bson.Binary(bson.serialize(after!)); - this.errorReporter.captureMessage( + container.reporter.captureMessage( `Data too big on ${record.sourceTable.qualifiedName}.${record.after?.id}: ${e.message}`, { - level: framework.errors.ErrorSeverity.WARNING, + level: errors.ErrorSeverity.WARNING, metadata: { replication_slot: this.slot_name, table: record.sourceTable.qualifiedName @@ -330,23 +329,23 @@ export class MongoBucketBatch implements BucketStorageBatch { if (afterId && after && util.isCompleteRow(after)) { // Insert or update if (sourceTable.syncData) { - const { results: evaluated, errors } = this.sync_rules.evaluateRowWithErrors({ + const { results: evaluated, errors: syncErrors } = this.sync_rules.evaluateRowWithErrors({ record: after, sourceTable }); - for (let error of errors) { - this.errorReporter.captureMessage( + for (let error of syncErrors) { + container.reporter.captureMessage( `Failed to evaluate data query on ${record.sourceTable.qualifiedName}.${record.after?.id}: ${error.error}`, { - level: framework.errors.ErrorSeverity.WARNING, + level: errors.ErrorSeverity.WARNING, metadata: { replication_slot: this.slot_name, table: record.sourceTable.qualifiedName } } ); - framework.logger.error( + container.logger.error( `Failed to evaluate data query on ${record.sourceTable.qualifiedName}.${record.after?.id}: ${error.error}` ); } @@ -376,17 +375,17 @@ export class MongoBucketBatch implements BucketStorageBatch { ); for (let error of paramErrors) { - this.errorReporter.captureMessage( + container.reporter.captureMessage( `Failed to evaluate parameter query on ${record.sourceTable.qualifiedName}.${record.after?.id}: ${error.error}`, { - level: framework.errors.ErrorSeverity.WARNING, + level: errors.ErrorSeverity.WARNING, metadata: { replication_slot: this.slot_name, table: record.sourceTable.qualifiedName } } ); - framework.logger.error( + container.logger.error( `Failed to evaluate parameter query on ${record.sourceTable.qualifiedName}.${after.id}: ${error.error}` ); } @@ -440,7 +439,7 @@ export class MongoBucketBatch implements BucketStorageBatch { if (e instanceof mongo.MongoError && e.hasErrorLabel('TransientTransactionError')) { // Likely write conflict caused by concurrent write stream replicating } else { - framework.logger.warn('Transaction error', e as Error); + container.logger.warn('Transaction error', e as Error); } await new Promise((resolve) => setTimeout(resolve, Math.random() * 50)); throw e; @@ -465,7 +464,7 @@ export class MongoBucketBatch implements BucketStorageBatch { await this.withTransaction(async () => { flushTry += 1; if (flushTry % 10 == 0) { - framework.logger.info(`${this.slot_name} ${description} - try ${flushTry}`); + container.logger.info(`${this.slot_name} ${description} - try ${flushTry}`); } if (flushTry > 20 && Date.now() > lastTry) { throw new Error('Max transaction tries exceeded'); @@ -530,11 +529,11 @@ 
export class MongoBucketBatch implements BucketStorageBatch { if (this.last_checkpoint_lsn != null && lsn <= this.last_checkpoint_lsn) { // When re-applying transactions, don't create a new checkpoint until // we are past the last transaction. - framework.logger.info(`Re-applied transaction ${lsn} - skipping checkpoint`); + container.logger.info(`Re-applied transaction ${lsn} - skipping checkpoint`); return false; } if (lsn < this.no_checkpoint_before_lsn) { - framework.logger.info( + container.logger.info( `Waiting until ${this.no_checkpoint_before_lsn} before creating checkpoint, currently at ${lsn}` ); return false; @@ -600,7 +599,7 @@ export class MongoBucketBatch implements BucketStorageBatch { } async save(record: SaveOptions): Promise { - framework.logger.debug(`Saving ${record.tag}:${record.before?.id}/${record.after?.id}`); + container.logger.debug(`Saving ${record.tag}:${record.before?.id}/${record.after?.id}`); this.batch ??= new OperationBatch(); this.batch.push(new RecordOperation(record)); diff --git a/packages/service-core/src/storage/mongo/MongoSyncBucketStorage.ts b/packages/service-core/src/storage/mongo/MongoSyncBucketStorage.ts index 8a98049e7..d1db697d4 100644 --- a/packages/service-core/src/storage/mongo/MongoSyncBucketStorage.ts +++ b/packages/service-core/src/storage/mongo/MongoSyncBucketStorage.ts @@ -22,7 +22,6 @@ import { PowerSyncMongo } from './db.js'; import { BucketDataDocument, BucketDataKey, SourceKey, SyncRuleState } from './models.js'; import { MongoBucketBatch } from './MongoBucketBatch.js'; import { BSON_DESERIALIZE_OPTIONS, idPrefixFilter, readSingleBatch, serializeLookup } from './util.js'; -import { ErrorReporter } from '@powersync/service-framework'; import { ChecksumCache, FetchPartialBucketChecksum } from '../ChecksumCache.js'; export class MongoSyncBucketStorage implements SyncRulesBucketStorage { @@ -37,8 +36,7 @@ export class MongoSyncBucketStorage implements SyncRulesBucketStorage { public readonly factory: MongoBucketStorage, public readonly group_id: number, public readonly sync_rules: SqlSyncRules, - public readonly slot_name: string, - protected errorReporter: ErrorReporter + public readonly slot_name: string ) { this.db = factory.db; } @@ -71,8 +69,7 @@ export class MongoSyncBucketStorage implements SyncRulesBucketStorage { this.group_id, this.slot_name, checkpoint_lsn, - doc?.no_checkpoint_before ?? null, - this.errorReporter + doc?.no_checkpoint_before ?? 
null ); try { await callback(batch); diff --git a/packages/service-core/src/storage/mongo/MongoSyncRulesLock.ts b/packages/service-core/src/storage/mongo/MongoSyncRulesLock.ts index 902e19229..ad407bdb7 100644 --- a/packages/service-core/src/storage/mongo/MongoSyncRulesLock.ts +++ b/packages/service-core/src/storage/mongo/MongoSyncRulesLock.ts @@ -2,7 +2,7 @@ import crypto from 'crypto'; import { PersistedSyncRulesContent, ReplicationLock } from '../BucketStorage.js'; import { PowerSyncMongo } from './db.js'; -import { logger } from '@powersync/service-framework'; +import { container } from '@powersync/service-framework'; /** * Manages a lock on a sync rules document, so that only one process @@ -40,7 +40,7 @@ export class MongoSyncRulesLock implements ReplicationLock { try { await this.refresh(); } catch (e) { - logger.error('Failed to refresh lock', e); + container.logger.error('Failed to refresh lock', e); clearInterval(this.refreshInterval); } }, 30_130); @@ -59,7 +59,7 @@ export class MongoSyncRulesLock implements ReplicationLock { ); if (result.modifiedCount == 0) { // Log and ignore - logger.warn(`Lock already released: ${this.sync_rules_id}/${this.lock_id}`); + container.logger.warn(`Lock already released: ${this.sync_rules_id}/${this.lock_id}`); } } diff --git a/packages/service-core/src/storage/mongo/PersistedBatch.ts b/packages/service-core/src/storage/mongo/PersistedBatch.ts index f3b1f4fec..7f9162e7b 100644 --- a/packages/service-core/src/storage/mongo/PersistedBatch.ts +++ b/packages/service-core/src/storage/mongo/PersistedBatch.ts @@ -16,7 +16,7 @@ import { SourceKey } from './models.js'; import { serializeLookup } from './util.js'; -import { logger } from '@powersync/service-framework'; +import { container } from '@powersync/service-framework'; /** * Maximum size of operations we write in a single transaction. @@ -253,7 +253,7 @@ export class PersistedBatch { }); } - logger.info( + container.logger.info( `powersync_${this.group_id} Flushed ${this.bucketData.length} + ${this.bucketParameters.length} + ${ this.currentData.length } updates, ${Math.round(this.currentSize / 1024)}kb. 
Last op_id: ${this.debugLastOpId}` diff --git a/packages/service-core/src/sync/sync.ts b/packages/service-core/src/sync/sync.ts index b017d1b09..f696455a9 100644 --- a/packages/service-core/src/sync/sync.ts +++ b/packages/service-core/src/sync/sync.ts @@ -1,6 +1,6 @@ import { JSONBig, JsonContainer } from '@powersync/service-jsonbig'; import { SyncParameters } from '@powersync/service-sync-rules'; -import { logger } from '@powersync/service-framework'; +import { container } from '@powersync/service-framework'; import { Semaphore } from 'async-mutex'; import { AbortError } from 'ix/aborterror.js'; @@ -141,7 +141,7 @@ async function* streamResponseInner( message += `buckets: ${allBuckets.length} | `; message += `updated: ${limitedBuckets(diff.updatedBuckets, 20)} | `; message += `removed: ${limitedBuckets(diff.removedBuckets, 20)} | `; - logger.info(message); + container.logger.info(message); const checksum_line: util.StreamingSyncCheckpointDiff = { checkpoint_diff: { @@ -156,7 +156,7 @@ async function* streamResponseInner( } else { let message = `New checkpoint: ${checkpoint} | write: ${writeCheckpoint} | `; message += `buckets: ${allBuckets.length} ${limitedBuckets(allBuckets, 20)}`; - logger.info(message); + container.logger.info(message); bucketsToFetch = allBuckets; const checksum_line: util.StreamingSyncCheckpoint = { checkpoint: { @@ -246,7 +246,7 @@ async function* bucketDataBatch(request: BucketDataRequest) { if (r.data.length == 0) { continue; } - logger.debug(`Sending data for ${r.bucket}`); + container.logger.debug(`Sending data for ${r.bucket}`); let send_data: any; if (binary_data) { diff --git a/packages/service-core/src/system/CorePowerSyncSystem.ts b/packages/service-core/src/system/CorePowerSyncSystem.ts index a00cef19d..15b668ecd 100644 --- a/packages/service-core/src/system/CorePowerSyncSystem.ts +++ b/packages/service-core/src/system/CorePowerSyncSystem.ts @@ -1,30 +1,18 @@ import * as pgwire from '@powersync/service-jpgwire'; -import { - ErrorReporter, - LifeCycledSystem, - LifeCycledSystemOptions, - logger, - NoOpReporter -} from '@powersync/service-framework'; +import { LifeCycledSystem, container } from '@powersync/service-framework'; import * as storage from '../storage/storage-index.js'; import * as utils from '../util/util-index.js'; -export interface CorePowerSyncSystemOptions extends LifeCycledSystemOptions { - errorReporter?: ErrorReporter; -} - export abstract class CorePowerSyncSystem extends LifeCycledSystem { abstract storage: storage.BucketStorageFactory; abstract pgwire_pool?: pgwire.PgClient; - errorReporter: ErrorReporter; closed: boolean; protected stopHandlers: Set<() => void> = new Set(); - constructor(public config: utils.ResolvedPowerSyncConfig, options?: CorePowerSyncSystemOptions) { - super(options); - this.errorReporter = options?.errorReporter ?? NoOpReporter; + constructor(public config: utils.ResolvedPowerSyncConfig) { + super(); this.closed = false; } @@ -43,12 +31,12 @@ export abstract class CorePowerSyncSystem extends LifeCycledSystem { * This is so that the handler is run before the server's handler, allowing streams to be interrupted on exit */ addTerminationHandler() { - this.terminationHandler.handleTerminationSignal(async () => { + container.terminationHandler.handleTerminationSignal(async () => { // Close open streams, so that they don't block the server from closing. // Note: This does not work well when streaming requests are queued. In that case, the server still doesn't // close in the 30-second timeout. 
this.closed = true; - logger.info(`Closing ${this.stopHandlers.size} streams`); + container.logger.info(`Closing ${this.stopHandlers.size} streams`); for (let handler of this.stopHandlers) { handler(); } diff --git a/packages/service-core/src/util/config/collectors/config-collector.ts b/packages/service-core/src/util/config/collectors/config-collector.ts index dc2068dc8..06228d468 100644 --- a/packages/service-core/src/util/config/collectors/config-collector.ts +++ b/packages/service-core/src/util/config/collectors/config-collector.ts @@ -2,7 +2,7 @@ import * as t from 'ts-codec'; import * as yaml from 'yaml'; import { configFile } from '@powersync/service-types'; -import * as framework from '@powersync/service-framework'; +import { schema } from '@powersync/service-framework'; import { RunnerConfig } from '../types.js'; @@ -24,7 +24,7 @@ export enum ConfigFileFormat { const YAML_ENV_PREFIX = 'PS_'; // ts-codec itself doesn't give great validation errors, so we use json schema for that -const configSchemaValidator = framework.schema +const configSchemaValidator = schema .parseJSONSchema( t.generateJSONSchema(configFile.powerSyncConfig, { allowAdditional: true, parsers: [configFile.portParser] }) ) diff --git a/packages/service-core/src/util/config/collectors/impl/filesystem-config-collector.ts b/packages/service-core/src/util/config/collectors/impl/filesystem-config-collector.ts index ee2d815a8..57b53302a 100644 --- a/packages/service-core/src/util/config/collectors/impl/filesystem-config-collector.ts +++ b/packages/service-core/src/util/config/collectors/impl/filesystem-config-collector.ts @@ -1,7 +1,7 @@ import * as fs from 'fs/promises'; import * as path from 'path'; -import { logger } from '@powersync/service-framework'; +import { container } from '@powersync/service-framework'; import { ConfigCollector, ConfigFileFormat } from '../config-collector.js'; import { RunnerConfig } from '../../types.js'; @@ -25,7 +25,7 @@ export class FileSystemConfigCollector extends ConfigCollector { throw new Error(`Config file path ${resolvedPath} was specified, but the file does not exist.`); } - logger.info(`Collecting PowerSync configuration from File: ${resolvedPath}`); + container.logger.info(`Collecting PowerSync configuration from File: ${resolvedPath}`); const content = await fs.readFile(resolvedPath, 'utf-8'); let contentType: ConfigFileFormat | undefined; diff --git a/packages/service-core/src/util/config/compound-config-collector.ts b/packages/service-core/src/util/config/compound-config-collector.ts index 8a8009974..0fda246b2 100644 --- a/packages/service-core/src/util/config/compound-config-collector.ts +++ b/packages/service-core/src/util/config/compound-config-collector.ts @@ -9,7 +9,7 @@ import { Base64SyncRulesCollector } from './sync-rules/impl/base64-sync-rules-co import { InlineSyncRulesCollector } from './sync-rules/impl/inline-sync-rules-collector.js'; import { FileSystemSyncRulesCollector } from './sync-rules/impl/filesystem-sync-rules-collector.js'; import { FallbackConfigCollector } from './collectors/impl/fallback-config-collector.js'; -import { logger } from '@powersync/service-framework'; +import { container } from '@powersync/service-framework'; const POWERSYNC_DEV_KID = 'powersync-dev'; @@ -140,7 +140,7 @@ export class CompoundConfigCollector { if (baseConfig) { return baseConfig; } - logger.debug( + container.logger.debug( `Could not collect PowerSync config with ${collector.name} method. 
Moving on to next method if available.` ); } catch (ex) { @@ -161,7 +161,7 @@ export class CompoundConfigCollector { if (config) { return config; } - logger.debug( + container.logger.debug( `Could not collect sync rules with ${collector.name} method. Moving on to next method if available.` ); } catch (ex) { diff --git a/packages/service-core/src/util/memory-tracking.ts b/packages/service-core/src/util/memory-tracking.ts index d7c096ddf..c50a2b788 100644 --- a/packages/service-core/src/util/memory-tracking.ts +++ b/packages/service-core/src/util/memory-tracking.ts @@ -1,4 +1,4 @@ -import { logger } from '@powersync/service-framework'; +import { container } from '@powersync/service-framework'; /** * Track and log memory usage. @@ -57,7 +57,7 @@ export function trackMemoryUsage() { ) )`.replaceAll(/\s+/g, ' '); - logger.info(output); + container.logger.info(output); } }, 50); } diff --git a/packages/service-core/src/util/pgwire_utils.ts b/packages/service-core/src/util/pgwire_utils.ts index 9de64892a..356969762 100644 --- a/packages/service-core/src/util/pgwire_utils.ts +++ b/packages/service-core/src/util/pgwire_utils.ts @@ -6,7 +6,7 @@ import * as pgwire from '@powersync/service-jpgwire'; import { SqliteJsonValue, SqliteRow, ToastableSqliteRow, toSyncRulesRow } from '@powersync/service-sync-rules'; import * as replication from '../replication/replication-index.js'; -import { logger } from '@powersync/service-framework'; +import { container } from '@powersync/service-framework'; /** * pgwire message -> SQLite row. @@ -133,7 +133,7 @@ export async function retriedQuery(db: pgwire.PgClient, ...args: any[]) { if (tries == 1) { throw e; } - logger.warn('Query error, retrying', e); + container.logger.warn('Query error, retrying', e); } } } diff --git a/packages/service-core/src/util/utils.ts b/packages/service-core/src/util/utils.ts index 821577c63..a67b47857 100644 --- a/packages/service-core/src/util/utils.ts +++ b/packages/service-core/src/util/utils.ts @@ -5,7 +5,7 @@ import { pgwireRows } from '@powersync/service-jpgwire'; import * as storage from '../storage/storage-index.js'; import { BucketChecksum, OpId } from './protocol-types.js'; import { retriedQuery } from './pgwire_utils.js'; -import { logger } from '@powersync/service-framework'; +import { container } from '@powersync/service-framework'; export type ChecksumMap = Map; @@ -90,14 +90,14 @@ export async function getClientCheckpoint( const timeout = options?.timeout ?? 
50_000; - logger.info(`Waiting for LSN checkpoint: ${lsn}`); + container.logger.info(`Waiting for LSN checkpoint: ${lsn}`); while (Date.now() - start < timeout) { const cp = await bucketStorage.getActiveCheckpoint(); if (!cp.hasSyncRules()) { throw new Error('No sync rules available'); } if (cp.lsn >= lsn) { - logger.info(`Got write checkpoint: ${lsn} : ${cp.checkpoint}`); + container.logger.info(`Got write checkpoint: ${lsn} : ${cp.checkpoint}`); return cp.checkpoint; } @@ -117,6 +117,6 @@ export async function createWriteCheckpoint( ); const id = await bucketStorage.createWriteCheckpoint(user_id, { '1': lsn }); - logger.info(`Write checkpoint 2: ${JSON.stringify({ lsn, id: String(id) })}`); + container.logger.info(`Write checkpoint 2: ${JSON.stringify({ lsn, id: String(id) })}`); return id; } diff --git a/packages/service-core/test/src/env.ts b/packages/service-core/test/src/env.ts index f5867fc33..fa207ad08 100644 --- a/packages/service-core/test/src/env.ts +++ b/packages/service-core/test/src/env.ts @@ -1,8 +1,8 @@ -import * as framework from '@powersync/service-framework'; +import { utils } from '@powersync/service-framework'; -export const env = framework.utils.collectEnvironmentVariables({ - MONGO_TEST_URL: framework.utils.type.string.default('mongodb://localhost:27017/powersync_test'), - PG_TEST_URL: framework.utils.type.string.default('postgres://postgres:postgres@localhost:5432/powersync_test'), - CI: framework.utils.type.boolean.default('false'), - SLOW_TESTS: framework.utils.type.boolean.default('false') +export const env = utils.collectEnvironmentVariables({ + MONGO_TEST_URL: utils.type.string.default('mongodb://localhost:27017/powersync_test'), + PG_TEST_URL: utils.type.string.default('postgres://postgres:postgres@localhost:5432/powersync_test'), + CI: utils.type.boolean.default('false'), + SLOW_TESTS: utils.type.boolean.default('false') }); diff --git a/packages/service-framework/src/container.ts b/packages/service-framework/src/container.ts new file mode 100644 index 000000000..49c326eb4 --- /dev/null +++ b/packages/service-framework/src/container.ts @@ -0,0 +1,69 @@ +import winston, { Logger } from 'winston'; + +import { ErrorReporter } from './alerts/definitions.js'; +import { NoOpReporter } from './alerts/no-op-reporter.js'; +import { ProbeModule, TerminationHandler, createFSProbe, createTerminationHandler } from './signals/signals-index.js'; + +export enum ContainerImplementation { + LOGGER = 'logger', + REPORTER = 'reporter', + PROBES = 'probes', + TERMINATION_HANDLER = 'termination-handler' +} + +export type ContainerImplementationTypes = { + [ContainerImplementation.LOGGER]: Logger; + [ContainerImplementation.REPORTER]: ErrorReporter; + [ContainerImplementation.PROBES]: ProbeModule; + [ContainerImplementation.TERMINATION_HANDLER]: TerminationHandler; +}; + +export class Container { + protected implementations: ContainerImplementationTypes; + + /** + * Logger which can be used throughout the entire project + */ + get logger() { + return this.implementations[ContainerImplementation.LOGGER]; + } + + /** + * Manager for system health probes + */ + get probes() { + return this.implementations[ContainerImplementation.PROBES]; + } + + /** + * Error reporter. 
Defaults to a no-op reporter + */ + get reporter() { + return this.implementations[ContainerImplementation.REPORTER]; + } + + /** + * Handler for termination of the Node process + */ + get terminationHandler() { + return this.implementations[ContainerImplementation.TERMINATION_HANDLER]; + } + + constructor() { + this.implementations = { + [ContainerImplementation.LOGGER]: winston.createLogger(), + [ContainerImplementation.REPORTER]: NoOpReporter, + [ContainerImplementation.PROBES]: createFSProbe(), + [ContainerImplementation.TERMINATION_HANDLER]: createTerminationHandler() + }; + } + + /** + * Allows for overriding a default implementation + */ + register(type: Type, implementation: ContainerImplementationTypes[Type]) { + this.implementations[type] = implementation; + } +} + +export const container = new Container(); diff --git a/packages/service-framework/src/index.ts b/packages/service-framework/src/index.ts index a679bc2e7..fcbc2b219 100644 --- a/packages/service-framework/src/index.ts +++ b/packages/service-framework/src/index.ts @@ -4,6 +4,8 @@ export * as alerts from './alerts/alerts-index.js'; export * from './codec/codec-index.js'; export * as codecs from './codec/codec-index.js'; +export * from './container.js'; + export * from './errors/errors-index.js'; export * as errors from './errors/errors-index.js'; diff --git a/packages/service-framework/src/logger/Logger.ts b/packages/service-framework/src/logger/Logger.ts index 2d9d153cb..33a9eead6 100644 --- a/packages/service-framework/src/logger/Logger.ts +++ b/packages/service-framework/src/logger/Logger.ts @@ -1,14 +1,6 @@ import winston from 'winston'; -/** - * Logger instance which is used in the entire codebase. - * This should be configured in the project which consumes the - * core package. - */ -export const logger = winston.createLogger(); - export namespace Logger { - export const instance = logger; export const development_format = winston.format.combine( winston.format.colorize({ level: true }), winston.format.simple() diff --git a/packages/service-framework/src/signals/termination-handler.ts b/packages/service-framework/src/signals/termination-handler.ts index 9edbf1995..1dd63f04c 100644 --- a/packages/service-framework/src/signals/termination-handler.ts +++ b/packages/service-framework/src/signals/termination-handler.ts @@ -1,5 +1,5 @@ import _ from 'lodash'; -import { logger } from '../logger/Logger.js'; +import { container } from '../container.js'; export enum Signal { SIGTERM = 'SIGTERM', @@ -35,14 +35,14 @@ export const createTerminationHandler = (params?: TerminationHandlerParams) => { let signal_received = false; const signalHandler = (signal: Signal) => { if (signal === Signal.SIGINT) { - logger.info('Send ^C again to force exit'); + container.logger.info('Send ^C again to force exit'); } if (signal_received) { // The SIGINT signal is sent on ctrl-c - if the user presses ctrl-c twice then we // hard exit if (signal === Signal.SIGINT) { - logger.info('Received second ^C. Exiting'); + container.logger.info('Received second ^C. 
Exiting'); process.exit(1); } return; @@ -51,24 +51,24 @@ export const createTerminationHandler = (params?: TerminationHandlerParams) => { signal_received = true; new Promise(async (resolve) => { - logger.info('Terminating gracefully ...'); + container.logger.info('Terminating gracefully ...'); for (const handler of handlers) { try { await handler(signal); } catch (err) { - logger.error('Failed to execute termination handler', err); + container.logger.error('Failed to execute termination handler', err); } } - logger.info('Exiting'); + container.logger.info('Exiting'); resolve(); }).then(() => { process.exit(0); }); setTimeout(() => { - logger.error('Timed out waiting for program to exit. Force exiting'); + container.logger.error('Timed out waiting for program to exit. Force exiting'); process.exit(1); }, timeout_ms); }; diff --git a/packages/service-framework/src/system/LifeCycledSystem.ts b/packages/service-framework/src/system/LifeCycledSystem.ts index d43f871fd..3cd77c938 100644 --- a/packages/service-framework/src/system/LifeCycledSystem.ts +++ b/packages/service-framework/src/system/LifeCycledSystem.ts @@ -6,9 +6,7 @@ * A System can contain anything but should offer a `start` and `stop` operation */ -import { createFSProbe } from '../signals/probes/fs-probes.js'; -import { ProbeModule } from '../signals/probes/probes.js'; -import { TerminationHandler, createTerminationHandler } from '../signals/termination-handler.js'; +import { container } from '../container.js'; export type LifecycleCallback = (singleton: T) => Promise | void; @@ -22,29 +20,11 @@ export type ComponentLifecycle = PartialLifecycle & { }; export type LifecycleHandler = () => ComponentLifecycle; -export type LifeCycledSystemOptions = { - /** - * Optional termination handler. Defaults to a NodeJS process listener handler - * if not provided. - */ - terminationHandler?: TerminationHandler; - - /** - * Probe handler for system readiness and liveliness state management. - * Defaults to a FileSystem based probe if not provided. - */ - probe?: ProbeModule; -}; - export abstract class LifeCycledSystem { components: ComponentLifecycle[] = []; - terminationHandler: TerminationHandler; - probe: ProbeModule; - constructor(options?: LifeCycledSystemOptions) { - this.terminationHandler = options?.terminationHandler ?? createTerminationHandler(); - this.terminationHandler.handleTerminationSignal(() => this.stop()); - this.probe = options?.probe ?? createFSProbe(); + constructor() { + container.terminationHandler.handleTerminationSignal(() => this.stop()); } withLifecycle = (component: T, lifecycle: PartialLifecycle): T => { diff --git a/service/src/entry.ts b/service/src/entry.ts index 4d85065ea..750388f4f 100644 --- a/service/src/entry.ts +++ b/service/src/entry.ts @@ -1,10 +1,10 @@ import winston from 'winston'; import { entry, utils } from '@powersync/service-core'; -import * as framework from '@powersync/service-framework'; +import { container, Logger } from '@powersync/service-framework'; // Configure logging to console -framework.logger.configure({ - format: utils.env.NODE_ENV == 'production' ? framework.Logger.production_format : framework.Logger.development_format, +container.logger.configure({ + format: utils.env.NODE_ENV == 'production' ? 
Logger.production_format : Logger.development_format, transports: [new winston.transports.Console()] }); diff --git a/service/src/runners/server.ts b/service/src/runners/server.ts index 677b6f7ea..86181449a 100644 --- a/service/src/runners/server.ts +++ b/service/src/runners/server.ts @@ -1,8 +1,8 @@ import { deserialize } from 'bson'; import fastify from 'fastify'; import cors from '@fastify/cors'; -import * as framework from '@powersync/service-framework'; import * as core from '@powersync/service-core'; +import { container, errors } from '@powersync/service-framework'; import { RSocketRequestMeta } from '@powersync/service-rsocket-router'; import { PowerSyncSystem } from '../system/PowerSyncSystem.js'; @@ -11,11 +11,13 @@ import { SocketRouter } from '../routes/router.js'; * Starts an API server */ export async function startServer(runnerConfig: core.utils.RunnerConfig) { - framework.logger.info('Booting'); + container.logger.info('Booting'); const config = await core.utils.loadConfig(runnerConfig); const system = new PowerSyncSystem(config); + const { logger } = container; + const server = fastify.fastify(); // Create a separate context for concurrency queueing @@ -80,7 +82,7 @@ export async function startServer(runnerConfig: core.utils.RunnerConfig) { const { token } = core.routes.RSocketContextMeta.decode(deserialize(data) as any); if (!token) { - throw new framework.errors.ValidationError('No token provided in context'); + throw new errors.ValidationError('No token provided in context'); } try { @@ -95,7 +97,7 @@ export async function startServer(runnerConfig: core.utils.RunnerConfig) { }; } } catch (ex) { - framework.logger.error(ex); + container.logger.error(ex); } return { @@ -110,9 +112,9 @@ export async function startServer(runnerConfig: core.utils.RunnerConfig) { payloadDecoder: async (rawData?: Buffer) => rawData && deserialize(rawData) }); - framework.logger.info('Starting system'); + logger.info('Starting system'); await system.start(); - framework.logger.info('System started'); + logger.info('System started'); core.Metrics.getInstance().configureApiMetrics(); @@ -120,18 +122,18 @@ export async function startServer(runnerConfig: core.utils.RunnerConfig) { port: system.config.port }); - system.terminationHandler.handleTerminationSignal(async () => { - framework.logger.info('Shutting down HTTP server...'); + container.terminationHandler.handleTerminationSignal(async () => { + logger.info('Shutting down HTTP server...'); await server.close(); - framework.logger.info('HTTP server stopped'); + logger.info('HTTP server stopped'); }); // MUST be after adding the termination handler above. 
// This is so that the handler is run before the server's handler, allowing streams to be interrupted on exit system.addTerminationHandler(); - framework.logger.info(`Running on port ${system.config.port}`); - await system.probe.ready(); + logger.info(`Running on port ${system.config.port}`); + await container.probes.ready(); // Enable in development to track memory usage: // trackMemoryUsage(); diff --git a/service/src/runners/stream-worker.ts b/service/src/runners/stream-worker.ts index faba8463f..4cfc63416 100644 --- a/service/src/runners/stream-worker.ts +++ b/service/src/runners/stream-worker.ts @@ -1,9 +1,11 @@ import { migrations, replication, utils, Metrics } from '@powersync/service-core'; -import { logger } from '@powersync/service-framework'; +import { container } from '@powersync/service-framework'; import { PowerSyncSystem } from '../system/PowerSyncSystem.js'; export async function startStreamWorker(runnerConfig: utils.RunnerConfig) { + const { logger } = container; + logger.info('Booting'); const config = await utils.loadConfig(runnerConfig); @@ -31,9 +33,9 @@ export async function startStreamWorker(runnerConfig: utils.RunnerConfig) { // This is so that the handler is run before the server's handler, allowing streams to be interrupted on exit system.addTerminationHandler(); - system.terminationHandler.handleTerminationSignal(async () => { + container.terminationHandler.handleTerminationSignal(async () => { await mngr.stop(); }); - await system.probe.ready(); + await container.probes.ready(); } diff --git a/service/src/system/PowerSyncSystem.ts b/service/src/system/PowerSyncSystem.ts index 249219610..93024dca7 100644 --- a/service/src/system/PowerSyncSystem.ts +++ b/service/src/system/PowerSyncSystem.ts @@ -1,14 +1,12 @@ import { db, system, utils, storage, Metrics } from '@powersync/service-core'; import * as pgwire from '@powersync/service-jpgwire'; -import { sentryErrorReporter } from '../util/alerting.js'; - export class PowerSyncSystem extends system.CorePowerSyncSystem { storage: storage.BucketStorageFactory; pgwire_pool?: pgwire.PgClient; constructor(public config: utils.ResolvedPowerSyncConfig) { - super(config, { errorReporter: sentryErrorReporter }); + super(config); utils.setTags(config.metadata); @@ -34,8 +32,7 @@ export class PowerSyncSystem extends system.CorePowerSyncSystem { }); const database = new storage.PowerSyncMongo(client, { database: config.storage.database }); this.storage = new storage.MongoBucketStorage(database, { - slot_name_prefix: config.slot_name_prefix, - errorReporter: this.errorReporter + slot_name_prefix: config.slot_name_prefix }); } else { throw new Error('No storage configured'); diff --git a/service/src/util/alerting.ts b/service/src/util/alerting.ts index 766348885..67e562873 100644 --- a/service/src/util/alerting.ts +++ b/service/src/util/alerting.ts @@ -1,7 +1,7 @@ import * as sentry_types from '@sentry/types'; import * as sentry from '@sentry/node'; import { utils } from '@powersync/service-core'; -import * as framework from '@powersync/service-framework'; +import { container, ErrorReporter } from '@powersync/service-framework'; // Generally ignore errors that are due to configuration issues, rather than // service bugs. 
@@ -21,7 +21,7 @@ const IGNORE_MESSAGES: RegExp[] = [ export const createSentryReporter = (opts?: { beforeSend?: (event: sentry_types.Event, hint: sentry_types.EventHint) => any; -}): framework.ErrorReporter => { +}): ErrorReporter => { if (process.env.SENTRY_DSN) { sentry.init({ dsn: process.env.SENTRY_DSN, @@ -31,7 +31,7 @@ export const createSentryReporter = (opts?: { beforeSend: opts?.beforeSend ? opts.beforeSend : undefined }); } else { - framework.logger.debug( + container.logger.debug( 'Alerts configured with sentry reporter but no SENTRY_DSN environment variable has been set' ); } From 458e743e383914aa2937379653b1a0062927d209 Mon Sep 17 00:00:00 2001 From: Steven Ontong Date: Thu, 20 Jun 2024 11:50:58 +0200 Subject: [PATCH 19/36] neaten folder structure --- .changeset/fuzzy-knives-brush.md | 5 ++ .changeset/shaggy-parents-join.md | 2 +- .../lib-services}/package.json | 2 +- .../lib-services}/src/alerts/alerts-index.ts | 0 .../lib-services}/src/alerts/definitions.ts | 0 .../src/alerts/no-op-reporter.ts | 0 .../lib-services}/src/codec/codec-index.ts | 0 .../lib-services}/src/codec/codecs.ts | 0 .../lib-services}/src/codec/parsers.ts | 0 .../lib-services}/src/container.ts | 0 .../lib-services}/src/errors/errors-index.ts | 0 .../src/errors/framework-errors.ts | 0 .../lib-services}/src/errors/utils.ts | 0 .../lib-services}/src/index.ts | 0 .../lib-services}/src/logger/Logger.ts | 0 .../lib-services}/src/logger/logger-index.ts | 0 .../lib-services}/src/router/endpoint.ts | 0 .../src/router/router-definitions.ts | 0 .../lib-services}/src/router/router-index.ts | 0 .../src/router/router-response.ts | 0 .../lib-services}/src/schema/definitions.ts | 0 .../src/schema/json-schema/keywords.ts | 0 .../src/schema/json-schema/parser.ts | 0 .../lib-services}/src/schema/schema-index.ts | 0 .../lib-services}/src/schema/utils.ts | 0 .../src/schema/validators/schema-validator.ts | 0 .../schema/validators/ts-codec-validator.ts | 0 .../src/signals/probes/fs-probes.ts | 0 .../src/signals/probes/memory-probes.ts | 0 .../src/signals/probes/probes.ts | 0 .../src/signals/signals-index.ts | 0 .../src/signals/termination-handler.ts | 0 .../src/system/LifeCycledSystem.ts | 0 .../lib-services}/src/system/system-index.ts | 0 .../src/utils/environment-variables.ts | 0 .../lib-services}/src/utils/utils-index.ts | 0 .../test/__snapshots__/errors.test.ts.snap | 0 .../src/__snapshots__/errors.test.ts.snap | 0 .../src/__snapshots__/parser.test.ts.snap | 0 .../schema-validation.test.ts.snap | 0 .../ts-codec-validation.test.ts.snap | 0 .../lib-services}/test/src/errors.test.ts | 0 .../lib-services}/test/src/fixtures/schema.ts | 0 .../schema/__snapshots__/parser.test.ts.snap | 0 .../schema-validation.test.ts.snap | 0 .../ts-codec-validation.test.ts.snap | 0 .../test/src/schema/parser.test.ts | 0 .../test/src/schema/schema-validation.test.ts | 0 .../src/schema/ts-codec-validation.test.ts | 0 .../lib-services}/test/tsconfig.json | 0 .../lib-services}/tsconfig.json | 0 packages/rsocket-router/package.json | 2 +- .../src/router/ReactiveSocketRouter.ts | 2 +- .../transport/WebSocketServerTransport.ts | 2 +- .../transport/WebsocketDuplexConnection.ts | 2 +- packages/rsocket-router/src/router/types.ts | 2 +- packages/rsocket-router/tsconfig.json | 2 +- packages/service-core/package.json | 2 +- packages/service-core/src/api/diagnostics.ts | 2 +- packages/service-core/src/auth/KeyStore.ts | 2 +- packages/service-core/src/entry/cli-entry.ts | 2 +- packages/service-core/src/metrics/Metrics.ts | 2 +- 
.../service-core/src/migrations/executor.ts | 2 +- .../service-core/src/replication/WalStream.ts | 2 +- .../src/replication/WalStreamManager.ts | 2 +- .../src/replication/WalStreamRunner.ts | 2 +- .../src/routes/endpoints/admin.ts | 2 +- .../src/routes/endpoints/checkpointing.ts | 2 +- .../service-core/src/routes/endpoints/dev.ts | 2 +- .../src/routes/endpoints/socket-route.ts | 2 +- .../src/routes/endpoints/sync-rules.ts | 2 +- .../src/routes/endpoints/sync-stream.ts | 2 +- packages/service-core/src/routes/hooks.ts | 2 +- .../service-core/src/routes/route-register.ts | 2 +- packages/service-core/src/routes/router.ts | 2 +- packages/service-core/src/runner/teardown.ts | 2 +- .../src/storage/MongoBucketStorage.ts | 2 +- .../src/storage/mongo/MongoBucketBatch.ts | 2 +- .../src/storage/mongo/MongoSyncRulesLock.ts | 2 +- .../src/storage/mongo/PersistedBatch.ts | 2 +- packages/service-core/src/sync/sync.ts | 2 +- .../src/system/CorePowerSyncSystem.ts | 2 +- .../config/collectors/config-collector.ts | 2 +- .../impl/filesystem-config-collector.ts | 2 +- .../util/config/compound-config-collector.ts | 2 +- packages/service-core/src/util/env.ts | 2 +- .../service-core/src/util/memory-tracking.ts | 2 +- .../service-core/src/util/pgwire_utils.ts | 2 +- packages/service-core/src/util/utils.ts | 2 +- packages/service-core/test/src/env.ts | 2 +- .../service-core/test/src/slow_tests.test.ts | 2 +- packages/service-core/test/src/util.ts | 2 +- .../service-core/test/src/wal_stream_utils.ts | 2 +- packages/service-core/tsconfig.json | 2 +- pnpm-lock.yaml | 86 +++++++++---------- pnpm-workspace.yaml | 1 + service/package.json | 2 +- service/src/entry.ts | 2 +- service/src/runners/server.ts | 2 +- service/src/runners/stream-worker.ts | 2 +- service/src/util/alerting.ts | 2 +- service/tsconfig.json | 2 +- tsconfig.json | 2 +- 103 files changed, 101 insertions(+), 95 deletions(-) create mode 100644 .changeset/fuzzy-knives-brush.md rename {packages/service-framework => libs/lib-services}/package.json (94%) rename {packages/service-framework => libs/lib-services}/src/alerts/alerts-index.ts (100%) rename {packages/service-framework => libs/lib-services}/src/alerts/definitions.ts (100%) rename {packages/service-framework => libs/lib-services}/src/alerts/no-op-reporter.ts (100%) rename {packages/service-framework => libs/lib-services}/src/codec/codec-index.ts (100%) rename {packages/service-framework => libs/lib-services}/src/codec/codecs.ts (100%) rename {packages/service-framework => libs/lib-services}/src/codec/parsers.ts (100%) rename {packages/service-framework => libs/lib-services}/src/container.ts (100%) rename {packages/service-framework => libs/lib-services}/src/errors/errors-index.ts (100%) rename {packages/service-framework => libs/lib-services}/src/errors/framework-errors.ts (100%) rename {packages/service-framework => libs/lib-services}/src/errors/utils.ts (100%) rename {packages/service-framework => libs/lib-services}/src/index.ts (100%) rename {packages/service-framework => libs/lib-services}/src/logger/Logger.ts (100%) rename {packages/service-framework => libs/lib-services}/src/logger/logger-index.ts (100%) rename {packages/service-framework => libs/lib-services}/src/router/endpoint.ts (100%) rename {packages/service-framework => libs/lib-services}/src/router/router-definitions.ts (100%) rename {packages/service-framework => libs/lib-services}/src/router/router-index.ts (100%) rename {packages/service-framework => libs/lib-services}/src/router/router-response.ts (100%) rename 
{packages/service-framework => libs/lib-services}/src/schema/definitions.ts (100%) rename {packages/service-framework => libs/lib-services}/src/schema/json-schema/keywords.ts (100%) rename {packages/service-framework => libs/lib-services}/src/schema/json-schema/parser.ts (100%) rename {packages/service-framework => libs/lib-services}/src/schema/schema-index.ts (100%) rename {packages/service-framework => libs/lib-services}/src/schema/utils.ts (100%) rename {packages/service-framework => libs/lib-services}/src/schema/validators/schema-validator.ts (100%) rename {packages/service-framework => libs/lib-services}/src/schema/validators/ts-codec-validator.ts (100%) rename {packages/service-framework => libs/lib-services}/src/signals/probes/fs-probes.ts (100%) rename {packages/service-framework => libs/lib-services}/src/signals/probes/memory-probes.ts (100%) rename {packages/service-framework => libs/lib-services}/src/signals/probes/probes.ts (100%) rename {packages/service-framework => libs/lib-services}/src/signals/signals-index.ts (100%) rename {packages/service-framework => libs/lib-services}/src/signals/termination-handler.ts (100%) rename {packages/service-framework => libs/lib-services}/src/system/LifeCycledSystem.ts (100%) rename {packages/service-framework => libs/lib-services}/src/system/system-index.ts (100%) rename {packages/service-framework => libs/lib-services}/src/utils/environment-variables.ts (100%) rename {packages/service-framework => libs/lib-services}/src/utils/utils-index.ts (100%) rename {packages/service-framework => libs/lib-services}/test/__snapshots__/errors.test.ts.snap (100%) rename {packages/service-framework => libs/lib-services}/test/src/__snapshots__/errors.test.ts.snap (100%) rename {packages/service-framework => libs/lib-services}/test/src/__snapshots__/parser.test.ts.snap (100%) rename {packages/service-framework => libs/lib-services}/test/src/__snapshots__/schema-validation.test.ts.snap (100%) rename {packages/service-framework => libs/lib-services}/test/src/__snapshots__/ts-codec-validation.test.ts.snap (100%) rename {packages/service-framework => libs/lib-services}/test/src/errors.test.ts (100%) rename {packages/service-framework => libs/lib-services}/test/src/fixtures/schema.ts (100%) rename {packages/service-framework => libs/lib-services}/test/src/schema/__snapshots__/parser.test.ts.snap (100%) rename {packages/service-framework => libs/lib-services}/test/src/schema/__snapshots__/schema-validation.test.ts.snap (100%) rename {packages/service-framework => libs/lib-services}/test/src/schema/__snapshots__/ts-codec-validation.test.ts.snap (100%) rename {packages/service-framework => libs/lib-services}/test/src/schema/parser.test.ts (100%) rename {packages/service-framework => libs/lib-services}/test/src/schema/schema-validation.test.ts (100%) rename {packages/service-framework => libs/lib-services}/test/src/schema/ts-codec-validation.test.ts (100%) rename {packages/service-framework => libs/lib-services}/test/tsconfig.json (100%) rename {packages/service-framework => libs/lib-services}/tsconfig.json (100%) diff --git a/.changeset/fuzzy-knives-brush.md b/.changeset/fuzzy-knives-brush.md new file mode 100644 index 000000000..1e86ad4e3 --- /dev/null +++ b/.changeset/fuzzy-knives-brush.md @@ -0,0 +1,5 @@ +--- +'@powersync/lib-services-framework': minor +--- + +Initial release for service utillities diff --git a/.changeset/shaggy-parents-join.md b/.changeset/shaggy-parents-join.md index a9208585f..a2adeedb1 100644 --- a/.changeset/shaggy-parents-join.md +++ 
b/.changeset/shaggy-parents-join.md @@ -2,4 +2,4 @@ '@powersync/service-core': minor --- -Updated router definitions +Removed dependency for restricted packages diff --git a/packages/service-framework/package.json b/libs/lib-services/package.json similarity index 94% rename from packages/service-framework/package.json rename to libs/lib-services/package.json index 366e348c7..e26e8bcb8 100644 --- a/packages/service-framework/package.json +++ b/libs/lib-services/package.json @@ -1,5 +1,5 @@ { - "name": "@powersync/service-framework", + "name": "@powersync/lib-services-framework", "repository": "https://github.com/powersync-ja/powersync-service", "version": "0.0.1", "main": "dist/index.js", diff --git a/packages/service-framework/src/alerts/alerts-index.ts b/libs/lib-services/src/alerts/alerts-index.ts similarity index 100% rename from packages/service-framework/src/alerts/alerts-index.ts rename to libs/lib-services/src/alerts/alerts-index.ts diff --git a/packages/service-framework/src/alerts/definitions.ts b/libs/lib-services/src/alerts/definitions.ts similarity index 100% rename from packages/service-framework/src/alerts/definitions.ts rename to libs/lib-services/src/alerts/definitions.ts diff --git a/packages/service-framework/src/alerts/no-op-reporter.ts b/libs/lib-services/src/alerts/no-op-reporter.ts similarity index 100% rename from packages/service-framework/src/alerts/no-op-reporter.ts rename to libs/lib-services/src/alerts/no-op-reporter.ts diff --git a/packages/service-framework/src/codec/codec-index.ts b/libs/lib-services/src/codec/codec-index.ts similarity index 100% rename from packages/service-framework/src/codec/codec-index.ts rename to libs/lib-services/src/codec/codec-index.ts diff --git a/packages/service-framework/src/codec/codecs.ts b/libs/lib-services/src/codec/codecs.ts similarity index 100% rename from packages/service-framework/src/codec/codecs.ts rename to libs/lib-services/src/codec/codecs.ts diff --git a/packages/service-framework/src/codec/parsers.ts b/libs/lib-services/src/codec/parsers.ts similarity index 100% rename from packages/service-framework/src/codec/parsers.ts rename to libs/lib-services/src/codec/parsers.ts diff --git a/packages/service-framework/src/container.ts b/libs/lib-services/src/container.ts similarity index 100% rename from packages/service-framework/src/container.ts rename to libs/lib-services/src/container.ts diff --git a/packages/service-framework/src/errors/errors-index.ts b/libs/lib-services/src/errors/errors-index.ts similarity index 100% rename from packages/service-framework/src/errors/errors-index.ts rename to libs/lib-services/src/errors/errors-index.ts diff --git a/packages/service-framework/src/errors/framework-errors.ts b/libs/lib-services/src/errors/framework-errors.ts similarity index 100% rename from packages/service-framework/src/errors/framework-errors.ts rename to libs/lib-services/src/errors/framework-errors.ts diff --git a/packages/service-framework/src/errors/utils.ts b/libs/lib-services/src/errors/utils.ts similarity index 100% rename from packages/service-framework/src/errors/utils.ts rename to libs/lib-services/src/errors/utils.ts diff --git a/packages/service-framework/src/index.ts b/libs/lib-services/src/index.ts similarity index 100% rename from packages/service-framework/src/index.ts rename to libs/lib-services/src/index.ts diff --git a/packages/service-framework/src/logger/Logger.ts b/libs/lib-services/src/logger/Logger.ts similarity index 100% rename from packages/service-framework/src/logger/Logger.ts rename to 
libs/lib-services/src/logger/Logger.ts diff --git a/packages/service-framework/src/logger/logger-index.ts b/libs/lib-services/src/logger/logger-index.ts similarity index 100% rename from packages/service-framework/src/logger/logger-index.ts rename to libs/lib-services/src/logger/logger-index.ts diff --git a/packages/service-framework/src/router/endpoint.ts b/libs/lib-services/src/router/endpoint.ts similarity index 100% rename from packages/service-framework/src/router/endpoint.ts rename to libs/lib-services/src/router/endpoint.ts diff --git a/packages/service-framework/src/router/router-definitions.ts b/libs/lib-services/src/router/router-definitions.ts similarity index 100% rename from packages/service-framework/src/router/router-definitions.ts rename to libs/lib-services/src/router/router-definitions.ts diff --git a/packages/service-framework/src/router/router-index.ts b/libs/lib-services/src/router/router-index.ts similarity index 100% rename from packages/service-framework/src/router/router-index.ts rename to libs/lib-services/src/router/router-index.ts diff --git a/packages/service-framework/src/router/router-response.ts b/libs/lib-services/src/router/router-response.ts similarity index 100% rename from packages/service-framework/src/router/router-response.ts rename to libs/lib-services/src/router/router-response.ts diff --git a/packages/service-framework/src/schema/definitions.ts b/libs/lib-services/src/schema/definitions.ts similarity index 100% rename from packages/service-framework/src/schema/definitions.ts rename to libs/lib-services/src/schema/definitions.ts diff --git a/packages/service-framework/src/schema/json-schema/keywords.ts b/libs/lib-services/src/schema/json-schema/keywords.ts similarity index 100% rename from packages/service-framework/src/schema/json-schema/keywords.ts rename to libs/lib-services/src/schema/json-schema/keywords.ts diff --git a/packages/service-framework/src/schema/json-schema/parser.ts b/libs/lib-services/src/schema/json-schema/parser.ts similarity index 100% rename from packages/service-framework/src/schema/json-schema/parser.ts rename to libs/lib-services/src/schema/json-schema/parser.ts diff --git a/packages/service-framework/src/schema/schema-index.ts b/libs/lib-services/src/schema/schema-index.ts similarity index 100% rename from packages/service-framework/src/schema/schema-index.ts rename to libs/lib-services/src/schema/schema-index.ts diff --git a/packages/service-framework/src/schema/utils.ts b/libs/lib-services/src/schema/utils.ts similarity index 100% rename from packages/service-framework/src/schema/utils.ts rename to libs/lib-services/src/schema/utils.ts diff --git a/packages/service-framework/src/schema/validators/schema-validator.ts b/libs/lib-services/src/schema/validators/schema-validator.ts similarity index 100% rename from packages/service-framework/src/schema/validators/schema-validator.ts rename to libs/lib-services/src/schema/validators/schema-validator.ts diff --git a/packages/service-framework/src/schema/validators/ts-codec-validator.ts b/libs/lib-services/src/schema/validators/ts-codec-validator.ts similarity index 100% rename from packages/service-framework/src/schema/validators/ts-codec-validator.ts rename to libs/lib-services/src/schema/validators/ts-codec-validator.ts diff --git a/packages/service-framework/src/signals/probes/fs-probes.ts b/libs/lib-services/src/signals/probes/fs-probes.ts similarity index 100% rename from packages/service-framework/src/signals/probes/fs-probes.ts rename to 
libs/lib-services/src/signals/probes/fs-probes.ts diff --git a/packages/service-framework/src/signals/probes/memory-probes.ts b/libs/lib-services/src/signals/probes/memory-probes.ts similarity index 100% rename from packages/service-framework/src/signals/probes/memory-probes.ts rename to libs/lib-services/src/signals/probes/memory-probes.ts diff --git a/packages/service-framework/src/signals/probes/probes.ts b/libs/lib-services/src/signals/probes/probes.ts similarity index 100% rename from packages/service-framework/src/signals/probes/probes.ts rename to libs/lib-services/src/signals/probes/probes.ts diff --git a/packages/service-framework/src/signals/signals-index.ts b/libs/lib-services/src/signals/signals-index.ts similarity index 100% rename from packages/service-framework/src/signals/signals-index.ts rename to libs/lib-services/src/signals/signals-index.ts diff --git a/packages/service-framework/src/signals/termination-handler.ts b/libs/lib-services/src/signals/termination-handler.ts similarity index 100% rename from packages/service-framework/src/signals/termination-handler.ts rename to libs/lib-services/src/signals/termination-handler.ts diff --git a/packages/service-framework/src/system/LifeCycledSystem.ts b/libs/lib-services/src/system/LifeCycledSystem.ts similarity index 100% rename from packages/service-framework/src/system/LifeCycledSystem.ts rename to libs/lib-services/src/system/LifeCycledSystem.ts diff --git a/packages/service-framework/src/system/system-index.ts b/libs/lib-services/src/system/system-index.ts similarity index 100% rename from packages/service-framework/src/system/system-index.ts rename to libs/lib-services/src/system/system-index.ts diff --git a/packages/service-framework/src/utils/environment-variables.ts b/libs/lib-services/src/utils/environment-variables.ts similarity index 100% rename from packages/service-framework/src/utils/environment-variables.ts rename to libs/lib-services/src/utils/environment-variables.ts diff --git a/packages/service-framework/src/utils/utils-index.ts b/libs/lib-services/src/utils/utils-index.ts similarity index 100% rename from packages/service-framework/src/utils/utils-index.ts rename to libs/lib-services/src/utils/utils-index.ts diff --git a/packages/service-framework/test/__snapshots__/errors.test.ts.snap b/libs/lib-services/test/__snapshots__/errors.test.ts.snap similarity index 100% rename from packages/service-framework/test/__snapshots__/errors.test.ts.snap rename to libs/lib-services/test/__snapshots__/errors.test.ts.snap diff --git a/packages/service-framework/test/src/__snapshots__/errors.test.ts.snap b/libs/lib-services/test/src/__snapshots__/errors.test.ts.snap similarity index 100% rename from packages/service-framework/test/src/__snapshots__/errors.test.ts.snap rename to libs/lib-services/test/src/__snapshots__/errors.test.ts.snap diff --git a/packages/service-framework/test/src/__snapshots__/parser.test.ts.snap b/libs/lib-services/test/src/__snapshots__/parser.test.ts.snap similarity index 100% rename from packages/service-framework/test/src/__snapshots__/parser.test.ts.snap rename to libs/lib-services/test/src/__snapshots__/parser.test.ts.snap diff --git a/packages/service-framework/test/src/__snapshots__/schema-validation.test.ts.snap b/libs/lib-services/test/src/__snapshots__/schema-validation.test.ts.snap similarity index 100% rename from packages/service-framework/test/src/__snapshots__/schema-validation.test.ts.snap rename to libs/lib-services/test/src/__snapshots__/schema-validation.test.ts.snap diff --git 
a/packages/service-framework/test/src/__snapshots__/ts-codec-validation.test.ts.snap b/libs/lib-services/test/src/__snapshots__/ts-codec-validation.test.ts.snap similarity index 100% rename from packages/service-framework/test/src/__snapshots__/ts-codec-validation.test.ts.snap rename to libs/lib-services/test/src/__snapshots__/ts-codec-validation.test.ts.snap diff --git a/packages/service-framework/test/src/errors.test.ts b/libs/lib-services/test/src/errors.test.ts similarity index 100% rename from packages/service-framework/test/src/errors.test.ts rename to libs/lib-services/test/src/errors.test.ts diff --git a/packages/service-framework/test/src/fixtures/schema.ts b/libs/lib-services/test/src/fixtures/schema.ts similarity index 100% rename from packages/service-framework/test/src/fixtures/schema.ts rename to libs/lib-services/test/src/fixtures/schema.ts diff --git a/packages/service-framework/test/src/schema/__snapshots__/parser.test.ts.snap b/libs/lib-services/test/src/schema/__snapshots__/parser.test.ts.snap similarity index 100% rename from packages/service-framework/test/src/schema/__snapshots__/parser.test.ts.snap rename to libs/lib-services/test/src/schema/__snapshots__/parser.test.ts.snap diff --git a/packages/service-framework/test/src/schema/__snapshots__/schema-validation.test.ts.snap b/libs/lib-services/test/src/schema/__snapshots__/schema-validation.test.ts.snap similarity index 100% rename from packages/service-framework/test/src/schema/__snapshots__/schema-validation.test.ts.snap rename to libs/lib-services/test/src/schema/__snapshots__/schema-validation.test.ts.snap diff --git a/packages/service-framework/test/src/schema/__snapshots__/ts-codec-validation.test.ts.snap b/libs/lib-services/test/src/schema/__snapshots__/ts-codec-validation.test.ts.snap similarity index 100% rename from packages/service-framework/test/src/schema/__snapshots__/ts-codec-validation.test.ts.snap rename to libs/lib-services/test/src/schema/__snapshots__/ts-codec-validation.test.ts.snap diff --git a/packages/service-framework/test/src/schema/parser.test.ts b/libs/lib-services/test/src/schema/parser.test.ts similarity index 100% rename from packages/service-framework/test/src/schema/parser.test.ts rename to libs/lib-services/test/src/schema/parser.test.ts diff --git a/packages/service-framework/test/src/schema/schema-validation.test.ts b/libs/lib-services/test/src/schema/schema-validation.test.ts similarity index 100% rename from packages/service-framework/test/src/schema/schema-validation.test.ts rename to libs/lib-services/test/src/schema/schema-validation.test.ts diff --git a/packages/service-framework/test/src/schema/ts-codec-validation.test.ts b/libs/lib-services/test/src/schema/ts-codec-validation.test.ts similarity index 100% rename from packages/service-framework/test/src/schema/ts-codec-validation.test.ts rename to libs/lib-services/test/src/schema/ts-codec-validation.test.ts diff --git a/packages/service-framework/test/tsconfig.json b/libs/lib-services/test/tsconfig.json similarity index 100% rename from packages/service-framework/test/tsconfig.json rename to libs/lib-services/test/tsconfig.json diff --git a/packages/service-framework/tsconfig.json b/libs/lib-services/tsconfig.json similarity index 100% rename from packages/service-framework/tsconfig.json rename to libs/lib-services/tsconfig.json diff --git a/packages/rsocket-router/package.json b/packages/rsocket-router/package.json index 2bd346948..af60be92e 100644 --- a/packages/rsocket-router/package.json +++ 
b/packages/rsocket-router/package.json @@ -18,7 +18,7 @@ "test": "vitest" }, "dependencies": { - "@powersync/service-framework": "workspace:*", + "@powersync/lib-services-framework": "workspace:*", "rsocket-core": "1.0.0-alpha.3", "ts-codec": "^1.2.2", "uuid": "^9.0.1", diff --git a/packages/rsocket-router/src/router/ReactiveSocketRouter.ts b/packages/rsocket-router/src/router/ReactiveSocketRouter.ts index fc4496e24..77610f6b0 100644 --- a/packages/rsocket-router/src/router/ReactiveSocketRouter.ts +++ b/packages/rsocket-router/src/router/ReactiveSocketRouter.ts @@ -16,7 +16,7 @@ import { SocketResponder } from './types.js'; import { WebsocketServerTransport } from './transport/WebSocketServerTransport.js'; -import { container, errors } from '@powersync/service-framework'; +import { container, errors } from '@powersync/lib-services-framework'; export class ReactiveSocketRouter { protected activeConnections: number; diff --git a/packages/rsocket-router/src/router/transport/WebSocketServerTransport.ts b/packages/rsocket-router/src/router/transport/WebSocketServerTransport.ts index bceefa2c6..28b86518c 100644 --- a/packages/rsocket-router/src/router/transport/WebSocketServerTransport.ts +++ b/packages/rsocket-router/src/router/transport/WebSocketServerTransport.ts @@ -28,7 +28,7 @@ import { } from 'rsocket-core'; import * as WebSocket from 'ws'; import { WebsocketDuplexConnection } from './WebsocketDuplexConnection.js'; -import { container } from '@powersync/service-framework'; +import { container } from '@powersync/lib-services-framework'; export type SocketFactory = (options: SocketOptions) => WebSocket.WebSocketServer; diff --git a/packages/rsocket-router/src/router/transport/WebsocketDuplexConnection.ts b/packages/rsocket-router/src/router/transport/WebsocketDuplexConnection.ts index 1e6db83b2..059cf4684 100644 --- a/packages/rsocket-router/src/router/transport/WebsocketDuplexConnection.ts +++ b/packages/rsocket-router/src/router/transport/WebsocketDuplexConnection.ts @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -import { container } from '@powersync/service-framework'; +import { container } from '@powersync/lib-services-framework'; import { Closeable, Deferred, diff --git a/packages/rsocket-router/src/router/types.ts b/packages/rsocket-router/src/router/types.ts index cee12ac00..32ed940f8 100644 --- a/packages/rsocket-router/src/router/types.ts +++ b/packages/rsocket-router/src/router/types.ts @@ -1,5 +1,5 @@ import * as t from 'ts-codec'; -import { router } from '@powersync/service-framework'; +import { router } from '@powersync/lib-services-framework'; import { OnExtensionSubscriber, OnNextSubscriber, OnTerminalSubscriber } from 'rsocket-core'; diff --git a/packages/rsocket-router/tsconfig.json b/packages/rsocket-router/tsconfig.json index 9d9458e4d..fbc2f5400 100644 --- a/packages/rsocket-router/tsconfig.json +++ b/packages/rsocket-router/tsconfig.json @@ -8,7 +8,7 @@ "include": ["src"], "references": [ { - "path": "../service-framework" + "path": "../../libs/lib-services" } ] } diff --git a/packages/service-core/package.json b/packages/service-core/package.json index 494a9a10f..65468ccb5 100644 --- a/packages/service-core/package.json +++ b/packages/service-core/package.json @@ -22,7 +22,7 @@ "@opentelemetry/exporter-prometheus": "^0.51.1", "@opentelemetry/resources": "^1.24.1", "@opentelemetry/sdk-metrics": "1.24.1", - "@powersync/service-framework": "workspace:*", + "@powersync/lib-services-framework": "workspace:*", "@powersync/service-jpgwire": "workspace:*", "@powersync/service-jsonbig": "workspace:*", "@powersync/service-rsocket-router": "workspace:*", diff --git a/packages/service-core/src/api/diagnostics.ts b/packages/service-core/src/api/diagnostics.ts index 91fefd121..87a9a27aa 100644 --- a/packages/service-core/src/api/diagnostics.ts +++ b/packages/service-core/src/api/diagnostics.ts @@ -7,7 +7,7 @@ import * as storage from '../storage/storage-index.js'; import * as util from '../util/util-index.js'; import { CorePowerSyncSystem } from '../system/CorePowerSyncSystem.js'; -import { container } from '@powersync/service-framework'; +import { container } from '@powersync/lib-services-framework'; export async function getConnectionStatus(system: CorePowerSyncSystem): Promise { if (system.pgwire_pool == null) { diff --git a/packages/service-core/src/auth/KeyStore.ts b/packages/service-core/src/auth/KeyStore.ts index 3e05a21f3..9fbaf8417 100644 --- a/packages/service-core/src/auth/KeyStore.ts +++ b/packages/service-core/src/auth/KeyStore.ts @@ -3,7 +3,7 @@ import secs from '../util/secs.js'; import { KeyOptions, KeySpec, SUPPORTED_ALGORITHMS } from './KeySpec.js'; import { KeyCollector } from './KeyCollector.js'; import { JwtPayload } from './JwtPayload.js'; -import { container } from '@powersync/service-framework'; +import { container } from '@powersync/lib-services-framework'; /** * KeyStore to get keys and verify tokens. 
diff --git a/packages/service-core/src/entry/cli-entry.ts b/packages/service-core/src/entry/cli-entry.ts index 93b053d25..31026bd19 100644 --- a/packages/service-core/src/entry/cli-entry.ts +++ b/packages/service-core/src/entry/cli-entry.ts @@ -4,7 +4,7 @@ import * as utils from '../util/util-index.js'; import { registerMigrationAction } from './commands/migrate-action.js'; import { registerTearDownAction } from './commands/teardown-action.js'; import { registerStartAction } from './entry-index.js'; -import { container } from '@powersync/service-framework'; +import { container } from '@powersync/lib-services-framework'; /** * Generates a Commander program which serves as the entry point diff --git a/packages/service-core/src/metrics/Metrics.ts b/packages/service-core/src/metrics/Metrics.ts index fb260ce31..1b3a0f13a 100644 --- a/packages/service-core/src/metrics/Metrics.ts +++ b/packages/service-core/src/metrics/Metrics.ts @@ -7,7 +7,7 @@ import * as util from '../util/util-index.js'; import * as storage from '../storage/storage-index.js'; import { CorePowerSyncSystem } from '../system/CorePowerSyncSystem.js'; import { Resource } from '@opentelemetry/resources'; -import { container } from '@powersync/service-framework'; +import { container } from '@powersync/lib-services-framework'; export interface MetricsOptions { disable_telemetry_sharing: boolean; diff --git a/packages/service-core/src/migrations/executor.ts b/packages/service-core/src/migrations/executor.ts index a281ec8c4..39c3f4c51 100644 --- a/packages/service-core/src/migrations/executor.ts +++ b/packages/service-core/src/migrations/executor.ts @@ -1,6 +1,6 @@ import * as defs from './definitions.js'; import { MigrationStore } from './store/migration-store.js'; -import { container } from '@powersync/service-framework'; +import { container } from '@powersync/lib-services-framework'; type ExecuteParams = { migrations: defs.Migration[]; diff --git a/packages/service-core/src/replication/WalStream.ts b/packages/service-core/src/replication/WalStream.ts index 2e8271a41..e764732ca 100644 --- a/packages/service-core/src/replication/WalStream.ts +++ b/packages/service-core/src/replication/WalStream.ts @@ -1,5 +1,5 @@ import * as pgwire from '@powersync/service-jpgwire'; -import { container, errors } from '@powersync/service-framework'; +import { container, errors } from '@powersync/lib-services-framework'; import { SqliteRow, SqlSyncRules, TablePattern, toSyncRulesRow } from '@powersync/service-sync-rules'; import * as storage from '../storage/storage-index.js'; diff --git a/packages/service-core/src/replication/WalStreamManager.ts b/packages/service-core/src/replication/WalStreamManager.ts index 7f4b19bb6..21b4fcf33 100644 --- a/packages/service-core/src/replication/WalStreamManager.ts +++ b/packages/service-core/src/replication/WalStreamManager.ts @@ -7,7 +7,7 @@ import * as util from '../util/util-index.js'; import { DefaultErrorRateLimiter } from './ErrorRateLimiter.js'; import { WalStreamRunner } from './WalStreamRunner.js'; import { CorePowerSyncSystem } from '../system/CorePowerSyncSystem.js'; -import { container } from '@powersync/service-framework'; +import { container } from '@powersync/lib-services-framework'; // 5 minutes const PING_INTERVAL = 1_000_000_000n * 300n; diff --git a/packages/service-core/src/replication/WalStreamRunner.ts b/packages/service-core/src/replication/WalStreamRunner.ts index 5b11fbace..6d866f2de 100644 --- a/packages/service-core/src/replication/WalStreamRunner.ts +++ 
b/packages/service-core/src/replication/WalStreamRunner.ts @@ -6,7 +6,7 @@ import * as util from '../util/util-index.js'; import { ErrorRateLimiter } from './ErrorRateLimiter.js'; import { MissingReplicationSlotError, WalStream } from './WalStream.js'; import { ResolvedConnection } from '../util/config/types.js'; -import { container } from '@powersync/service-framework'; +import { container } from '@powersync/lib-services-framework'; export interface WalStreamRunnerOptions { factory: storage.BucketStorageFactory; diff --git a/packages/service-core/src/routes/endpoints/admin.ts b/packages/service-core/src/routes/endpoints/admin.ts index b2f044352..616c47a02 100644 --- a/packages/service-core/src/routes/endpoints/admin.ts +++ b/packages/service-core/src/routes/endpoints/admin.ts @@ -1,4 +1,4 @@ -import { errors, router, schema } from '@powersync/service-framework'; +import { errors, router, schema } from '@powersync/lib-services-framework'; import { SqlSyncRules, SqliteValue, StaticSchema, isJsonValue, toSyncRulesValue } from '@powersync/service-sync-rules'; import { internal_routes } from '@powersync/service-types'; diff --git a/packages/service-core/src/routes/endpoints/checkpointing.ts b/packages/service-core/src/routes/endpoints/checkpointing.ts index 14d6aebb2..a2664ed91 100644 --- a/packages/service-core/src/routes/endpoints/checkpointing.ts +++ b/packages/service-core/src/routes/endpoints/checkpointing.ts @@ -1,5 +1,5 @@ import * as t from 'ts-codec'; -import { router, schema } from '@powersync/service-framework'; +import { router, schema } from '@powersync/lib-services-framework'; import * as util from '../../util/util-index.js'; import { authUser } from '../auth.js'; diff --git a/packages/service-core/src/routes/endpoints/dev.ts b/packages/service-core/src/routes/endpoints/dev.ts index b7f28c968..9ecafea05 100644 --- a/packages/service-core/src/routes/endpoints/dev.ts +++ b/packages/service-core/src/routes/endpoints/dev.ts @@ -1,6 +1,6 @@ import * as t from 'ts-codec'; import * as pgwire from '@powersync/service-jpgwire'; -import { errors, router, schema } from '@powersync/service-framework'; +import { errors, router, schema } from '@powersync/lib-services-framework'; import * as util from '../../util/util-index.js'; import { authDevUser, authUser, endpoint, issueDevToken, issueLegacyDevToken, issuePowerSyncToken } from '../auth.js'; diff --git a/packages/service-core/src/routes/endpoints/socket-route.ts b/packages/service-core/src/routes/endpoints/socket-route.ts index 3aa785b55..576a58735 100644 --- a/packages/service-core/src/routes/endpoints/socket-route.ts +++ b/packages/service-core/src/routes/endpoints/socket-route.ts @@ -1,6 +1,6 @@ import { serialize } from 'bson'; import { SyncParameters, normalizeTokenParameters } from '@powersync/service-sync-rules'; -import { container, errors, schema } from '@powersync/service-framework'; +import { container, errors, schema } from '@powersync/lib-services-framework'; import * as util from '../../util/util-index.js'; import { streamResponse } from '../../sync/sync.js'; diff --git a/packages/service-core/src/routes/endpoints/sync-rules.ts b/packages/service-core/src/routes/endpoints/sync-rules.ts index e6d786a6d..916f9ca22 100644 --- a/packages/service-core/src/routes/endpoints/sync-rules.ts +++ b/packages/service-core/src/routes/endpoints/sync-rules.ts @@ -1,7 +1,7 @@ import * as t from 'ts-codec'; import { FastifyPluginAsync } from 'fastify'; import * as pgwire from '@powersync/service-jpgwire'; -import { errors, router, schema } from 
'@powersync/service-framework'; +import { errors, router, schema } from '@powersync/lib-services-framework'; import { SqlSyncRules, SyncRulesErrors } from '@powersync/service-sync-rules'; import * as replication from '../../replication/replication-index.js'; diff --git a/packages/service-core/src/routes/endpoints/sync-stream.ts b/packages/service-core/src/routes/endpoints/sync-stream.ts index 2ecf194d9..c7e30eedc 100644 --- a/packages/service-core/src/routes/endpoints/sync-stream.ts +++ b/packages/service-core/src/routes/endpoints/sync-stream.ts @@ -1,6 +1,6 @@ import { Readable } from 'stream'; import { SyncParameters, normalizeTokenParameters } from '@powersync/service-sync-rules'; -import { container, errors, router, schema } from '@powersync/service-framework'; +import { container, errors, router, schema } from '@powersync/lib-services-framework'; import * as sync from '../../sync/sync-index.js'; import * as util from '../../util/util-index.js'; diff --git a/packages/service-core/src/routes/hooks.ts b/packages/service-core/src/routes/hooks.ts index 984d56a05..2c4a641fb 100644 --- a/packages/service-core/src/routes/hooks.ts +++ b/packages/service-core/src/routes/hooks.ts @@ -1,7 +1,7 @@ import type fastify from 'fastify'; import a from 'async'; -import { container } from '@powersync/service-framework'; +import { container } from '@powersync/lib-services-framework'; export type CreateRequestQueueParams = { max_queue_depth: number; diff --git a/packages/service-core/src/routes/route-register.ts b/packages/service-core/src/routes/route-register.ts index ac2011105..df95753d3 100644 --- a/packages/service-core/src/routes/route-register.ts +++ b/packages/service-core/src/routes/route-register.ts @@ -1,6 +1,6 @@ import fastify from 'fastify'; -import { container, errors, router, HTTPMethod } from '@powersync/service-framework'; +import { container, errors, router, HTTPMethod } from '@powersync/lib-services-framework'; import { Context, ContextProvider, RequestEndpoint, RequestEndpointHandlerPayload } from './router.js'; export type FastifyEndpoint = RequestEndpoint & { diff --git a/packages/service-core/src/routes/router.ts b/packages/service-core/src/routes/router.ts index 6972fff1a..6eae50531 100644 --- a/packages/service-core/src/routes/router.ts +++ b/packages/service-core/src/routes/router.ts @@ -1,4 +1,4 @@ -import { router } from '@powersync/service-framework'; +import { router } from '@powersync/lib-services-framework'; import * as auth from '../auth/auth-index.js'; import { CorePowerSyncSystem } from '../system/CorePowerSyncSystem.js'; diff --git a/packages/service-core/src/runner/teardown.ts b/packages/service-core/src/runner/teardown.ts index 0cf8a9495..a964fa2f7 100644 --- a/packages/service-core/src/runner/teardown.ts +++ b/packages/service-core/src/runner/teardown.ts @@ -5,7 +5,7 @@ import * as timers from 'timers/promises'; -import { container } from '@powersync/service-framework'; +import { container } from '@powersync/lib-services-framework'; import * as db from '../db/db-index.js'; import * as storage from '../storage/storage-index.js'; diff --git a/packages/service-core/src/storage/MongoBucketStorage.ts b/packages/service-core/src/storage/MongoBucketStorage.ts index b3fde19b7..1fab35d78 100644 --- a/packages/service-core/src/storage/MongoBucketStorage.ts +++ b/packages/service-core/src/storage/MongoBucketStorage.ts @@ -24,7 +24,7 @@ import { PowerSyncMongo, PowerSyncMongoOptions } from './mongo/db.js'; import { SyncRuleDocument, SyncRuleState } from './mongo/models.js'; 
import { generateSlotName } from './mongo/util.js'; import { v4 as uuid } from 'uuid'; -import { container } from '@powersync/service-framework'; +import { container } from '@powersync/lib-services-framework'; export interface MongoBucketStorageOptions extends PowerSyncMongoOptions {} diff --git a/packages/service-core/src/storage/mongo/MongoBucketBatch.ts b/packages/service-core/src/storage/mongo/MongoBucketBatch.ts index 325c619d7..39de47982 100644 --- a/packages/service-core/src/storage/mongo/MongoBucketBatch.ts +++ b/packages/service-core/src/storage/mongo/MongoBucketBatch.ts @@ -4,7 +4,7 @@ import * as mongo from 'mongodb'; import * as util from '../../util/util-index.js'; import * as replication from '../../replication/replication-index.js'; -import { container, errors } from '@powersync/service-framework'; +import { container, errors } from '@powersync/lib-services-framework'; import { BucketStorageBatch, FlushedResult, mergeToast, SaveOptions } from '../BucketStorage.js'; import { SourceTable } from '../SourceTable.js'; import { PowerSyncMongo } from './db.js'; diff --git a/packages/service-core/src/storage/mongo/MongoSyncRulesLock.ts b/packages/service-core/src/storage/mongo/MongoSyncRulesLock.ts index ad407bdb7..cb7a6c31a 100644 --- a/packages/service-core/src/storage/mongo/MongoSyncRulesLock.ts +++ b/packages/service-core/src/storage/mongo/MongoSyncRulesLock.ts @@ -2,7 +2,7 @@ import crypto from 'crypto'; import { PersistedSyncRulesContent, ReplicationLock } from '../BucketStorage.js'; import { PowerSyncMongo } from './db.js'; -import { container } from '@powersync/service-framework'; +import { container } from '@powersync/lib-services-framework'; /** * Manages a lock on a sync rules document, so that only one process diff --git a/packages/service-core/src/storage/mongo/PersistedBatch.ts b/packages/service-core/src/storage/mongo/PersistedBatch.ts index 7f9162e7b..22275441f 100644 --- a/packages/service-core/src/storage/mongo/PersistedBatch.ts +++ b/packages/service-core/src/storage/mongo/PersistedBatch.ts @@ -16,7 +16,7 @@ import { SourceKey } from './models.js'; import { serializeLookup } from './util.js'; -import { container } from '@powersync/service-framework'; +import { container } from '@powersync/lib-services-framework'; /** * Maximum size of operations we write in a single transaction. 
diff --git a/packages/service-core/src/sync/sync.ts b/packages/service-core/src/sync/sync.ts index f696455a9..61e398364 100644 --- a/packages/service-core/src/sync/sync.ts +++ b/packages/service-core/src/sync/sync.ts @@ -1,6 +1,6 @@ import { JSONBig, JsonContainer } from '@powersync/service-jsonbig'; import { SyncParameters } from '@powersync/service-sync-rules'; -import { container } from '@powersync/service-framework'; +import { container } from '@powersync/lib-services-framework'; import { Semaphore } from 'async-mutex'; import { AbortError } from 'ix/aborterror.js'; diff --git a/packages/service-core/src/system/CorePowerSyncSystem.ts b/packages/service-core/src/system/CorePowerSyncSystem.ts index 15b668ecd..002adf782 100644 --- a/packages/service-core/src/system/CorePowerSyncSystem.ts +++ b/packages/service-core/src/system/CorePowerSyncSystem.ts @@ -1,5 +1,5 @@ import * as pgwire from '@powersync/service-jpgwire'; -import { LifeCycledSystem, container } from '@powersync/service-framework'; +import { LifeCycledSystem, container } from '@powersync/lib-services-framework'; import * as storage from '../storage/storage-index.js'; import * as utils from '../util/util-index.js'; diff --git a/packages/service-core/src/util/config/collectors/config-collector.ts b/packages/service-core/src/util/config/collectors/config-collector.ts index 06228d468..86b25c656 100644 --- a/packages/service-core/src/util/config/collectors/config-collector.ts +++ b/packages/service-core/src/util/config/collectors/config-collector.ts @@ -2,7 +2,7 @@ import * as t from 'ts-codec'; import * as yaml from 'yaml'; import { configFile } from '@powersync/service-types'; -import { schema } from '@powersync/service-framework'; +import { schema } from '@powersync/lib-services-framework'; import { RunnerConfig } from '../types.js'; diff --git a/packages/service-core/src/util/config/collectors/impl/filesystem-config-collector.ts b/packages/service-core/src/util/config/collectors/impl/filesystem-config-collector.ts index 57b53302a..66ca65f63 100644 --- a/packages/service-core/src/util/config/collectors/impl/filesystem-config-collector.ts +++ b/packages/service-core/src/util/config/collectors/impl/filesystem-config-collector.ts @@ -1,7 +1,7 @@ import * as fs from 'fs/promises'; import * as path from 'path'; -import { container } from '@powersync/service-framework'; +import { container } from '@powersync/lib-services-framework'; import { ConfigCollector, ConfigFileFormat } from '../config-collector.js'; import { RunnerConfig } from '../../types.js'; diff --git a/packages/service-core/src/util/config/compound-config-collector.ts b/packages/service-core/src/util/config/compound-config-collector.ts index 0fda246b2..3be9ae56d 100644 --- a/packages/service-core/src/util/config/compound-config-collector.ts +++ b/packages/service-core/src/util/config/compound-config-collector.ts @@ -9,7 +9,7 @@ import { Base64SyncRulesCollector } from './sync-rules/impl/base64-sync-rules-co import { InlineSyncRulesCollector } from './sync-rules/impl/inline-sync-rules-collector.js'; import { FileSystemSyncRulesCollector } from './sync-rules/impl/filesystem-sync-rules-collector.js'; import { FallbackConfigCollector } from './collectors/impl/fallback-config-collector.js'; -import { container } from '@powersync/service-framework'; +import { container } from '@powersync/lib-services-framework'; const POWERSYNC_DEV_KID = 'powersync-dev'; diff --git a/packages/service-core/src/util/env.ts b/packages/service-core/src/util/env.ts index 6d14cd94f..c373371b0 100644 
--- a/packages/service-core/src/util/env.ts +++ b/packages/service-core/src/util/env.ts @@ -1,4 +1,4 @@ -import { utils } from '@powersync/service-framework'; +import { utils } from '@powersync/lib-services-framework'; import { ServiceRunner } from './config/types.js'; diff --git a/packages/service-core/src/util/memory-tracking.ts b/packages/service-core/src/util/memory-tracking.ts index c50a2b788..5bc3121ee 100644 --- a/packages/service-core/src/util/memory-tracking.ts +++ b/packages/service-core/src/util/memory-tracking.ts @@ -1,4 +1,4 @@ -import { container } from '@powersync/service-framework'; +import { container } from '@powersync/lib-services-framework'; /** * Track and log memory usage. diff --git a/packages/service-core/src/util/pgwire_utils.ts b/packages/service-core/src/util/pgwire_utils.ts index 356969762..e0b1c7622 100644 --- a/packages/service-core/src/util/pgwire_utils.ts +++ b/packages/service-core/src/util/pgwire_utils.ts @@ -6,7 +6,7 @@ import * as pgwire from '@powersync/service-jpgwire'; import { SqliteJsonValue, SqliteRow, ToastableSqliteRow, toSyncRulesRow } from '@powersync/service-sync-rules'; import * as replication from '../replication/replication-index.js'; -import { container } from '@powersync/service-framework'; +import { container } from '@powersync/lib-services-framework'; /** * pgwire message -> SQLite row. diff --git a/packages/service-core/src/util/utils.ts b/packages/service-core/src/util/utils.ts index a67b47857..65f5208c5 100644 --- a/packages/service-core/src/util/utils.ts +++ b/packages/service-core/src/util/utils.ts @@ -5,7 +5,7 @@ import { pgwireRows } from '@powersync/service-jpgwire'; import * as storage from '../storage/storage-index.js'; import { BucketChecksum, OpId } from './protocol-types.js'; import { retriedQuery } from './pgwire_utils.js'; -import { container } from '@powersync/service-framework'; +import { container } from '@powersync/lib-services-framework'; export type ChecksumMap = Map; diff --git a/packages/service-core/test/src/env.ts b/packages/service-core/test/src/env.ts index fa207ad08..4e9e1694a 100644 --- a/packages/service-core/test/src/env.ts +++ b/packages/service-core/test/src/env.ts @@ -1,4 +1,4 @@ -import { utils } from '@powersync/service-framework'; +import { utils } from '@powersync/lib-services-framework'; export const env = utils.collectEnvironmentVariables({ MONGO_TEST_URL: utils.type.string.default('mongodb://localhost:27017/powersync_test'), diff --git a/packages/service-core/test/src/slow_tests.test.ts b/packages/service-core/test/src/slow_tests.test.ts index 8a7c0238a..c67c35aac 100644 --- a/packages/service-core/test/src/slow_tests.test.ts +++ b/packages/service-core/test/src/slow_tests.test.ts @@ -10,7 +10,7 @@ import * as pgwire from '@powersync/service-jpgwire'; import { SqliteRow } from '@powersync/service-sync-rules'; import { MongoBucketStorage } from '../../src/storage/MongoBucketStorage.js'; import { PgManager } from '../../src/util/PgManager.js'; -import { NoOpReporter, createInMemoryProbe } from '@powersync/service-framework'; +import { NoOpReporter, createInMemoryProbe } from '@powersync/lib-services-framework'; describe('slow tests - mongodb', function () { // These are slow, inconsistent tests. 
diff --git a/packages/service-core/test/src/util.ts b/packages/service-core/test/src/util.ts index 35dad47aa..bea95c2d8 100644 --- a/packages/service-core/test/src/util.ts +++ b/packages/service-core/test/src/util.ts @@ -7,7 +7,7 @@ import { PowerSyncMongo } from '../../src/storage/mongo/db.js'; import { escapeIdentifier } from '../../src/util/pgwire_utils.js'; import { env } from './env.js'; import { Metrics } from '@/metrics/Metrics.js'; -import { NoOpReporter } from '@powersync/service-framework'; +import { NoOpReporter } from '@powersync/lib-services-framework'; // The metrics need to be initialised before they can be used await Metrics.initialise({ diff --git a/packages/service-core/test/src/wal_stream_utils.ts b/packages/service-core/test/src/wal_stream_utils.ts index 9c3b6270b..3a8128640 100644 --- a/packages/service-core/test/src/wal_stream_utils.ts +++ b/packages/service-core/test/src/wal_stream_utils.ts @@ -6,7 +6,7 @@ import { getClientCheckpoint } from '../../src/util/utils.js'; import { TEST_CONNECTION_OPTIONS, clearTestDb } from './util.js'; import { PgManager } from '../../src/util/PgManager.js'; import { JSONBig } from '@powersync/service-jsonbig'; -import { NoOpReporter, createInMemoryProbe } from '@powersync/service-framework'; +import { NoOpReporter, createInMemoryProbe } from '@powersync/lib-services-framework'; /** * Tests operating on the wal stream need to configure the stream and manage asynchronous diff --git a/packages/service-core/tsconfig.json b/packages/service-core/tsconfig.json index 16e5ad0df..21f96ceed 100644 --- a/packages/service-core/tsconfig.json +++ b/packages/service-core/tsconfig.json @@ -25,7 +25,7 @@ "path": "../sync-rules" }, { - "path": "../service-framework" + "path": "../../libs/lib-services" } ] } diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 196594ec3..19c2dbf03 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -54,6 +54,40 @@ importers: specifier: ^8.2.3 version: 8.2.3 + libs/lib-services: + dependencies: + ajv: + specifier: ^8.12.0 + version: 8.14.0 + better-ajv-errors: + specifier: ^1.2.0 + version: 1.2.0(ajv@8.14.0) + bson: + specifier: ^6.6.0 + version: 6.7.0 + dotenv: + specifier: ^16.4.5 + version: 16.4.5 + lodash: + specifier: ^4.17.21 + version: 4.17.21 + ts-codec: + specifier: ^1.2.2 + version: 1.2.2 + winston: + specifier: ^3.13.0 + version: 3.13.0 + zod: + specifier: ^3.23.8 + version: 3.23.8 + devDependencies: + '@types/lodash': + specifier: ^4.17.5 + version: 4.17.5 + vitest: + specifier: ^0.34.6 + version: 0.34.6 + packages/jpgwire: dependencies: '@powersync/service-jsonbig': @@ -77,9 +111,9 @@ importers: packages/rsocket-router: dependencies: - '@powersync/service-framework': + '@powersync/lib-services-framework': specifier: workspace:* - version: link:../service-framework + version: link:../../libs/lib-services rsocket-core: specifier: 1.0.0-alpha.3 version: 1.0.0-alpha.3 @@ -132,9 +166,9 @@ importers: '@opentelemetry/sdk-metrics': specifier: 1.24.1 version: 1.24.1(@opentelemetry/api@1.8.0) - '@powersync/service-framework': + '@powersync/lib-services-framework': specifier: workspace:* - version: link:../service-framework + version: link:../../libs/lib-services '@powersync/service-jpgwire': specifier: workspace:* version: link:../jpgwire @@ -227,40 +261,6 @@ importers: specifier: ^0.34.6 version: 0.34.6 - packages/service-framework: - dependencies: - ajv: - specifier: ^8.12.0 - version: 8.14.0 - better-ajv-errors: - specifier: ^1.2.0 - version: 1.2.0(ajv@8.14.0) - bson: - specifier: ^6.6.0 - version: 6.7.0 - dotenv: - 
specifier: ^16.4.5 - version: 16.4.5 - lodash: - specifier: ^4.17.21 - version: 4.17.21 - ts-codec: - specifier: ^1.2.2 - version: 1.2.2 - winston: - specifier: ^3.13.0 - version: 3.13.0 - zod: - specifier: ^3.23.8 - version: 3.23.8 - devDependencies: - '@types/lodash': - specifier: ^4.17.5 - version: 4.17.5 - vitest: - specifier: ^0.34.6 - version: 0.34.6 - packages/sync-rules: dependencies: '@powersync/service-jsonbig': @@ -306,12 +306,12 @@ importers: '@opentelemetry/sdk-metrics': specifier: ^1.17.0 version: 1.24.1(@opentelemetry/api@1.6.0) + '@powersync/lib-services-framework': + specifier: workspace:* + version: link:../libs/lib-services '@powersync/service-core': specifier: workspace:* version: link:../packages/service-core - '@powersync/service-framework': - specifier: workspace:* - version: link:../packages/service-framework '@powersync/service-jpgwire': specifier: workspace:* version: link:../packages/jpgwire @@ -4955,7 +4955,7 @@ snapshots: '@opentelemetry/semantic-conventions': 1.25.0 '@prisma/instrumentation': 5.15.0 '@sentry/core': 8.9.2 - '@sentry/opentelemetry': 8.9.2(@opentelemetry/api@1.9.0)(@opentelemetry/core@1.25.0(@opentelemetry/api@1.6.0))(@opentelemetry/instrumentation@0.52.0(@opentelemetry/api@1.6.0))(@opentelemetry/sdk-trace-base@1.25.0(@opentelemetry/api@1.9.0))(@opentelemetry/semantic-conventions@1.25.0) + '@sentry/opentelemetry': 8.9.2(@opentelemetry/api@1.9.0)(@opentelemetry/core@1.25.0(@opentelemetry/api@1.9.0))(@opentelemetry/instrumentation@0.52.0(@opentelemetry/api@1.6.0))(@opentelemetry/sdk-trace-base@1.25.0(@opentelemetry/api@1.9.0))(@opentelemetry/semantic-conventions@1.25.0) '@sentry/types': 8.9.2 '@sentry/utils': 8.9.2 optionalDependencies: @@ -4963,7 +4963,7 @@ snapshots: transitivePeerDependencies: - supports-color - '@sentry/opentelemetry@8.9.2(@opentelemetry/api@1.9.0)(@opentelemetry/core@1.25.0(@opentelemetry/api@1.6.0))(@opentelemetry/instrumentation@0.52.0(@opentelemetry/api@1.6.0))(@opentelemetry/sdk-trace-base@1.25.0(@opentelemetry/api@1.9.0))(@opentelemetry/semantic-conventions@1.25.0)': + '@sentry/opentelemetry@8.9.2(@opentelemetry/api@1.9.0)(@opentelemetry/core@1.25.0(@opentelemetry/api@1.9.0))(@opentelemetry/instrumentation@0.52.0(@opentelemetry/api@1.6.0))(@opentelemetry/sdk-trace-base@1.25.0(@opentelemetry/api@1.9.0))(@opentelemetry/semantic-conventions@1.25.0)': dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/core': 1.25.0(@opentelemetry/api@1.9.0) diff --git a/pnpm-workspace.yaml b/pnpm-workspace.yaml index 6df48ddb9..6324ebdd5 100644 --- a/pnpm-workspace.yaml +++ b/pnpm-workspace.yaml @@ -1,5 +1,6 @@ packages: - 'packages/*' + - 'libs/*' - 'service' # exclude packages that are inside test directories - '!**/test/**' diff --git a/service/package.json b/service/package.json index 0bb8495f8..ed0c5845c 100644 --- a/service/package.json +++ b/service/package.json @@ -15,7 +15,7 @@ "@opentelemetry/exporter-prometheus": "^0.43.0", "@opentelemetry/sdk-metrics": "^1.17.0", "@powersync/service-core": "workspace:*", - "@powersync/service-framework": "workspace:*", + "@powersync/lib-services-framework": "workspace:*", "@powersync/service-jpgwire": "workspace:*", "@powersync/service-jsonbig": "workspace:*", "@powersync/service-rsocket-router": "workspace:*", diff --git a/service/src/entry.ts b/service/src/entry.ts index 750388f4f..8bb9d3f8a 100644 --- a/service/src/entry.ts +++ b/service/src/entry.ts @@ -1,6 +1,6 @@ import winston from 'winston'; import { entry, utils } from '@powersync/service-core'; -import { container, Logger } from 
'@powersync/service-framework'; +import { container, Logger } from '@powersync/lib-services-framework'; // Configure logging to console container.logger.configure({ diff --git a/service/src/runners/server.ts b/service/src/runners/server.ts index 86181449a..17dcf126b 100644 --- a/service/src/runners/server.ts +++ b/service/src/runners/server.ts @@ -2,7 +2,7 @@ import { deserialize } from 'bson'; import fastify from 'fastify'; import cors from '@fastify/cors'; import * as core from '@powersync/service-core'; -import { container, errors } from '@powersync/service-framework'; +import { container, errors } from '@powersync/lib-services-framework'; import { RSocketRequestMeta } from '@powersync/service-rsocket-router'; import { PowerSyncSystem } from '../system/PowerSyncSystem.js'; diff --git a/service/src/runners/stream-worker.ts b/service/src/runners/stream-worker.ts index 4cfc63416..3a5cb61d9 100644 --- a/service/src/runners/stream-worker.ts +++ b/service/src/runners/stream-worker.ts @@ -1,5 +1,5 @@ import { migrations, replication, utils, Metrics } from '@powersync/service-core'; -import { container } from '@powersync/service-framework'; +import { container } from '@powersync/lib-services-framework'; import { PowerSyncSystem } from '../system/PowerSyncSystem.js'; diff --git a/service/src/util/alerting.ts b/service/src/util/alerting.ts index 67e562873..b075c9161 100644 --- a/service/src/util/alerting.ts +++ b/service/src/util/alerting.ts @@ -1,7 +1,7 @@ import * as sentry_types from '@sentry/types'; import * as sentry from '@sentry/node'; import { utils } from '@powersync/service-core'; -import { container, ErrorReporter } from '@powersync/service-framework'; +import { container, ErrorReporter } from '@powersync/lib-services-framework'; // Generally ignore errors that are due to configuration issues, rather than // service bugs. 
diff --git a/service/tsconfig.json b/service/tsconfig.json
index 636c76cca..22d065e42 100644
--- a/service/tsconfig.json
+++ b/service/tsconfig.json
@@ -22,7 +22,7 @@
       "path": "../packages/service-core"
     },
     {
-      "path": "../packages/service-framework"
+      "path": "../libs/lib-services"
     },
     {
       "path": "../packages/sync-rules"
diff --git a/tsconfig.json b/tsconfig.json
index 60a401f3d..bce764b1d 100644
--- a/tsconfig.json
+++ b/tsconfig.json
@@ -17,7 +17,7 @@
       "path": "./packages/sync-rules"
     },
     {
-      "path": "./packages/service-framework"
+      "path": "./libs/lib-services"
     },
     {
       "path": "./packages/types"

From 84ef718ea0ae5a0baac104546a20385b79b5a70c Mon Sep 17 00:00:00 2001
From: Steven Ontong
Date: Thu, 20 Jun 2024 12:23:46 +0200
Subject: [PATCH 20/36] add developing instructions

---
 .env.template                                 |  3 ++
 .gitignore                                    |  1 +
 DEVELOP.md                                    | 53 +++++++++++++++++++
 packages/rsocket-router/tests/tsconfig.json   |  2 +-
 .../service-core/test/src/slow_tests.test.ts  |  9 +---
 packages/service-core/test/src/util.ts        |  3 +-
 .../service-core/test/src/wal_stream_utils.ts |  5 +-
 service/local-dev/powersync-template.yaml     | 33 ++++++++++++
 service/local-dev/sync-rules-template.yaml    |  7 +++
 9 files changed, 102 insertions(+), 14 deletions(-)
 create mode 100644 .env.template
 create mode 100644 service/local-dev/powersync-template.yaml
 create mode 100644 service/local-dev/sync-rules-template.yaml

diff --git a/.env.template b/.env.template
new file mode 100644
index 000000000..c8f358588
--- /dev/null
+++ b/.env.template
@@ -0,0 +1,3 @@
+# Connections for tests
+MONGO_TEST_URL="mongodb://localhost:27017/powersync_test"
+PG_TEST_URL="postgres://postgres:postgres@localhost:5432/powersync_test"
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index 9be3d7003..e5940be3b 100644
--- a/.gitignore
+++ b/.gitignore
@@ -16,6 +16,7 @@
 npm-error.log
 .local-dev
 .probes
 powersync.yaml
+sync-rules.yaml
 
 packages/*/manifest.json
diff --git a/DEVELOP.md b/DEVELOP.md
index 8c8917446..743c45438 100644
--- a/DEVELOP.md
+++ b/DEVELOP.md
@@ -14,6 +14,59 @@ The project uses TypeScript. Build packages with
 pnpm build
 ```
 
+# Running Service
+
+## Dependent Services
+
+The PowerSync service requires Postgres and MongoDB server connections. These configuration details can be specified in a `powersync.yaml` (or JSON) configuration file.
+
+See the [Self hosting demo](https://github.com/powersync-ja/self-host-demo) for examples of starting these services.
+
+A quick way to run all required services, along with a demo backend and frontend, is to run the following in a checked-out `self-host-demo` folder.
+
+```bash
+docker compose up --scale powersync=0
+```
+
+This will start all the services defined in the Self hosting demo except for the PowerSync service, which will be started from this repository.
+
+## Local Configuration
+
+The `./service` folder contains a Node.js project which starts all PowerSync service operations.
+
+Copy the template configuration files and adjust them to your local needs.
+
+```bash
+cd ./service
+cp local-dev/powersync-template.yaml powersync.yaml
+cp local-dev/sync-rules-template.yaml sync-rules.yaml
+```
+
+## Starting Service
+
+To start the service and watch for changes to any consumed source files, run the `pnpm watch:service` command in the repository root.
+
+# Running Tests
+
+Most packages contain a `test` script which can be executed with `pnpm test`. Some packages may require additional setup to run tests.
+
+## Service Core
+
+Some tests for these packages require a connection to MongoDB and Postgres. Connection strings for these services should be available as environment variables. See [Running Service](#running-service) for details on configuring those services.
+
+These can be set in a terminal/shell:
+
+```bash
+export MONGO_TEST_URL="mongodb://localhost:27017/powersync_test"
+export PG_TEST_URL="postgres://postgres:postgres@localhost:5432/powersync_test"
+```
+
+or by copying the `.env.template` file and using a loader such as [Direnv](https://direnv.net/)
+
+```bash
+cp .env.template .env
+```
+
 # Releases
 
 This repository uses Changesets. Add changesets to changed packages before merging PRs.
diff --git a/packages/rsocket-router/tests/tsconfig.json b/packages/rsocket-router/tests/tsconfig.json
index e8c037025..6f06c7eda 100644
--- a/packages/rsocket-router/tests/tsconfig.json
+++ b/packages/rsocket-router/tests/tsconfig.json
@@ -1,5 +1,5 @@
 {
-  "extends": "../../../../tsconfig.base.json",
+  "extends": "../../../tsconfig.base.json",
   "compilerOptions": {
     "rootDir": "src",
     "noEmit": true,
diff --git a/packages/service-core/test/src/slow_tests.test.ts b/packages/service-core/test/src/slow_tests.test.ts
index c67c35aac..fd8fc7728 100644
--- a/packages/service-core/test/src/slow_tests.test.ts
+++ b/packages/service-core/test/src/slow_tests.test.ts
@@ -10,7 +10,6 @@ import * as pgwire from '@powersync/service-jpgwire';
 import { SqliteRow } from '@powersync/service-sync-rules';
 import { MongoBucketStorage } from '../../src/storage/MongoBucketStorage.js';
 import { PgManager } from '../../src/util/PgManager.js';
-import { NoOpReporter, createInMemoryProbe } from '@powersync/lib-services-framework';
 
 describe('slow tests - mongodb', function () {
   // These are slow, inconsistent tests.
@@ -72,9 +71,7 @@ bucket_definitions: abort_signal: abortController.signal, connections, storage: storage, - factory: f, - probe: createInMemoryProbe(), - errorReporter: NoOpReporter + factory: f }; walStream = new WalStream(options); @@ -189,9 +186,7 @@ bucket_definitions: abort_signal: abortController.signal, connections, storage: storage, - factory: f, - probe: createInMemoryProbe(), - errorReporter: NoOpReporter + factory: f }; walStream = new WalStream(options); diff --git a/packages/service-core/test/src/util.ts b/packages/service-core/test/src/util.ts index bea95c2d8..ce2f255bb 100644 --- a/packages/service-core/test/src/util.ts +++ b/packages/service-core/test/src/util.ts @@ -7,7 +7,6 @@ import { PowerSyncMongo } from '../../src/storage/mongo/db.js'; import { escapeIdentifier } from '../../src/util/pgwire_utils.js'; import { env } from './env.js'; import { Metrics } from '@/metrics/Metrics.js'; -import { NoOpReporter } from '@powersync/lib-services-framework'; // The metrics need to be initialised before they can be used await Metrics.initialise({ @@ -24,7 +23,7 @@ export type StorageFactory = () => Promise; export const MONGO_STORAGE_FACTORY: StorageFactory = async () => { const db = await connectMongo(); await db.clear(); - return new MongoBucketStorage(db, { slot_name_prefix: 'test_', errorReporter: NoOpReporter }); + return new MongoBucketStorage(db, { slot_name_prefix: 'test_' }); }; export async function clearTestDb(db: pgwire.PgClient) { diff --git a/packages/service-core/test/src/wal_stream_utils.ts b/packages/service-core/test/src/wal_stream_utils.ts index 3a8128640..92ad02580 100644 --- a/packages/service-core/test/src/wal_stream_utils.ts +++ b/packages/service-core/test/src/wal_stream_utils.ts @@ -6,7 +6,6 @@ import { getClientCheckpoint } from '../../src/util/utils.js'; import { TEST_CONNECTION_OPTIONS, clearTestDb } from './util.js'; import { PgManager } from '../../src/util/PgManager.js'; import { JSONBig } from '@powersync/service-jsonbig'; -import { NoOpReporter, createInMemoryProbe } from '@powersync/lib-services-framework'; /** * Tests operating on the wal stream need to configure the stream and manage asynchronous @@ -70,9 +69,7 @@ export class WalStreamTestContext { storage: this.storage, factory: this.factory, connections: this.connections, - abort_signal: this.abortController.signal, - probe: createInMemoryProbe(), - errorReporter: NoOpReporter + abort_signal: this.abortController.signal }; this._walStream = new WalStream(options); return this._walStream!; diff --git a/service/local-dev/powersync-template.yaml b/service/local-dev/powersync-template.yaml new file mode 100644 index 000000000..9d9e5d5c2 --- /dev/null +++ b/service/local-dev/powersync-template.yaml @@ -0,0 +1,33 @@ +# powersync.yaml +# Very similar to the config on Collide, but a little more low-level +# See https://github.com/powersync-ja/powersync-service/blob/c6140e883a4a2ac9c8c2e46b7c31ad38e1c6d28a/packages/types/src/config/PowerSyncConfig.ts#L95 + +migrations: + disable_auto_migrate: true + +replication: + connections: + - type: postgresql + uri: postgres://postgres:mypassword@localhost:5432/postgres + sslmode: disable # okay for local/private network, not for public network + +storage: + type: mongodb + uri: mongodb://localhost:27017/powersync_demo + +port: 8080 + +sync_rules: + # This path is relative to the `powersync.yaml` file + path: sync-rules.yaml + +# Client (application end user) authentication settings +client_auth: + # Enable this if using Supabase Auth + # supabase: true + + # 
JWKS URIs can be specified here + jwks_uri: http://localhost:6060/api/auth/keys + + # JWKS audience + audience: ['powersync-dev', 'powersync'] diff --git a/service/local-dev/sync-rules-template.yaml b/service/local-dev/sync-rules-template.yaml new file mode 100644 index 000000000..1e7d3c1d9 --- /dev/null +++ b/service/local-dev/sync-rules-template.yaml @@ -0,0 +1,7 @@ +# See Documentation for more information: +# https://docs.powersync.com/usage/sync-rules +bucket_definitions: + global: + data: + - SELECT * FROM lists + - SELECT * FROM todos From 6506e0c3f38213030a8a068ae6569c5718893ffd Mon Sep 17 00:00:00 2001 From: Steven Ontong Date: Thu, 20 Jun 2024 12:40:34 +0200 Subject: [PATCH 21/36] fix tests --- packages/service-core/test/src/auth.test.ts | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/packages/service-core/test/src/auth.test.ts b/packages/service-core/test/src/auth.test.ts index 7fb710c52..e80970ed9 100644 --- a/packages/service-core/test/src/auth.test.ts +++ b/packages/service-core/test/src/auth.test.ts @@ -262,7 +262,8 @@ describe('JWT Auth', () => { expect(errors).toEqual([]); expect(keys.length).toBeGreaterThanOrEqual(1); - const invalid = new RemoteJWKSCollector('https://localhost/.well-known/jwks.json', { + // The localhost hostname fails to resolve correctly on MacOS https://github.com/nodejs/help/issues/2163 + const invalid = new RemoteJWKSCollector('https://127.0.0.1/.well-known/jwks.json', { block_local_ip: true }); expect(invalid.getKeys()).rejects.toThrow('IPs in this range are not supported'); @@ -278,7 +279,8 @@ describe('JWT Auth', () => { expect(errors).toEqual([]); expect(keys.length).toBeGreaterThanOrEqual(1); - const invalid = new RemoteJWKSCollector('https://localhost/.well-known/jwks.json'); + // The localhost hostname fails to resolve correctly on MacOS https://github.com/nodejs/help/issues/2163 + const invalid = new RemoteJWKSCollector('https://127.0.0.1/.well-known/jwks.json'); // Should try and fetch expect(invalid.getKeys()).rejects.toThrow('ECONNREFUSED'); }); From 3b1a518f9c549fbae700b4b711d4026ed6f9c89a Mon Sep 17 00:00:00 2001 From: Steven Ontong Date: Thu, 20 Jun 2024 12:40:58 +0200 Subject: [PATCH 22/36] update local test instructions --- DEVELOP.md | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/DEVELOP.md b/DEVELOP.md index 743c45438..7eff065a0 100644 --- a/DEVELOP.md +++ b/DEVELOP.md @@ -67,6 +67,14 @@ or by copying the `.env.template` file and using a loader such as [Direnv](https cp .env.template .env ``` +## Postgres Configuration + +The default `PG_TEST_URL` points to a `powersync_test` database. Ensure this is created by executing the following SQL on your connection. + +```SQL +CREATE DATABASE powersync_test; +``` + # Releases This repository uses Changesets. Add changesets to changed packages before merging PRs. From b5a106d7f1b5370c14c2c6c88c7700461fa17b45 Mon Sep 17 00:00:00 2001 From: Steven Ontong Date: Thu, 20 Jun 2024 14:12:01 +0200 Subject: [PATCH 23/36] DockerFile additions --- libs/lib-services/README.md | 3 +++ libs/lib-services/package.json | 3 --- service/Dockerfile | 4 ++++ service/src/runners/server.ts | 1 + 4 files changed, 8 insertions(+), 3 deletions(-) create mode 100644 libs/lib-services/README.md diff --git a/libs/lib-services/README.md b/libs/lib-services/README.md new file mode 100644 index 000000000..8116e4920 --- /dev/null +++ b/libs/lib-services/README.md @@ -0,0 +1,3 @@ +# Service Library + +A library containing base definitions for interacting with Micro services. 
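(Illustrative sketch, not part of the patch series: as of this commit, a consumer of the renamed `@powersync/lib-services-framework` package would wire up logging roughly as below. The only exports assumed are `container` and `Logger`, both of which appear in the diffs above; winston supplies the console transport.)

```ts
// Hypothetical consumer sketch - assumes only the `container` and `Logger`
// exports referenced throughout this patch series; not part of the patch itself.
import winston from 'winston';
import { container, Logger } from '@powersync/lib-services-framework';

// Configure the shared logger, mirroring the pattern used by the service entry point.
container.logger.configure({
  format: process.env.NODE_ENV == 'production' ? Logger.production_format : Logger.development_format,
  transports: [new winston.transports.Console()]
});

container.logger.info('PowerSync service framework initialized');
```

Note that a later commit in this series ("logger improvements") replaces `container.logger` with a module-level `logger` export from the same package.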
diff --git a/libs/lib-services/package.json b/libs/lib-services/package.json index e26e8bcb8..122c7a78b 100644 --- a/libs/lib-services/package.json +++ b/libs/lib-services/package.json @@ -6,9 +6,6 @@ "type": "module", "types": "dist/index.d.ts", "license": "FSL-1.1-Apache-2.0", - "publishConfig": { - "access": "restricted" - }, "files": [ "dist/**/*" ], diff --git a/service/Dockerfile b/service/Dockerfile index f942246df..7351662af 100644 --- a/service/Dockerfile +++ b/service/Dockerfile @@ -15,6 +15,8 @@ COPY packages/service-core/package.json packages/service-core/tsconfig.json pack COPY packages/sync-rules/package.json packages/sync-rules/tsconfig.json packages/sync-rules/ COPY packages/types/package.json packages/types/tsconfig.json packages/types/ +COPY libs/lib-services/package.json libs/lib-services/tsconfig.json libs/lib-services/ + RUN pnpm install --frozen-lockfile COPY service/src service/src/ @@ -27,6 +29,8 @@ COPY packages/sync-rules/src packages/sync-rules/src/ COPY packages/rsocket-router/src packages/rsocket-router/src/ COPY packages/types/src packages/types/src/ +COPY libs/lib-services/src libs/lib-services/src/ + RUN pnpm build:production && \ rm -rf node_modules **/node_modules && \ pnpm install --frozen-lockfile --prod --ignore-scripts diff --git a/service/src/runners/server.ts b/service/src/runners/server.ts index 17dcf126b..d2d4bd102 100644 --- a/service/src/runners/server.ts +++ b/service/src/runners/server.ts @@ -119,6 +119,7 @@ export async function startServer(runnerConfig: core.utils.RunnerConfig) { core.Metrics.getInstance().configureApiMetrics(); await server.listen({ + host: '0.0.0.0', port: system.config.port }); From 62bd56ea91b30a0551b9aa4c5ced55742e072e4e Mon Sep 17 00:00:00 2001 From: Steven Ontong Date: Thu, 20 Jun 2024 14:33:28 +0200 Subject: [PATCH 24/36] logger improvements --- DEVELOP.md | 2 ++ libs/lib-services/src/container.ts | 12 -------- libs/lib-services/src/logger/Logger.ts | 8 +++++ .../src/signals/termination-handler.ts | 14 ++++----- .../src/router/ReactiveSocketRouter.ts | 5 +--- .../transport/WebSocketServerTransport.ts | 4 +-- .../transport/WebsocketDuplexConnection.ts | 8 ++--- packages/service-core/src/api/diagnostics.ts | 4 +-- packages/service-core/src/auth/KeyStore.ts | 4 +-- packages/service-core/src/entry/cli-entry.ts | 4 +-- packages/service-core/src/metrics/Metrics.ts | 12 ++++---- .../service-core/src/migrations/executor.ts | 8 ++--- .../service-core/src/replication/WalStream.ts | 30 +++++++++---------- .../src/replication/WalStreamManager.ts | 20 ++++++------- .../src/replication/WalStreamRunner.ts | 12 ++++---- .../src/routes/endpoints/socket-route.ts | 4 +-- .../src/routes/endpoints/sync-stream.ts | 4 +-- packages/service-core/src/routes/hooks.ts | 5 ++-- .../service-core/src/routes/route-register.ts | 4 +-- packages/service-core/src/runner/teardown.ts | 19 ++++++------ .../src/storage/MongoBucketStorage.ts | 8 ++--- .../src/storage/mongo/MongoBucketBatch.ts | 18 +++++------ .../src/storage/mongo/MongoSyncRulesLock.ts | 6 ++-- .../src/storage/mongo/PersistedBatch.ts | 4 +-- packages/service-core/src/sync/sync.ts | 8 ++--- .../src/system/CorePowerSyncSystem.ts | 4 +-- .../impl/filesystem-config-collector.ts | 4 +-- .../util/config/compound-config-collector.ts | 6 ++-- .../service-core/src/util/memory-tracking.ts | 4 +-- .../service-core/src/util/pgwire_utils.ts | 4 +-- packages/service-core/src/util/utils.ts | 8 ++--- service/src/entry.ts | 9 ------ service/src/runners/server.ts | 8 ++--- service/src/runners/stream-worker.ts 
| 4 +-- service/src/util/alerting.ts | 6 ++-- 35 files changed, 129 insertions(+), 155 deletions(-) diff --git a/DEVELOP.md b/DEVELOP.md index 7eff065a0..dbf443d7f 100644 --- a/DEVELOP.md +++ b/DEVELOP.md @@ -28,6 +28,8 @@ A quick method for running all required services with a handy backend and fronte docker compose up --scale powersync=0 ``` +Note: The `mongo` hostname specified in the MongoDB replica set needs to be accessible by your host machine if using the Mongo service above. + This will start all the services defined in the Self hosting demo except for the PowerSync service - which will be started from this repository. ## Local Configuration diff --git a/libs/lib-services/src/container.ts b/libs/lib-services/src/container.ts index 49c326eb4..a26b79347 100644 --- a/libs/lib-services/src/container.ts +++ b/libs/lib-services/src/container.ts @@ -1,18 +1,14 @@ -import winston, { Logger } from 'winston'; - import { ErrorReporter } from './alerts/definitions.js'; import { NoOpReporter } from './alerts/no-op-reporter.js'; import { ProbeModule, TerminationHandler, createFSProbe, createTerminationHandler } from './signals/signals-index.js'; export enum ContainerImplementation { - LOGGER = 'logger', REPORTER = 'reporter', PROBES = 'probes', TERMINATION_HANDLER = 'termination-handler' } export type ContainerImplementationTypes = { - [ContainerImplementation.LOGGER]: Logger; [ContainerImplementation.REPORTER]: ErrorReporter; [ContainerImplementation.PROBES]: ProbeModule; [ContainerImplementation.TERMINATION_HANDLER]: TerminationHandler; @@ -21,13 +17,6 @@ export type ContainerImplementationTypes = { export class Container { protected implementations: ContainerImplementationTypes; - /** - * Logger which can be used throughout the entire project - */ - get logger() { - return this.implementations[ContainerImplementation.LOGGER]; - } - /** * Manager for system health probes */ @@ -51,7 +40,6 @@ export class Container { constructor() { this.implementations = { - [ContainerImplementation.LOGGER]: winston.createLogger(), [ContainerImplementation.REPORTER]: NoOpReporter, [ContainerImplementation.PROBES]: createFSProbe(), [ContainerImplementation.TERMINATION_HANDLER]: createTerminationHandler() diff --git a/libs/lib-services/src/logger/Logger.ts b/libs/lib-services/src/logger/Logger.ts index 33a9eead6..1646f407f 100644 --- a/libs/lib-services/src/logger/Logger.ts +++ b/libs/lib-services/src/logger/Logger.ts @@ -7,3 +7,11 @@ export namespace Logger { ); export const production_format = winston.format.combine(winston.format.timestamp(), winston.format.json()); } + +export const logger = winston.createLogger(); + +// Configure logging to console as the default +logger.configure({ + format: process.env.NODE_ENV == 'production' ? 
Logger.production_format : Logger.development_format, + transports: [new winston.transports.Console()] +}); diff --git a/libs/lib-services/src/signals/termination-handler.ts b/libs/lib-services/src/signals/termination-handler.ts index 1dd63f04c..9edbf1995 100644 --- a/libs/lib-services/src/signals/termination-handler.ts +++ b/libs/lib-services/src/signals/termination-handler.ts @@ -1,5 +1,5 @@ import _ from 'lodash'; -import { container } from '../container.js'; +import { logger } from '../logger/Logger.js'; export enum Signal { SIGTERM = 'SIGTERM', @@ -35,14 +35,14 @@ export const createTerminationHandler = (params?: TerminationHandlerParams) => { let signal_received = false; const signalHandler = (signal: Signal) => { if (signal === Signal.SIGINT) { - container.logger.info('Send ^C again to force exit'); + logger.info('Send ^C again to force exit'); } if (signal_received) { // The SIGINT signal is sent on ctrl-c - if the user presses ctrl-c twice then we // hard exit if (signal === Signal.SIGINT) { - container.logger.info('Received second ^C. Exiting'); + logger.info('Received second ^C. Exiting'); process.exit(1); } return; @@ -51,24 +51,24 @@ export const createTerminationHandler = (params?: TerminationHandlerParams) => { signal_received = true; new Promise(async (resolve) => { - container.logger.info('Terminating gracefully ...'); + logger.info('Terminating gracefully ...'); for (const handler of handlers) { try { await handler(signal); } catch (err) { - container.logger.error('Failed to execute termination handler', err); + logger.error('Failed to execute termination handler', err); } } - container.logger.info('Exiting'); + logger.info('Exiting'); resolve(); }).then(() => { process.exit(0); }); setTimeout(() => { - container.logger.error('Timed out waiting for program to exit. Force exiting'); + logger.error('Timed out waiting for program to exit. 
Force exiting'); process.exit(1); }, timeout_ms); }; diff --git a/packages/rsocket-router/src/router/ReactiveSocketRouter.ts b/packages/rsocket-router/src/router/ReactiveSocketRouter.ts index 77610f6b0..e500150f0 100644 --- a/packages/rsocket-router/src/router/ReactiveSocketRouter.ts +++ b/packages/rsocket-router/src/router/ReactiveSocketRouter.ts @@ -16,7 +16,7 @@ import { SocketResponder } from './types.js'; import { WebsocketServerTransport } from './transport/WebSocketServerTransport.js'; -import { container, errors } from '@powersync/lib-services-framework'; +import { errors, logger } from '@powersync/lib-services-framework'; export class ReactiveSocketRouter { protected activeConnections: number; @@ -55,8 +55,6 @@ export class ReactiveSocketRouter { wsCreator: () => wss }); - const { logger } = container; - const rSocketServer = new RSocketServer({ transport, acceptor: { @@ -124,7 +122,6 @@ export async function handleReactiveStream( ) { const { payload, responder, initialN } = request; const { metadata } = payload; - const { logger } = container; const exitWithError = (error: any) => { responder.onError(error); diff --git a/packages/rsocket-router/src/router/transport/WebSocketServerTransport.ts b/packages/rsocket-router/src/router/transport/WebSocketServerTransport.ts index 28b86518c..53e802b50 100644 --- a/packages/rsocket-router/src/router/transport/WebSocketServerTransport.ts +++ b/packages/rsocket-router/src/router/transport/WebSocketServerTransport.ts @@ -28,7 +28,7 @@ import { } from 'rsocket-core'; import * as WebSocket from 'ws'; import { WebsocketDuplexConnection } from './WebsocketDuplexConnection.js'; -import { container } from '@powersync/lib-services-framework'; +import { logger } from '@powersync/lib-services-framework'; export type SocketFactory = (options: SocketOptions) => WebSocket.WebSocketServer; @@ -76,7 +76,7 @@ export class WebsocketServerTransport implements ServerTransport { const duplex = WebSocket.createWebSocketStream(websocket); WebsocketDuplexConnection.create(duplex, connectionAcceptor, multiplexerDemultiplexerFactory, websocket); } catch (ex) { - container.logger.error(`Could not create duplex connection`, ex); + logger.error(`Could not create duplex connection`, ex); if (websocket.readyState == websocket.OPEN) { websocket.close(); } diff --git a/packages/rsocket-router/src/router/transport/WebsocketDuplexConnection.ts b/packages/rsocket-router/src/router/transport/WebsocketDuplexConnection.ts index 059cf4684..b9e5f088c 100644 --- a/packages/rsocket-router/src/router/transport/WebsocketDuplexConnection.ts +++ b/packages/rsocket-router/src/router/transport/WebsocketDuplexConnection.ts @@ -15,7 +15,7 @@ * limitations under the License. */ -import { container } from '@powersync/lib-services-framework'; +import { logger } from '@powersync/lib-services-framework'; import { Closeable, Deferred, @@ -93,7 +93,7 @@ export class WebsocketDuplexConnection extends Deferred implements DuplexConnect }; private handleError = (e: WebSocket.ErrorEvent): void => { - container.logger.error(`Error in WebSocket duplex connection: ${e}`); + logger.error(`Error in WebSocket duplex connection: ${e}`); this.close(e.error); }; @@ -123,7 +123,7 @@ export class WebsocketDuplexConnection extends Deferred implements DuplexConnect throw new Error(`Unable to deserialize frame`); } } catch (ex) { - container.logger.info(`Received error deserializing initial frame buffer. Skipping connection request.`, ex); + logger.info(`Received error deserializing initial frame buffer. 
Skipping connection request.`, ex); // The initial frame should always be parsable return socket.end(); } @@ -137,7 +137,7 @@ export class WebsocketDuplexConnection extends Deferred implements DuplexConnect await connectionAcceptor(frame, connection); socket.resume(); } catch (error) { - container.logger.info(`Error accepting connection:`, error); + logger.info(`Error accepting connection:`, error); connection.close(error); } }); diff --git a/packages/service-core/src/api/diagnostics.ts b/packages/service-core/src/api/diagnostics.ts index 87a9a27aa..46a7cde98 100644 --- a/packages/service-core/src/api/diagnostics.ts +++ b/packages/service-core/src/api/diagnostics.ts @@ -7,7 +7,7 @@ import * as storage from '../storage/storage-index.js'; import * as util from '../util/util-index.js'; import { CorePowerSyncSystem } from '../system/CorePowerSyncSystem.js'; -import { container } from '@powersync/lib-services-framework'; +import { logger } from '@powersync/lib-services-framework'; export async function getConnectionStatus(system: CorePowerSyncSystem): Promise { if (system.pgwire_pool == null) { @@ -134,7 +134,7 @@ export async function getSyncRulesStatus( } } catch (e) { // Ignore - container.logger.warn(`Unable to get replication lag`, e); + logger.warn(`Unable to get replication lag`, e); } } } else { diff --git a/packages/service-core/src/auth/KeyStore.ts b/packages/service-core/src/auth/KeyStore.ts index 9fbaf8417..78828b3bc 100644 --- a/packages/service-core/src/auth/KeyStore.ts +++ b/packages/service-core/src/auth/KeyStore.ts @@ -3,7 +3,7 @@ import secs from '../util/secs.js'; import { KeyOptions, KeySpec, SUPPORTED_ALGORITHMS } from './KeySpec.js'; import { KeyCollector } from './KeyCollector.js'; import { JwtPayload } from './JwtPayload.js'; -import { container } from '@powersync/lib-services-framework'; +import { logger } from '@powersync/lib-services-framework'; /** * KeyStore to get keys and verify tokens. @@ -145,7 +145,7 @@ export class KeyStore { this.collector.noKeyFound?.().catch((e) => { // Typically this error would be stored on the collector. // This is just a last resort error handling. - container.logger.error(`Failed to refresh keys`, e); + logger.error(`Failed to refresh keys`, e); }); throw new jose.errors.JOSEError( diff --git a/packages/service-core/src/entry/cli-entry.ts b/packages/service-core/src/entry/cli-entry.ts index 31026bd19..2ebeb7549 100644 --- a/packages/service-core/src/entry/cli-entry.ts +++ b/packages/service-core/src/entry/cli-entry.ts @@ -4,7 +4,7 @@ import * as utils from '../util/util-index.js'; import { registerMigrationAction } from './commands/migrate-action.js'; import { registerTearDownAction } from './commands/teardown-action.js'; import { registerStartAction } from './entry-index.js'; -import { container } from '@powersync/lib-services-framework'; +import { logger } from '@powersync/lib-services-framework'; /** * Generates a Commander program which serves as the entry point @@ -32,7 +32,7 @@ export function generateEntryProgram(startHandlers?: Record { @@ -212,7 +212,7 @@ Anonymous telemetry is currently: ${options.disable_telemetry_sharing ? 
'disable function getMetrics() { if (cachedRequest == null || Date.now() - cacheTimestamp > MINIMUM_INTERVAL) { cachedRequest = system.storage.getStorageMetrics().catch((e) => { - container.logger.error(`Failed to get storage metrics`, e); + logger.error(`Failed to get storage metrics`, e); return null; }); cacheTimestamp = Date.now(); diff --git a/packages/service-core/src/migrations/executor.ts b/packages/service-core/src/migrations/executor.ts index 39c3f4c51..edff07b45 100644 --- a/packages/service-core/src/migrations/executor.ts +++ b/packages/service-core/src/migrations/executor.ts @@ -1,6 +1,6 @@ +import { logger } from '@powersync/lib-services-framework'; import * as defs from './definitions.js'; import { MigrationStore } from './store/migration-store.js'; -import { container } from '@powersync/lib-services-framework'; type ExecuteParams = { migrations: defs.Migration[]; @@ -42,7 +42,7 @@ export async function* execute(params: ExecuteParams): AsyncGenerator { // Failures here are okay - this only speeds up stopping the process. - container.logger.warn('Failed to ping connection', e); + logger.warn('Failed to ping connection', e); }); } else { // If we haven't started streaming yet, it could be due to something like @@ -157,9 +157,7 @@ export class WalStream { ] }); if (rs.rows.length == 0) { - container.logger.info( - `Skipping ${tablePattern.schema}.${name} - not part of ${this.publication_name} publication` - ); + logger.info(`Skipping ${tablePattern.schema}.${name} - not part of ${this.publication_name} publication`); continue; } @@ -189,7 +187,7 @@ export class WalStream { const status = await this.storage.getStatus(); if (status.snapshot_done && status.checkpoint_lsn) { - container.logger.info(`${slotName} Initial replication already done`); + logger.info(`${slotName} Initial replication already done`); let last_error = null; @@ -221,11 +219,11 @@ export class WalStream { ] }); // Success - container.logger.info(`Slot ${slotName} appears healthy`); + logger.info(`Slot ${slotName} appears healthy`); return { needsInitialSync: false }; } catch (e) { last_error = e; - container.logger.warn(`${slotName} Replication slot error`, e); + logger.warn(`${slotName} Replication slot error`, e); if (this.stopped) { throw e; @@ -252,7 +250,7 @@ export class WalStream { // Sample: publication "powersync" does not exist // Happens when publication deleted or never created. // Slot must be re-created in this case. - container.logger.info(`${slotName} does not exist anymore, will create new slot`); + logger.info(`${slotName} does not exist anymore, will create new slot`); throw new MissingReplicationSlotError(`Replication slot ${slotName} does not exist anymore`); } @@ -315,7 +313,7 @@ WHERE oid = $1::regclass`, // with streaming replication. const lsn = pgwire.lsnMakeComparable(row[1]); const snapshot = row[2]; - container.logger.info(`Created replication slot ${slotName} at ${lsn} with snapshot ${snapshot}`); + logger.info(`Created replication slot ${slotName} at ${lsn} with snapshot ${snapshot}`); // https://stackoverflow.com/questions/70160769/postgres-logical-replication-starting-from-given-lsn await db.query('BEGIN'); @@ -337,9 +335,9 @@ WHERE oid = $1::regclass`, // On Supabase, the default is 2 minutes. 
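The `WalStream` hunk above creates the replication slot, records the exported snapshot name and LSN, and then copies the initial data inside a transaction with `statement_timeout` cleared so the long-running copy is not killed. A minimal sketch of that shape, using plain node-postgres (`pg`) rather than the project's pgwire wrapper; the `SET TRANSACTION SNAPSHOT` step and the `copyTables` callback are illustrative assumptions, not the service's exact code:

```ts
import pg from 'pg';

// Sketch only: snapshotName is the server-generated snapshot exported when the
// replication slot was created, and copyTables stands in for the per-table copy.
async function snapshotAtSlot(
  connectionString: string,
  snapshotName: string,
  copyTables: (client: pg.Client) => Promise<void>
) {
  const client = new pg.Client({ connectionString });
  await client.connect();
  try {
    await client.query('BEGIN ISOLATION LEVEL REPEATABLE READ, READ ONLY');
    // Pin this transaction to the snapshot exported by the replication slot,
    // so the copied rows line up exactly with the slot's starting LSN.
    await client.query(`SET TRANSACTION SNAPSHOT '${snapshotName}'`);
    // The initial copy can easily exceed any configured statement_timeout.
    await client.query('SET LOCAL statement_timeout = 0');
    await copyTables(client);
    await client.query('COMMIT');
  } catch (e) {
    await client.query('ROLLBACK').catch(() => {});
    throw e;
  } finally {
    await client.end();
  }
}
```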
await db.query(`set local statement_timeout = 0`); - container.logger.info(`${slotName} Starting initial replication`); + logger.info(`${slotName} Starting initial replication`); await this.initialReplication(db, lsn); - container.logger.info(`${slotName} Initial replication done`); + logger.info(`${slotName} Initial replication done`); await db.query('COMMIT'); } catch (e) { await db.query('ROLLBACK'); @@ -370,7 +368,7 @@ WHERE oid = $1::regclass`, } private async snapshotTable(batch: storage.BucketStorageBatch, db: pgwire.PgConnection, table: storage.SourceTable) { - container.logger.info(`${this.slot_name} Replicating ${table.qualifiedName}`); + logger.info(`${this.slot_name} Replicating ${table.qualifiedName}`); const estimatedCount = await this.estimatedCount(db, table); let at = 0; let lastLogIndex = 0; @@ -396,7 +394,7 @@ WHERE oid = $1::regclass`, return q; }); if (rows.length > 0 && at - lastLogIndex >= 5000) { - container.logger.info(`${this.slot_name} Replicating ${table.qualifiedName} ${at}/${estimatedCount}`); + logger.info(`${this.slot_name} Replicating ${table.qualifiedName} ${at}/${estimatedCount}`); lastLogIndex = at; } if (this.abort_signal.aborted) { @@ -490,7 +488,7 @@ WHERE oid = $1::regclass`, if (msg.tag == 'insert' || msg.tag == 'update' || msg.tag == 'delete') { const table = this.getTable(getRelId(msg.relation)); if (!table.syncAny) { - container.logger.debug(`Table ${table.qualifiedName} not used in sync rules - skipping`); + logger.debug(`Table ${table.qualifiedName} not used in sync rules - skipping`); return null; } @@ -585,7 +583,7 @@ WHERE oid = $1::regclass`, await this.ack(msg.lsn!, replicationStream); } else { if (count % 100 == 0) { - container.logger.info(`${this.slot_name} replicating op ${count} ${msg.lsn}`); + logger.info(`${this.slot_name} replicating op ${count} ${msg.lsn}`); } count += 1; diff --git a/packages/service-core/src/replication/WalStreamManager.ts b/packages/service-core/src/replication/WalStreamManager.ts index 21b4fcf33..10dc1c268 100644 --- a/packages/service-core/src/replication/WalStreamManager.ts +++ b/packages/service-core/src/replication/WalStreamManager.ts @@ -7,7 +7,7 @@ import * as util from '../util/util-index.js'; import { DefaultErrorRateLimiter } from './ErrorRateLimiter.js'; import { WalStreamRunner } from './WalStreamRunner.js'; import { CorePowerSyncSystem } from '../system/CorePowerSyncSystem.js'; -import { container } from '@powersync/lib-services-framework'; +import { container, logger } from '@powersync/lib-services-framework'; // 5 minutes const PING_INTERVAL = 1_000_000_000n * 300n; @@ -36,7 +36,7 @@ export class WalStreamManager { start() { this.runLoop().catch((e) => { - container.logger.error(`Fatal WalStream error`, e); + logger.error(`Fatal WalStream error`, e); container.reporter.captureException(e); setTimeout(() => { process.exit(1); @@ -57,7 +57,7 @@ export class WalStreamManager { const configured_sync_rules = await util.loadSyncRules(this.system.config); let configured_lock: storage.ReplicationLock | undefined = undefined; if (configured_sync_rules != null) { - container.logger.info('Loading sync rules from configuration'); + logger.info('Loading sync rules from configuration'); try { // Configure new sync rules, if it has changed. // In that case, also immediately take out a lock, so that another process doesn't start replication on it. 
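`PING_INTERVAL` above is five minutes expressed in nanoseconds as a `bigint`, which points at `process.hrtime.bigint()` timestamps. A small sketch of how such a throttled keepalive can be driven; the `emitPing` callback and the field name are placeholders rather than the actual `WalStreamManager` internals (in the service the ping is a `pg_logical_emit_message` query, as the next hunk shows):

```ts
// Five minutes in nanoseconds, matching the constant above.
const PING_INTERVAL = 1_000_000_000n * 300n;

class KeepalivePinger {
  private lastPing = 0n;

  constructor(private emitPing: () => Promise<void>) {}

  /** Call from the main loop; only pings once per interval, and never throws. */
  async maybePing() {
    const now = process.hrtime.bigint();
    if (now - this.lastPing < PING_INTERVAL) {
      return;
    }
    try {
      await this.emitPing();
    } catch (e) {
      console.warn('Failed to ping', e);
    }
    this.lastPing = now;
  }
}
```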
@@ -69,10 +69,10 @@ export class WalStreamManager { } } catch (e) { // Log, but continue with previous sync rules - container.logger.error(`Failed to load sync rules from configuration`, e); + logger.error(`Failed to load sync rules from configuration`, e); } } else { - container.logger.info('No sync rules configured - configure via API'); + logger.info('No sync rules configured - configure via API'); } while (!this.stopped) { await container.probes.touch(); @@ -92,7 +92,7 @@ export class WalStreamManager { } } } catch (e) { - container.logger.error(`Failed to refresh wal streams`, e); + logger.error(`Failed to refresh wal streams`, e); } await new Promise((resolve) => setTimeout(resolve, 5000)); } @@ -116,7 +116,7 @@ export class WalStreamManager { try { await db.query(`SELECT * FROM pg_logical_emit_message(false, 'powersync', 'ping')`); } catch (e) { - container.logger.warn(`Failed to ping`, e); + logger.warn(`Failed to ping`, e); } this.lastPing = now; } @@ -167,7 +167,7 @@ export class WalStreamManager { // for example from stricter validation that was added. // This will be retried every couple of seconds. // When new (valid) sync rules are deployed and processed, this one be disabled. - container.logger.error(`Failed to start replication for ${syncRules.slot_name}`, e); + logger.error(`Failed to start replication for ${syncRules.slot_name}`, e); } } } @@ -183,7 +183,7 @@ export class WalStreamManager { await stream.terminate(); } catch (e) { // This will be retried - container.logger.warn(`Failed to terminate ${stream.slot_name}`, e); + logger.warn(`Failed to terminate ${stream.slot_name}`, e); } } @@ -206,7 +206,7 @@ export class WalStreamManager { await lock.release(); } } catch (e) { - container.logger.warn(`Failed to terminate ${syncRules.slot_name}`, e); + logger.warn(`Failed to terminate ${syncRules.slot_name}`, e); } } } diff --git a/packages/service-core/src/replication/WalStreamRunner.ts b/packages/service-core/src/replication/WalStreamRunner.ts index 6d866f2de..ce3ff8759 100644 --- a/packages/service-core/src/replication/WalStreamRunner.ts +++ b/packages/service-core/src/replication/WalStreamRunner.ts @@ -6,7 +6,7 @@ import * as util from '../util/util-index.js'; import { ErrorRateLimiter } from './ErrorRateLimiter.js'; import { MissingReplicationSlotError, WalStream } from './WalStream.js'; import { ResolvedConnection } from '../util/config/types.js'; -import { container } from '@powersync/lib-services-framework'; +import { container, logger } from '@powersync/lib-services-framework'; export interface WalStreamRunnerOptions { factory: storage.BucketStorageFactory; @@ -51,7 +51,7 @@ export class WalStreamRunner { replication_slot: this.slot_name } }); - container.logger.error(`Replication failed on ${this.slot_name}`, e); + logger.error(`Replication failed on ${this.slot_name}`, e); if (e instanceof MissingReplicationSlotError) { // This stops replication on this slot, and creates a new slot @@ -96,7 +96,7 @@ export class WalStreamRunner { }); await stream.replicate(); } catch (e) { - container.logger.error(`Replication error`, e); + logger.error(`Replication error`, e); if (e.cause != null) { // Example: // PgError.conn_ended: Unable to do postgres query on ended connection @@ -118,7 +118,7 @@ export class WalStreamRunner { // [Symbol(pg.ErrorResponse)]: undefined // } // Without this additional log, the cause would not be visible in the logs. 
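The comment block above is about `Error.cause`: pgwire wraps the underlying connection error, and most log formatters only serialize the top-level error, so the cause has to be logged explicitly. A tiny self-contained illustration of the pattern (the wrapped error below is invented for the example):

```ts
function logErrorWithCause(log: { error: (msg: string, err: unknown) => void }, e: unknown) {
  log.error('Replication error', e);
  const cause = (e as { cause?: unknown } | null)?.cause;
  if (cause != null) {
    // Surface the nested error explicitly, mirroring the hunk below.
    log.error('cause', cause);
  }
}

// Example: a low-level query error wrapped in a higher-level replication error.
const low = new Error('Unable to do postgres query on ended connection');
const high = new Error('Replication failed', { cause: low });
logErrorWithCause(console, high);
```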
- container.logger.error(`cause`, e.cause); + logger.error(`cause`, e.cause); } if (e instanceof MissingReplicationSlotError) { throw e; @@ -144,7 +144,7 @@ export class WalStreamRunner { * This will also release the lock if start() was called earlier. */ async stop(options?: { force?: boolean }) { - container.logger.info(`${this.slot_name} Stopping replication`); + logger.info(`${this.slot_name} Stopping replication`); // End gracefully this.abortController.abort(); @@ -161,7 +161,7 @@ export class WalStreamRunner { * Stops replication if needed. */ async terminate(options?: { force?: boolean }) { - container.logger.info(`${this.slot_name} Terminating replication`); + logger.info(`${this.slot_name} Terminating replication`); await this.stop(options); const slotName = this.slot_name; diff --git a/packages/service-core/src/routes/endpoints/socket-route.ts b/packages/service-core/src/routes/endpoints/socket-route.ts index 576a58735..c2c89a407 100644 --- a/packages/service-core/src/routes/endpoints/socket-route.ts +++ b/packages/service-core/src/routes/endpoints/socket-route.ts @@ -1,6 +1,6 @@ import { serialize } from 'bson'; import { SyncParameters, normalizeTokenParameters } from '@powersync/service-sync-rules'; -import { container, errors, schema } from '@powersync/lib-services-framework'; +import { errors, logger, schema } from '@powersync/lib-services-framework'; import * as util from '../../util/util-index.js'; import { streamResponse } from '../../sync/sync.js'; @@ -123,7 +123,7 @@ export const syncStreamReactive: SocketRouteGenerator = (router) => // Convert to our standard form before responding. // This ensures the error can be serialized. const error = new errors.InternalServerError(ex); - container.logger.error('Sync stream error', error); + logger.error('Sync stream error', error); responder.onError(error); } finally { responder.onComplete(); diff --git a/packages/service-core/src/routes/endpoints/sync-stream.ts b/packages/service-core/src/routes/endpoints/sync-stream.ts index c7e30eedc..6b778d757 100644 --- a/packages/service-core/src/routes/endpoints/sync-stream.ts +++ b/packages/service-core/src/routes/endpoints/sync-stream.ts @@ -1,6 +1,6 @@ import { Readable } from 'stream'; import { SyncParameters, normalizeTokenParameters } from '@powersync/service-sync-rules'; -import { container, errors, router, schema } from '@powersync/lib-services-framework'; +import { errors, logger, router, schema } from '@powersync/lib-services-framework'; import * as sync from '../../sync/sync-index.js'; import * as util from '../../util/util-index.js'; @@ -76,7 +76,7 @@ export const syncStreamed = routeDefinition({ controller.abort(); // Note: This appears as a 200 response in the logs. 
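The `sync-stream.ts` hunk around this point ties the life of the streamed response to an `AbortController`: when the request goes away the controller is aborted, and because the 200 status line was already sent, the disconnect still shows up as a 200 in the logs. A minimal sketch of that wiring, with a placeholder `ticker` generator standing in for the real sync stream:

```ts
import { Readable } from 'stream';

// Stand-in for the real sync line generator.
async function* ticker(signal: AbortSignal) {
  let line = 0;
  while (!signal.aborted) {
    yield JSON.stringify({ line: line++ }) + '\n';
    await new Promise((resolve) => setTimeout(resolve, 1000));
  }
}

export function makeStreamingResponse(): Readable {
  const controller = new AbortController();
  const stream = Readable.from(ticker(controller.signal));
  // When the client disconnects the readable is destroyed; stop the generator too.
  stream.on('close', () => controller.abort());
  return stream;
}

// e.g. with Fastify: reply.send(makeStreamingResponse())
```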
if (error.message != 'Shutting down system') { - container.logger.error('Streaming sync request failed', error); + logger.error('Streaming sync request failed', error); } }); diff --git a/packages/service-core/src/routes/hooks.ts b/packages/service-core/src/routes/hooks.ts index 2c4a641fb..4d3f3e322 100644 --- a/packages/service-core/src/routes/hooks.ts +++ b/packages/service-core/src/routes/hooks.ts @@ -1,7 +1,6 @@ import type fastify from 'fastify'; import a from 'async'; - -import { container } from '@powersync/lib-services-framework'; +import { logger } from '@powersync/lib-services-framework'; export type CreateRequestQueueParams = { max_queue_depth: number; @@ -22,7 +21,7 @@ export const createRequestQueueHook = (params: CreateRequestQueueParams): fastif (params.max_queue_depth == 0 && request_queue.running() == params.concurrency) || (params.max_queue_depth > 0 && request_queue.length() >= params.max_queue_depth) ) { - container.logger.warn(`${request.method} ${request.url}`, { + logger.warn(`${request.method} ${request.url}`, { status: 429, method: request.method, path: request.url, diff --git a/packages/service-core/src/routes/route-register.ts b/packages/service-core/src/routes/route-register.ts index df95753d3..3c6acb16c 100644 --- a/packages/service-core/src/routes/route-register.ts +++ b/packages/service-core/src/routes/route-register.ts @@ -1,6 +1,6 @@ import fastify from 'fastify'; -import { container, errors, router, HTTPMethod } from '@powersync/lib-services-framework'; +import { errors, router, HTTPMethod, logger } from '@powersync/lib-services-framework'; import { Context, ContextProvider, RequestEndpoint, RequestEndpointHandlerPayload } from './router.js'; export type FastifyEndpoint = RequestEndpoint & { @@ -83,7 +83,7 @@ export function registerFastifyRoutes( await reply.send(response.data); } finally { await response.afterSend?.(); - container.logger.info(`${e.method} ${request.url}`, { + logger.info(`${e.method} ${request.url}`, { duration_ms: Math.round(new Date().valueOf() - startTime.valueOf() + Number.EPSILON), status: response.status, method: e.method, diff --git a/packages/service-core/src/runner/teardown.ts b/packages/service-core/src/runner/teardown.ts index a964fa2f7..dbf3a3621 100644 --- a/packages/service-core/src/runner/teardown.ts +++ b/packages/service-core/src/runner/teardown.ts @@ -5,12 +5,11 @@ import * as timers from 'timers/promises'; -import { container } from '@powersync/lib-services-framework'; - import * as db from '../db/db-index.js'; import * as storage from '../storage/storage-index.js'; import * as utils from '../util/util-index.js'; import * as replication from '../replication/replication-index.js'; +import { logger } from '@powersync/lib-services-framework'; /** * Attempt to terminate a single sync rules instance. 
@@ -35,9 +34,9 @@ async function terminateReplicator( lock }); - container.logger.info(`Terminating replication slot ${stream.slot_name}`); + logger.info(`Terminating replication slot ${stream.slot_name}`); await stream.terminate(); - container.logger.info(`Terminated replication slot ${stream.slot_name}`); + logger.info(`Terminated replication slot ${stream.slot_name}`); } finally { await lock.release(); } @@ -65,7 +64,7 @@ async function terminateReplicators( } catch (e) { retry = true; console.error(e); - container.logger.warn(`Failed to terminate ${syncRules.slot_name}`, e); + logger.warn(`Failed to terminate ${syncRules.slot_name}`, e); } } if (!retry) { @@ -81,22 +80,22 @@ export async function teardown(runnerConfig: utils.RunnerConfig) { const config = await utils.loadConfig(runnerConfig); const mongoDB = storage.createPowerSyncMongo(config.storage); try { - container.logger.info(`Waiting for auth`); + logger.info(`Waiting for auth`); await db.mongo.waitForAuth(mongoDB.db); const bucketStorage = new storage.MongoBucketStorage(mongoDB, { slot_name_prefix: config.slot_name_prefix }); const connection = config.connection; - container.logger.info(`Terminating replication slots`); + logger.info(`Terminating replication slots`); if (connection) { await terminateReplicators(bucketStorage, connection); } const database = mongoDB.db; - container.logger.info(`Dropping database ${database.namespace}`); + logger.info(`Dropping database ${database.namespace}`); await database.dropDatabase(); - container.logger.info(`Done`); + logger.info(`Done`); await mongoDB.client.close(); // If there was an error connecting to postgress, the process may stay open indefinitely. @@ -104,7 +103,7 @@ export async function teardown(runnerConfig: utils.RunnerConfig) { // We do not consider those errors a teardown failure. 
process.exit(0); } catch (e) { - container.logger.error(`Teardown failure`, e); + logger.error(`Teardown failure`, e); await mongoDB.client.close(); process.exit(1); } diff --git a/packages/service-core/src/storage/MongoBucketStorage.ts b/packages/service-core/src/storage/MongoBucketStorage.ts index 1fab35d78..de7a9901a 100644 --- a/packages/service-core/src/storage/MongoBucketStorage.ts +++ b/packages/service-core/src/storage/MongoBucketStorage.ts @@ -24,7 +24,7 @@ import { PowerSyncMongo, PowerSyncMongoOptions } from './mongo/db.js'; import { SyncRuleDocument, SyncRuleState } from './mongo/models.js'; import { generateSlotName } from './mongo/util.js'; import { v4 as uuid } from 'uuid'; -import { container } from '@powersync/lib-services-framework'; +import { logger } from '@powersync/lib-services-framework'; export interface MongoBucketStorageOptions extends PowerSyncMongoOptions {} @@ -74,13 +74,13 @@ export class MongoBucketStorage implements BucketStorageFactory { const active = await this.getActiveSyncRulesContent(); if (next?.sync_rules_content == sync_rules) { - container.logger.info('Sync rules from configuration unchanged'); + logger.info('Sync rules from configuration unchanged'); return { updated: false }; } else if (next == null && active?.sync_rules_content == sync_rules) { - container.logger.info('Sync rules from configuration unchanged'); + logger.info('Sync rules from configuration unchanged'); return { updated: false }; } else { - container.logger.info('Sync rules updated from configuration'); + logger.info('Sync rules updated from configuration'); const persisted_sync_rules = await this.updateSyncRules({ content: sync_rules, lock: options?.lock diff --git a/packages/service-core/src/storage/mongo/MongoBucketBatch.ts b/packages/service-core/src/storage/mongo/MongoBucketBatch.ts index 39de47982..400604659 100644 --- a/packages/service-core/src/storage/mongo/MongoBucketBatch.ts +++ b/packages/service-core/src/storage/mongo/MongoBucketBatch.ts @@ -4,7 +4,7 @@ import * as mongo from 'mongodb'; import * as util from '../../util/util-index.js'; import * as replication from '../../replication/replication-index.js'; -import { container, errors } from '@powersync/lib-services-framework'; +import { container, errors, logger } from '@powersync/lib-services-framework'; import { BucketStorageBatch, FlushedResult, mergeToast, SaveOptions } from '../BucketStorage.js'; import { SourceTable } from '../SourceTable.js'; import { PowerSyncMongo } from './db.js'; @@ -345,7 +345,7 @@ export class MongoBucketBatch implements BucketStorageBatch { } } ); - container.logger.error( + logger.error( `Failed to evaluate data query on ${record.sourceTable.qualifiedName}.${record.after?.id}: ${error.error}` ); } @@ -385,7 +385,7 @@ export class MongoBucketBatch implements BucketStorageBatch { } } ); - container.logger.error( + logger.error( `Failed to evaluate parameter query on ${record.sourceTable.qualifiedName}.${after.id}: ${error.error}` ); } @@ -439,7 +439,7 @@ export class MongoBucketBatch implements BucketStorageBatch { if (e instanceof mongo.MongoError && e.hasErrorLabel('TransientTransactionError')) { // Likely write conflict caused by concurrent write stream replicating } else { - container.logger.warn('Transaction error', e as Error); + logger.warn('Transaction error', e as Error); } await new Promise((resolve) => setTimeout(resolve, Math.random() * 50)); throw e; @@ -464,7 +464,7 @@ export class MongoBucketBatch implements BucketStorageBatch { await this.withTransaction(async () => { 
flushTry += 1; if (flushTry % 10 == 0) { - container.logger.info(`${this.slot_name} ${description} - try ${flushTry}`); + logger.info(`${this.slot_name} ${description} - try ${flushTry}`); } if (flushTry > 20 && Date.now() > lastTry) { throw new Error('Max transaction tries exceeded'); @@ -529,13 +529,11 @@ export class MongoBucketBatch implements BucketStorageBatch { if (this.last_checkpoint_lsn != null && lsn <= this.last_checkpoint_lsn) { // When re-applying transactions, don't create a new checkpoint until // we are past the last transaction. - container.logger.info(`Re-applied transaction ${lsn} - skipping checkpoint`); + logger.info(`Re-applied transaction ${lsn} - skipping checkpoint`); return false; } if (lsn < this.no_checkpoint_before_lsn) { - container.logger.info( - `Waiting until ${this.no_checkpoint_before_lsn} before creating checkpoint, currently at ${lsn}` - ); + logger.info(`Waiting until ${this.no_checkpoint_before_lsn} before creating checkpoint, currently at ${lsn}`); return false; } @@ -599,7 +597,7 @@ export class MongoBucketBatch implements BucketStorageBatch { } async save(record: SaveOptions): Promise { - container.logger.debug(`Saving ${record.tag}:${record.before?.id}/${record.after?.id}`); + logger.debug(`Saving ${record.tag}:${record.before?.id}/${record.after?.id}`); this.batch ??= new OperationBatch(); this.batch.push(new RecordOperation(record)); diff --git a/packages/service-core/src/storage/mongo/MongoSyncRulesLock.ts b/packages/service-core/src/storage/mongo/MongoSyncRulesLock.ts index cb7a6c31a..5a9711ab3 100644 --- a/packages/service-core/src/storage/mongo/MongoSyncRulesLock.ts +++ b/packages/service-core/src/storage/mongo/MongoSyncRulesLock.ts @@ -2,7 +2,7 @@ import crypto from 'crypto'; import { PersistedSyncRulesContent, ReplicationLock } from '../BucketStorage.js'; import { PowerSyncMongo } from './db.js'; -import { container } from '@powersync/lib-services-framework'; +import { logger } from '@powersync/lib-services-framework'; /** * Manages a lock on a sync rules document, so that only one process @@ -40,7 +40,7 @@ export class MongoSyncRulesLock implements ReplicationLock { try { await this.refresh(); } catch (e) { - container.logger.error('Failed to refresh lock', e); + logger.error('Failed to refresh lock', e); clearInterval(this.refreshInterval); } }, 30_130); @@ -59,7 +59,7 @@ export class MongoSyncRulesLock implements ReplicationLock { ); if (result.modifiedCount == 0) { // Log and ignore - container.logger.warn(`Lock already released: ${this.sync_rules_id}/${this.lock_id}`); + logger.warn(`Lock already released: ${this.sync_rules_id}/${this.lock_id}`); } } diff --git a/packages/service-core/src/storage/mongo/PersistedBatch.ts b/packages/service-core/src/storage/mongo/PersistedBatch.ts index 22275441f..486c9d800 100644 --- a/packages/service-core/src/storage/mongo/PersistedBatch.ts +++ b/packages/service-core/src/storage/mongo/PersistedBatch.ts @@ -16,7 +16,7 @@ import { SourceKey } from './models.js'; import { serializeLookup } from './util.js'; -import { container } from '@powersync/lib-services-framework'; +import { logger } from '@powersync/lib-services-framework'; /** * Maximum size of operations we write in a single transaction. @@ -253,7 +253,7 @@ export class PersistedBatch { }); } - container.logger.info( + logger.info( `powersync_${this.group_id} Flushed ${this.bucketData.length} + ${this.bucketParameters.length} + ${ this.currentData.length } updates, ${Math.round(this.currentSize / 1024)}kb. 
Last op_id: ${this.debugLastOpId}` diff --git a/packages/service-core/src/sync/sync.ts b/packages/service-core/src/sync/sync.ts index 61e398364..795ae2dac 100644 --- a/packages/service-core/src/sync/sync.ts +++ b/packages/service-core/src/sync/sync.ts @@ -1,6 +1,5 @@ import { JSONBig, JsonContainer } from '@powersync/service-jsonbig'; import { SyncParameters } from '@powersync/service-sync-rules'; -import { container } from '@powersync/lib-services-framework'; import { Semaphore } from 'async-mutex'; import { AbortError } from 'ix/aborterror.js'; @@ -11,6 +10,7 @@ import * as util from '../util/util-index.js'; import { mergeAsyncIterables } from './merge.js'; import { TokenStreamOptions, tokenStream } from './util.js'; import { Metrics } from '../metrics/Metrics.js'; +import { logger } from '@powersync/lib-services-framework'; /** * Maximum number of connections actively fetching data. @@ -141,7 +141,7 @@ async function* streamResponseInner( message += `buckets: ${allBuckets.length} | `; message += `updated: ${limitedBuckets(diff.updatedBuckets, 20)} | `; message += `removed: ${limitedBuckets(diff.removedBuckets, 20)} | `; - container.logger.info(message); + logger.info(message); const checksum_line: util.StreamingSyncCheckpointDiff = { checkpoint_diff: { @@ -156,7 +156,7 @@ async function* streamResponseInner( } else { let message = `New checkpoint: ${checkpoint} | write: ${writeCheckpoint} | `; message += `buckets: ${allBuckets.length} ${limitedBuckets(allBuckets, 20)}`; - container.logger.info(message); + logger.info(message); bucketsToFetch = allBuckets; const checksum_line: util.StreamingSyncCheckpoint = { checkpoint: { @@ -246,7 +246,7 @@ async function* bucketDataBatch(request: BucketDataRequest) { if (r.data.length == 0) { continue; } - container.logger.debug(`Sending data for ${r.bucket}`); + logger.debug(`Sending data for ${r.bucket}`); let send_data: any; if (binary_data) { diff --git a/packages/service-core/src/system/CorePowerSyncSystem.ts b/packages/service-core/src/system/CorePowerSyncSystem.ts index 002adf782..8c37f137a 100644 --- a/packages/service-core/src/system/CorePowerSyncSystem.ts +++ b/packages/service-core/src/system/CorePowerSyncSystem.ts @@ -1,5 +1,5 @@ import * as pgwire from '@powersync/service-jpgwire'; -import { LifeCycledSystem, container } from '@powersync/lib-services-framework'; +import { LifeCycledSystem, container, logger } from '@powersync/lib-services-framework'; import * as storage from '../storage/storage-index.js'; import * as utils from '../util/util-index.js'; @@ -36,7 +36,7 @@ export abstract class CorePowerSyncSystem extends LifeCycledSystem { // Note: This does not work well when streaming requests are queued. In that case, the server still doesn't // close in the 30-second timeout. 
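`CorePowerSyncSystem` above keeps a set of stop handlers and calls each one when the system closes, with the caveat that queued streaming requests can still hold the server open past the shutdown timeout. The registry itself is a small pattern; a stripped-down sketch (the disposer returned by `add` is an assumption, not the framework's actual API):

```ts
type StopHandler = () => void;

class StopHandlerRegistry {
  private handlers = new Set<StopHandler>();
  closed = false;

  /** Register a handler; the returned disposer removes it when a stream ends normally. */
  add(handler: StopHandler): () => void {
    this.handlers.add(handler);
    return () => this.handlers.delete(handler);
  }

  /** Ask every active stream to stop. Safe to call more than once. */
  close() {
    if (this.closed) return;
    this.closed = true;
    console.info(`Closing ${this.handlers.size} streams`);
    for (const handler of this.handlers) {
      handler();
    }
    this.handlers.clear();
  }
}
```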
this.closed = true; - container.logger.info(`Closing ${this.stopHandlers.size} streams`); + logger.info(`Closing ${this.stopHandlers.size} streams`); for (let handler of this.stopHandlers) { handler(); } diff --git a/packages/service-core/src/util/config/collectors/impl/filesystem-config-collector.ts b/packages/service-core/src/util/config/collectors/impl/filesystem-config-collector.ts index 66ca65f63..58443d848 100644 --- a/packages/service-core/src/util/config/collectors/impl/filesystem-config-collector.ts +++ b/packages/service-core/src/util/config/collectors/impl/filesystem-config-collector.ts @@ -1,9 +1,9 @@ import * as fs from 'fs/promises'; import * as path from 'path'; -import { container } from '@powersync/lib-services-framework'; import { ConfigCollector, ConfigFileFormat } from '../config-collector.js'; import { RunnerConfig } from '../../types.js'; +import { logger } from '@powersync/lib-services-framework'; export class FileSystemConfigCollector extends ConfigCollector { get name(): string { @@ -25,7 +25,7 @@ export class FileSystemConfigCollector extends ConfigCollector { throw new Error(`Config file path ${resolvedPath} was specified, but the file does not exist.`); } - container.logger.info(`Collecting PowerSync configuration from File: ${resolvedPath}`); + logger.info(`Collecting PowerSync configuration from File: ${resolvedPath}`); const content = await fs.readFile(resolvedPath, 'utf-8'); let contentType: ConfigFileFormat | undefined; diff --git a/packages/service-core/src/util/config/compound-config-collector.ts b/packages/service-core/src/util/config/compound-config-collector.ts index 3be9ae56d..3ae856604 100644 --- a/packages/service-core/src/util/config/compound-config-collector.ts +++ b/packages/service-core/src/util/config/compound-config-collector.ts @@ -9,7 +9,7 @@ import { Base64SyncRulesCollector } from './sync-rules/impl/base64-sync-rules-co import { InlineSyncRulesCollector } from './sync-rules/impl/inline-sync-rules-collector.js'; import { FileSystemSyncRulesCollector } from './sync-rules/impl/filesystem-sync-rules-collector.js'; import { FallbackConfigCollector } from './collectors/impl/fallback-config-collector.js'; -import { container } from '@powersync/lib-services-framework'; +import { logger } from '@powersync/lib-services-framework'; const POWERSYNC_DEV_KID = 'powersync-dev'; @@ -140,7 +140,7 @@ export class CompoundConfigCollector { if (baseConfig) { return baseConfig; } - container.logger.debug( + logger.debug( `Could not collect PowerSync config with ${collector.name} method. Moving on to next method if available.` ); } catch (ex) { @@ -161,7 +161,7 @@ export class CompoundConfigCollector { if (config) { return config; } - container.logger.debug( + logger.debug( `Could not collect sync rules with ${collector.name} method. Moving on to next method if available.` ); } catch (ex) { diff --git a/packages/service-core/src/util/memory-tracking.ts b/packages/service-core/src/util/memory-tracking.ts index 5bc3121ee..31d8dd895 100644 --- a/packages/service-core/src/util/memory-tracking.ts +++ b/packages/service-core/src/util/memory-tracking.ts @@ -1,4 +1,4 @@ -import { container } from '@powersync/lib-services-framework'; +import { logger } from '@powersync/lib-services-framework'; /** * Track and log memory usage. 
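`CompoundConfigCollector` above walks its list of collectors in order, returns the first config produced, and logs a debug line when a collector comes up empty before moving on. A compact sketch of that chain; the simplified interface and the behaviour of the `catch` branch (whose body is not shown above) are assumptions:

```ts
interface ConfigCollector<T> {
  name: string;
  collect(): Promise<T | null>;
}

async function collectFirst<T>(collectors: ConfigCollector<T>[]): Promise<T> {
  for (const collector of collectors) {
    try {
      const config = await collector.collect();
      if (config) {
        return config;
      }
      console.debug(`Could not collect config with ${collector.name} method. Moving on to next method if available.`);
    } catch (ex) {
      // Assumption: a collector that throws is treated like one that found nothing.
      console.debug(`Collector ${collector.name} failed`, ex);
    }
  }
  throw new Error('No configuration could be collected');
}
```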
@@ -57,7 +57,7 @@ export function trackMemoryUsage() { ) )`.replaceAll(/\s+/g, ' '); - container.logger.info(output); + logger.info(output); } }, 50); } diff --git a/packages/service-core/src/util/pgwire_utils.ts b/packages/service-core/src/util/pgwire_utils.ts index e0b1c7622..9aa042980 100644 --- a/packages/service-core/src/util/pgwire_utils.ts +++ b/packages/service-core/src/util/pgwire_utils.ts @@ -6,7 +6,7 @@ import * as pgwire from '@powersync/service-jpgwire'; import { SqliteJsonValue, SqliteRow, ToastableSqliteRow, toSyncRulesRow } from '@powersync/service-sync-rules'; import * as replication from '../replication/replication-index.js'; -import { container } from '@powersync/lib-services-framework'; +import { logger } from '@powersync/lib-services-framework'; /** * pgwire message -> SQLite row. @@ -133,7 +133,7 @@ export async function retriedQuery(db: pgwire.PgClient, ...args: any[]) { if (tries == 1) { throw e; } - container.logger.warn('Query error, retrying', e); + logger.warn('Query error, retrying', e); } } } diff --git a/packages/service-core/src/util/utils.ts b/packages/service-core/src/util/utils.ts index 65f5208c5..5e45fed5b 100644 --- a/packages/service-core/src/util/utils.ts +++ b/packages/service-core/src/util/utils.ts @@ -5,7 +5,7 @@ import { pgwireRows } from '@powersync/service-jpgwire'; import * as storage from '../storage/storage-index.js'; import { BucketChecksum, OpId } from './protocol-types.js'; import { retriedQuery } from './pgwire_utils.js'; -import { container } from '@powersync/lib-services-framework'; +import { logger } from '@powersync/lib-services-framework'; export type ChecksumMap = Map; @@ -90,14 +90,14 @@ export async function getClientCheckpoint( const timeout = options?.timeout ?? 50_000; - container.logger.info(`Waiting for LSN checkpoint: ${lsn}`); + logger.info(`Waiting for LSN checkpoint: ${lsn}`); while (Date.now() - start < timeout) { const cp = await bucketStorage.getActiveCheckpoint(); if (!cp.hasSyncRules()) { throw new Error('No sync rules available'); } if (cp.lsn >= lsn) { - container.logger.info(`Got write checkpoint: ${lsn} : ${cp.checkpoint}`); + logger.info(`Got write checkpoint: ${lsn} : ${cp.checkpoint}`); return cp.checkpoint; } @@ -117,6 +117,6 @@ export async function createWriteCheckpoint( ); const id = await bucketStorage.createWriteCheckpoint(user_id, { '1': lsn }); - container.logger.info(`Write checkpoint 2: ${JSON.stringify({ lsn, id: String(id) })}`); + logger.info(`Write checkpoint 2: ${JSON.stringify({ lsn, id: String(id) })}`); return id; } diff --git a/service/src/entry.ts b/service/src/entry.ts index 8bb9d3f8a..13d536b23 100644 --- a/service/src/entry.ts +++ b/service/src/entry.ts @@ -1,13 +1,4 @@ -import winston from 'winston'; import { entry, utils } from '@powersync/service-core'; -import { container, Logger } from '@powersync/lib-services-framework'; - -// Configure logging to console -container.logger.configure({ - format: utils.env.NODE_ENV == 'production' ? 
Logger.production_format : Logger.development_format, - transports: [new winston.transports.Console()] -}); - import { startServer } from './runners/server.js'; import { startStreamWorker } from './runners/stream-worker.js'; diff --git a/service/src/runners/server.ts b/service/src/runners/server.ts index d2d4bd102..bf453989e 100644 --- a/service/src/runners/server.ts +++ b/service/src/runners/server.ts @@ -2,7 +2,7 @@ import { deserialize } from 'bson'; import fastify from 'fastify'; import cors from '@fastify/cors'; import * as core from '@powersync/service-core'; -import { container, errors } from '@powersync/lib-services-framework'; +import { container, errors, logger } from '@powersync/lib-services-framework'; import { RSocketRequestMeta } from '@powersync/service-rsocket-router'; import { PowerSyncSystem } from '../system/PowerSyncSystem.js'; @@ -11,13 +11,11 @@ import { SocketRouter } from '../routes/router.js'; * Starts an API server */ export async function startServer(runnerConfig: core.utils.RunnerConfig) { - container.logger.info('Booting'); + logger.info('Booting'); const config = await core.utils.loadConfig(runnerConfig); const system = new PowerSyncSystem(config); - const { logger } = container; - const server = fastify.fastify(); // Create a separate context for concurrency queueing @@ -97,7 +95,7 @@ export async function startServer(runnerConfig: core.utils.RunnerConfig) { }; } } catch (ex) { - container.logger.error(ex); + logger.error(ex); } return { diff --git a/service/src/runners/stream-worker.ts b/service/src/runners/stream-worker.ts index 3a5cb61d9..96efef381 100644 --- a/service/src/runners/stream-worker.ts +++ b/service/src/runners/stream-worker.ts @@ -1,11 +1,9 @@ import { migrations, replication, utils, Metrics } from '@powersync/service-core'; -import { container } from '@powersync/lib-services-framework'; +import { container, logger } from '@powersync/lib-services-framework'; import { PowerSyncSystem } from '../system/PowerSyncSystem.js'; export async function startStreamWorker(runnerConfig: utils.RunnerConfig) { - const { logger } = container; - logger.info('Booting'); const config = await utils.loadConfig(runnerConfig); diff --git a/service/src/util/alerting.ts b/service/src/util/alerting.ts index b075c9161..7b40d5a61 100644 --- a/service/src/util/alerting.ts +++ b/service/src/util/alerting.ts @@ -1,7 +1,7 @@ import * as sentry_types from '@sentry/types'; import * as sentry from '@sentry/node'; import { utils } from '@powersync/service-core'; -import { container, ErrorReporter } from '@powersync/lib-services-framework'; +import { ErrorReporter, logger } from '@powersync/lib-services-framework'; // Generally ignore errors that are due to configuration issues, rather than // service bugs. @@ -31,9 +31,7 @@ export const createSentryReporter = (opts?: { beforeSend: opts?.beforeSend ? 
opts.beforeSend : undefined }); } else { - container.logger.debug( - 'Alerts configured with sentry reporter but no SENTRY_DSN environment variable has been set' - ); + logger.debug('Alerts configured with sentry reporter but no SENTRY_DSN environment variable has been set'); } return { From 8d396dc3d014aef3214b2b6f4ab5a484913a682b Mon Sep 17 00:00:00 2001 From: Steven Ontong Date: Thu, 20 Jun 2024 14:38:19 +0200 Subject: [PATCH 25/36] update readme --- DEVELOP.md | 4 +++- README.md | 9 +++++++-- 2 files changed, 10 insertions(+), 3 deletions(-) diff --git a/DEVELOP.md b/DEVELOP.md index dbf443d7f..fb88dde74 100644 --- a/DEVELOP.md +++ b/DEVELOP.md @@ -85,4 +85,6 @@ This repository uses Changesets. Add changesets to changed packages before mergi changeset add ``` -Merging a PR with changeset files will automatically create a release PR. Merging the release PR will bump versions, tag and publish packages and the Docker image. The Docker image version is extracted from the `./service/package.json` `version` field. +Merging a PR with changeset files will automatically create a release PR. Merging the release PR will bump versions, tag and publish packages. + +The Docker image is published by manually triggering the `Docker Image Release` Github Action. The Docker image version is extracted from the `./service/package.json` `version` field. diff --git a/README.md b/README.md index 4df791c63..8f3fe54db 100644 --- a/README.md +++ b/README.md @@ -2,7 +2,7 @@
-*[PowerSync](https://www.powersync.com) is a Postgres-SQLite sync layer, which helps developers to create local-first real-time reactive apps that work seamlessly both online and offline.* +_[PowerSync](https://www.powersync.com) is a Postgres-SQLite sync layer, which helps developers to create local-first real-time reactive apps that work seamlessly both online and offline._ # PowerSync Service @@ -35,6 +35,12 @@ The service can be started using the public Docker image. See the image [notes]( - [packages/types](./packages/types/README.md) - Type definitions for the PowerSync service +## Monorepo Structure: Libraries + +- [libs/lib-services](./libs/lib-services/README.md) + + - A light-weight set of definitions and utilities for micro services + ## Service The PowerSync service code is located in the `service` folder. This project is used to build the `journeyapps/powersync-service` Docker image. @@ -42,4 +48,3 @@ The PowerSync service code is located in the `service` folder. This project is u # Notes This mono repo currently relies on `restricted` packages. Currently this repo can only be built in CI. These dependencies will be removed soon. - From 1ba070be294aa7c6a0a07d645e704846eace34d7 Mon Sep 17 00:00:00 2001 From: Steven Ontong Date: Thu, 20 Jun 2024 15:54:19 +0200 Subject: [PATCH 26/36] register sentry reporter --- libs/lib-services/src/utils/environment-variables.ts | 4 ---- packages/service-core/src/routes/endpoints/sync-rules.ts | 2 +- packages/service-core/src/runner/teardown.ts | 2 -- service/src/entry.ts | 4 ++++ 4 files changed, 5 insertions(+), 7 deletions(-) diff --git a/libs/lib-services/src/utils/environment-variables.ts b/libs/lib-services/src/utils/environment-variables.ts index 8bbeb11aa..c7717133a 100644 --- a/libs/lib-services/src/utils/environment-variables.ts +++ b/libs/lib-services/src/utils/environment-variables.ts @@ -48,10 +48,6 @@ export const collectEnvironmentVariablesFromSchema = ( throw new Error('Invalid or missing environment variables'); } - if (result.data.DEV_MODE_DO_NOT_ENABLE_IN_PRODUCTION_OR_YOU_WILL_BE_FIRED) { - console.log('DEV MODE. DO NOT ENABLE IN PRODUCTION'); - } - return result.data; }; diff --git a/packages/service-core/src/routes/endpoints/sync-rules.ts b/packages/service-core/src/routes/endpoints/sync-rules.ts index 916f9ca22..a5eedfb56 100644 --- a/packages/service-core/src/routes/endpoints/sync-rules.ts +++ b/packages/service-core/src/routes/endpoints/sync-rules.ts @@ -1,5 +1,5 @@ import * as t from 'ts-codec'; -import { FastifyPluginAsync } from 'fastify'; +import type { FastifyPluginAsync } from 'fastify'; import * as pgwire from '@powersync/service-jpgwire'; import { errors, router, schema } from '@powersync/lib-services-framework'; import { SqlSyncRules, SyncRulesErrors } from '@powersync/service-sync-rules'; diff --git a/packages/service-core/src/runner/teardown.ts b/packages/service-core/src/runner/teardown.ts index dbf3a3621..8060a4047 100644 --- a/packages/service-core/src/runner/teardown.ts +++ b/packages/service-core/src/runner/teardown.ts @@ -74,8 +74,6 @@ async function terminateReplicators( } } -// TODO should there be a global context for things like alerting? 
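`collectEnvironmentVariablesFromSchema` in patch 26 above validates `process.env` against a schema and throws `Invalid or missing environment variables` when parsing fails; the `result.data` shape looks like a zod-style `safeParse`. A hedged sketch of that idea written directly against zod (whether the framework actually uses zod is not shown here, and the schema fields below are made up for the example):

```ts
import { z } from 'zod';

// Example schema only; the real service defines its own variables.
const EnvSchema = z.object({
  NODE_ENV: z.enum(['development', 'production']).default('development'),
  PS_PORT: z.coerce.number().default(8080)
});

export function collectEnv(env: NodeJS.ProcessEnv = process.env) {
  const result = EnvSchema.safeParse(env);
  if (!result.success) {
    console.error(result.error.flatten().fieldErrors);
    throw new Error('Invalid or missing environment variables');
  }
  return result.data;
}
```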
- export async function teardown(runnerConfig: utils.RunnerConfig) { const config = await utils.loadConfig(runnerConfig); const mongoDB = storage.createPowerSyncMongo(config.storage); diff --git a/service/src/entry.ts b/service/src/entry.ts index 13d536b23..d69042972 100644 --- a/service/src/entry.ts +++ b/service/src/entry.ts @@ -1,6 +1,10 @@ import { entry, utils } from '@powersync/service-core'; import { startServer } from './runners/server.js'; import { startStreamWorker } from './runners/stream-worker.js'; +import { container, ContainerImplementation } from '@powersync/lib-services-framework'; +import { createSentryReporter } from './util/alerting.js'; + +container.register(ContainerImplementation.REPORTER, createSentryReporter()); // Generate Commander CLI entry point program const { execute } = entry.generateEntryProgram({ From 43516e84fbecd98a71524b3b81a5e59f2e1eefcc Mon Sep 17 00:00:00 2001 From: Steven Ontong Date: Thu, 20 Jun 2024 16:19:27 +0200 Subject: [PATCH 27/36] cleanup template --- service/local-dev/powersync-template.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/service/local-dev/powersync-template.yaml b/service/local-dev/powersync-template.yaml index 9d9e5d5c2..47d6819b0 100644 --- a/service/local-dev/powersync-template.yaml +++ b/service/local-dev/powersync-template.yaml @@ -3,7 +3,7 @@ # See https://github.com/powersync-ja/powersync-service/blob/c6140e883a4a2ac9c8c2e46b7c31ad38e1c6d28a/packages/types/src/config/PowerSyncConfig.ts#L95 migrations: - disable_auto_migrate: true + disable_auto_migration: false replication: connections: From dc66d42b4f307207dbb6f416274da821ae37aea7 Mon Sep 17 00:00:00 2001 From: Steven Ontong Date: Thu, 20 Jun 2024 18:18:18 +0200 Subject: [PATCH 28/36] add ability to register default container implementations --- libs/lib-services/src/container.ts | 47 ++++++++++++++++++++++++------ packages/service-core/src/index.ts | 3 ++ service/src/entry.ts | 1 + 3 files changed, 42 insertions(+), 9 deletions(-) diff --git a/libs/lib-services/src/container.ts b/libs/lib-services/src/container.ts index a26b79347..0448a9f0d 100644 --- a/libs/lib-services/src/container.ts +++ b/libs/lib-services/src/container.ts @@ -1,3 +1,4 @@ +import _ from 'lodash'; import { ErrorReporter } from './alerts/definitions.js'; import { NoOpReporter } from './alerts/no-op-reporter.js'; import { ProbeModule, TerminationHandler, createFSProbe, createTerminationHandler } from './signals/signals-index.js'; @@ -14,36 +15,64 @@ export type ContainerImplementationTypes = { [ContainerImplementation.TERMINATION_HANDLER]: TerminationHandler; }; +export type RegisterDefaultsOptions = { + skip?: ContainerImplementation[]; +}; + +export type ContainerImplementationDefaultGenerators = { + [type in ContainerImplementation]: () => ContainerImplementationTypes[type]; +}; + +const DEFAULT_GENERATORS: ContainerImplementationDefaultGenerators = { + [ContainerImplementation.REPORTER]: () => NoOpReporter, + [ContainerImplementation.PROBES]: () => createFSProbe(), + [ContainerImplementation.TERMINATION_HANDLER]: () => createTerminationHandler() +}; + export class Container { - protected implementations: ContainerImplementationTypes; + protected implementations: Partial; /** * Manager for system health probes */ get probes() { - return this.implementations[ContainerImplementation.PROBES]; + return this.getImplementation(ContainerImplementation.PROBES); } /** * Error reporter. 
Defaults to a no-op reporter */ get reporter() { - return this.implementations[ContainerImplementation.REPORTER]; + return this.getImplementation(ContainerImplementation.REPORTER); } /** * Handler for termination of the Node process */ get terminationHandler() { - return this.implementations[ContainerImplementation.TERMINATION_HANDLER]; + return this.getImplementation(ContainerImplementation.TERMINATION_HANDLER); } constructor() { - this.implementations = { - [ContainerImplementation.REPORTER]: NoOpReporter, - [ContainerImplementation.PROBES]: createFSProbe(), - [ContainerImplementation.TERMINATION_HANDLER]: createTerminationHandler() - }; + this.implementations = {}; + } + + getImplementation(type: Type) { + const implementation = this.implementations[type]; + if (!implementation) { + throw new Error(`Implementation for ${type} has not been registered.`); + } + return implementation; + } + + /** + * Registers default implementations + */ + registerDefaults(options?: RegisterDefaultsOptions) { + _.difference(Object.values(ContainerImplementation), options?.skip ?? []).forEach((type) => { + const generator = DEFAULT_GENERATORS[type]; + this.implementations[type] = generator() as any; // :( + }); } /** diff --git a/packages/service-core/src/index.ts b/packages/service-core/src/index.ts index 1b88bbf98..b82d5d731 100644 --- a/packages/service-core/src/index.ts +++ b/packages/service-core/src/index.ts @@ -12,6 +12,9 @@ export * as db from './db/db-index.js'; export * from './entry/entry-index.js'; export * as entry from './entry/entry-index.js'; +// Re-export framework for easy use of Container API +export * as framework from '@powersync/lib-services-framework'; + export * from './metrics/Metrics.js'; export * as metrics from './metrics/Metrics.js'; diff --git a/service/src/entry.ts b/service/src/entry.ts index d69042972..ae57cccd7 100644 --- a/service/src/entry.ts +++ b/service/src/entry.ts @@ -4,6 +4,7 @@ import { startStreamWorker } from './runners/stream-worker.js'; import { container, ContainerImplementation } from '@powersync/lib-services-framework'; import { createSentryReporter } from './util/alerting.js'; +container.registerDefaults(); container.register(ContainerImplementation.REPORTER, createSentryReporter()); // Generate Commander CLI entry point program From 761e27130b3e62cb8ad7d30e7a75ae3ef639aee6 Mon Sep 17 00:00:00 2001 From: Steven Ontong Date: Thu, 20 Jun 2024 18:23:13 +0200 Subject: [PATCH 29/36] fix tests --- packages/service-core/test/src/setup.ts | 7 +++++++ packages/service-core/vitest.config.ts | 5 ++++- 2 files changed, 11 insertions(+), 1 deletion(-) create mode 100644 packages/service-core/test/src/setup.ts diff --git a/packages/service-core/test/src/setup.ts b/packages/service-core/test/src/setup.ts new file mode 100644 index 000000000..8e4ece4c9 --- /dev/null +++ b/packages/service-core/test/src/setup.ts @@ -0,0 +1,7 @@ +import { container } from '@powersync/lib-services-framework'; +import { beforeAll } from 'vitest'; + +beforeAll(() => { + // Your setup code here + container.registerDefaults(); +}); diff --git a/packages/service-core/vitest.config.ts b/packages/service-core/vitest.config.ts index 6b7a908fc..b392696b7 100644 --- a/packages/service-core/vitest.config.ts +++ b/packages/service-core/vitest.config.ts @@ -2,5 +2,8 @@ import { defineConfig } from 'vitest/config'; import tsconfigPaths from 'vite-tsconfig-paths'; export default defineConfig({ - plugins: [tsconfigPaths()] + plugins: [tsconfigPaths()], + test: { + setupFiles: './test/src/setup.ts' + } }); 
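Patch 28 above reworks the container so nothing is constructed eagerly: the implementation map starts empty, `registerDefaults()` fills in whatever is not explicitly skipped, and `getImplementation()` throws if something is read before being registered, which is why the test setup in patch 29 calls `container.registerDefaults()` in a `beforeAll`. A stripped-down standalone re-implementation of that shape, purely to illustrate the design and not the actual `@powersync/lib-services-framework` code:

```ts
enum Impl {
  REPORTER = 'reporter',
  PROBES = 'probes'
}

type ImplTypes = {
  [Impl.REPORTER]: { captureException: (e: unknown) => void };
  [Impl.PROBES]: { touch: () => Promise<void> };
};

const DEFAULTS: { [K in Impl]: () => ImplTypes[K] } = {
  [Impl.REPORTER]: () => ({ captureException: () => {} }),
  [Impl.PROBES]: () => ({ touch: async () => {} })
};

class MiniContainer {
  private implementations: Partial<ImplTypes> = {};

  getImplementation<K extends Impl>(type: K) {
    const implementation = this.implementations[type];
    if (!implementation) {
      throw new Error(`Implementation for ${type} has not been registered.`);
    }
    return implementation;
  }

  register<K extends Impl>(type: K, implementation: ImplTypes[K]) {
    this.implementations[type] = implementation;
  }

  registerDefaults(options?: { skip?: Impl[] }) {
    for (const type of Object.values(Impl)) {
      if (options?.skip?.includes(type)) continue;
      this.implementations[type] = DEFAULTS[type]() as any; // same escape hatch as the real code
    }
  }
}

// Usage mirrors service/src/entry.ts: defaults first, then override the reporter.
const container = new MiniContainer();
container.registerDefaults();
container.register(Impl.REPORTER, { captureException: (e) => console.error(e) });
```

Deferring construction this way lets an embedder skip or replace a default (as the Sentry reporter registration does) without ever instantiating the implementation it is throwing away.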
From 40186a303930664ba816f7099967a8403699f081 Mon Sep 17 00:00:00 2001 From: Steven Ontong Date: Thu, 20 Jun 2024 18:50:38 +0200 Subject: [PATCH 30/36] restore auth for publish actions --- .github/workflows/development_packages_release.yaml | 4 ++++ .github/workflows/packages_release.yaml | 4 ++++ 2 files changed, 8 insertions(+) diff --git a/.github/workflows/development_packages_release.yaml b/.github/workflows/development_packages_release.yaml index ccafb969d..9b67b4cee 100644 --- a/.github/workflows/development_packages_release.yaml +++ b/.github/workflows/development_packages_release.yaml @@ -40,6 +40,10 @@ jobs: restore-keys: | ${{ runner.os }}-pnpm-store- + - name: Add NPM auth + run: | + echo "//registry.npmjs.org/:_authToken=${{secrets.NPM_TOKEN}}" >> ~/.npmrc + - name: Install dependencies run: pnpm install diff --git a/.github/workflows/packages_release.yaml b/.github/workflows/packages_release.yaml index 01339799c..a7c73ce04 100644 --- a/.github/workflows/packages_release.yaml +++ b/.github/workflows/packages_release.yaml @@ -39,6 +39,10 @@ jobs: restore-keys: | ${{ runner.os }}-pnpm-store- + - name: Add NPM auth + run: | + echo "//registry.npmjs.org/:_authToken=${{secrets.NPM_TOKEN}}" >> ~/.npmrc + - name: Install dependencies run: pnpm install From 226e19b2b994a914bcad67729e47b27a1d8bfc7d Mon Sep 17 00:00:00 2001 From: Steven Ontong Date: Thu, 20 Jun 2024 18:53:09 +0200 Subject: [PATCH 31/36] framework should be public --- libs/lib-services/package.json | 3 +++ 1 file changed, 3 insertions(+) diff --git a/libs/lib-services/package.json b/libs/lib-services/package.json index 122c7a78b..5ec88b93c 100644 --- a/libs/lib-services/package.json +++ b/libs/lib-services/package.json @@ -9,6 +9,9 @@ "files": [ "dist/**/*" ], + "publishConfig": { + "access": "public" + }, "scripts": { "clean": "rm -r ./dist && tsc -b --clean", "build": "tsc -b", From 2656e1da4f3cc19dc309b44ce511ea6d84d1eab6 Mon Sep 17 00:00:00 2001 From: Steven Ontong Date: Thu, 20 Jun 2024 19:33:06 +0200 Subject: [PATCH 32/36] fix incorrect DB used for migrations --- packages/service-core/src/migrations/migrations.ts | 2 +- .../service-core/src/migrations/store/migration-store.ts | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/service-core/src/migrations/migrations.ts b/packages/service-core/src/migrations/migrations.ts index 572b622d6..e116b2196 100644 --- a/packages/service-core/src/migrations/migrations.ts +++ b/packages/service-core/src/migrations/migrations.ts @@ -95,7 +95,7 @@ export const migrate = async (options: MigrationOptions) => { const migrations = await loadMigrations(MIGRATIONS_DIR, runner_config); // Use the provided config to connect to Mongo - const store = createMongoMigrationStore(client); + const store = createMongoMigrationStore(clientDB); const state = await store.load(); diff --git a/packages/service-core/src/migrations/store/migration-store.ts b/packages/service-core/src/migrations/store/migration-store.ts index e89904ff8..9ade3b312 100644 --- a/packages/service-core/src/migrations/store/migration-store.ts +++ b/packages/service-core/src/migrations/store/migration-store.ts @@ -1,4 +1,4 @@ -import Mongo from 'mongodb'; +import { Db } from 'mongodb'; import * as path from 'path'; import * as defs from '../definitions.js'; @@ -11,8 +11,8 @@ export type MigrationStore = { * A custom store for node-migrate which is used to save and load migrations that have * been operated on to mongo. 
*/ -export const createMongoMigrationStore = (client: Mongo.MongoClient): MigrationStore => { - const collection = client.db().collection('migrations'); +export const createMongoMigrationStore = (db: Db): MigrationStore => { + const collection = db.collection('migrations'); return { load: async () => { From 978bf78ece5988954039d6cc92a26614759baa20 Mon Sep 17 00:00:00 2001 From: Steven Ontong Date: Mon, 24 Jun 2024 12:26:10 +0200 Subject: [PATCH 33/36] don't use deprecated route path --- packages/service-core/src/routes/hooks.ts | 1 - 1 file changed, 1 deletion(-) diff --git a/packages/service-core/src/routes/hooks.ts b/packages/service-core/src/routes/hooks.ts index 4d3f3e322..50f24f187 100644 --- a/packages/service-core/src/routes/hooks.ts +++ b/packages/service-core/src/routes/hooks.ts @@ -25,7 +25,6 @@ export const createRequestQueueHook = (params: CreateRequestQueueParams): fastif status: 429, method: request.method, path: request.url, - route: request.routerPath, queue_overflow: true }); return reply.status(429).send(); From f16223f4de5ef130fff46befff7b08fcd8c19715 Mon Sep 17 00:00:00 2001 From: Steven Ontong Date: Mon, 24 Jun 2024 12:53:07 +0200 Subject: [PATCH 34/36] add comments --- service/src/runners/server.ts | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/service/src/runners/server.ts b/service/src/runners/server.ts index bf453989e..bb525a26a 100644 --- a/service/src/runners/server.ts +++ b/service/src/runners/server.ts @@ -18,7 +18,12 @@ export async function startServer(runnerConfig: core.utils.RunnerConfig) { const server = fastify.fastify(); - // Create a separate context for concurrency queueing + /** + * Fastify creates an encapsulated context for each `.register` call. + * Creating a separate context here to separate the concurrency limits for Admin APIs + * and Sync Streaming routes. 
+ * https://github.com/fastify/fastify/blob/main/docs/Reference/Encapsulation.md + */ server.register(async function (childContext) { core.routes.registerFastifyRoutes( childContext, From 8c4ac939033ddae15807c3823995726264e7a16e Mon Sep 17 00:00:00 2001 From: Steven Ontong Date: Mon, 24 Jun 2024 15:07:42 +0200 Subject: [PATCH 35/36] add log for streaming requests --- packages/rsocket-router/src/router/ReactiveSocketRouter.ts | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/packages/rsocket-router/src/router/ReactiveSocketRouter.ts b/packages/rsocket-router/src/router/ReactiveSocketRouter.ts index e500150f0..3dada5325 100644 --- a/packages/rsocket-router/src/router/ReactiveSocketRouter.ts +++ b/packages/rsocket-router/src/router/ReactiveSocketRouter.ts @@ -122,6 +122,7 @@ export async function handleReactiveStream( ) { const { payload, responder, initialN } = request; const { metadata } = payload; + const startTime = new Date(); const exitWithError = (error: any) => { responder.onError(error); @@ -171,5 +172,9 @@ export async function handleReactiveStream( logger.error(ex); responder.onError(ex); responder.onComplete(); + } finally { + logger.info(`STREAM ${path}`, { + duration_ms: Math.round(new Date().valueOf() - startTime.valueOf() + Number.EPSILON) + }); } } From a8cb8866c3d7b92bcad9bdf252ec991d044ce516 Mon Sep 17 00:00:00 2001 From: Steven Ontong Date: Mon, 24 Jun 2024 15:08:30 +0200 Subject: [PATCH 36/36] added prettier script --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 2a896dfa2..bbae4c7f3 100644 --- a/package.json +++ b/package.json @@ -8,7 +8,7 @@ "author": "JourneyApps Platform", "scripts": { "check-updates": "ncu -u --deep", - "format": "journey-formatter format", + "format": "prettier --write .", "build": "pnpm run -r build && pnpm run -r build:tests", "build:packages": "pnpm run build", "build:production": "NODE_ENV=production pnpm run -r build",
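On PATCH 32/36 ("fix incorrect DB used for migrations") further up: handing createMongoMigrationStore a Db instead of a MongoClient matters because client.db() with no argument returns the driver's default database -- the one named in the connection string, or "test" -- rather than the database the service is actually configured to use. A short illustrative sketch, not part of any patch; the connection string and database name are made up:

    import { MongoClient } from 'mongodb';

    const client = new MongoClient('mongodb://localhost:27017');

    // Old behaviour: the store derived the database from the client itself,
    // which falls back to the default database ("test" for this URI).
    const defaultDb = client.db();

    // New behaviour: the caller passes the database it already uses, so the
    // `migrations` collection is tracked alongside the service's own data.
    const configuredDb = client.db('powersync_demo'); // hypothetical name
    const migrations = configuredDb.collection('migrations');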
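The comment added in PATCH 34/36 relies on Fastify's encapsulation model: each .register() call creates a child context, and hooks (or decorators) added inside that context apply only to routes registered there, not to sibling contexts. A standalone sketch of that pattern -- the routes and hooks are illustrative and not taken from service/src/runners/server.ts:

    import { fastify } from 'fastify';

    const server = fastify();

    // Admin/API context: hooks registered here only affect routes in this plugin.
    server.register(async (adminContext) => {
      adminContext.addHook('onRequest', async () => {
        // e.g. enforce a small request-queue limit for short-lived admin calls
      });
      adminContext.get('/api/ping', async () => ({ ok: true }));
    });

    // Sync-streaming context: its own hooks, and therefore its own limits.
    server.register(async (streamContext) => {
      streamContext.addHook('onRequest', async () => {
        // e.g. allow a larger limit for long-lived streaming connections
      });
      streamContext.get('/stream', async () => ({ ok: true }));
    });

Registering the Admin API routes and the sync-stream routes in separate contexts is what keeps their concurrency limits independent, which is exactly what the added comment in server.ts describes.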