diff --git a/.github/workflows/build-test-chassis-srv.yaml b/.github/workflows/build-test-chassis-srv.yaml new file mode 100644 index 00000000..50e5ba39 --- /dev/null +++ b/.github/workflows/build-test-chassis-srv.yaml @@ -0,0 +1,31 @@ +name: build-test-chassis-srv + +on: + workflow_dispatch: {} + push: + paths: + - 'packages/chassis-srv/**' + - '.github/workflows/build-test-chassis-srv.yaml' + - '.github/workflows/base-build-job.yaml' + - '.github/workflows/base-test-job.yaml' + pull_request: + paths: + - 'packages/chassis-srv/**' + - '.github/workflows/build-test-chassis-srv.yaml' + - '.github/workflows/base-build-job.yaml' + - '.github/workflows/base-test-job.yaml' + +jobs: + build: + uses: ./.github/workflows/base-build-job.yaml + with: + package: chassis-srv + + test: + needs: build + uses: ./.github/workflows/base-test-job.yaml + with: + package: chassis-srv + coveralls: true + system: true + backing-only: true diff --git a/.github/workflows/build-test-resource-base-interface.yaml b/.github/workflows/build-test-resource-base-interface.yaml new file mode 100644 index 00000000..532a881f --- /dev/null +++ b/.github/workflows/build-test-resource-base-interface.yaml @@ -0,0 +1,31 @@ +name: build-test-resource-base-interface + +on: + workflow_dispatch: {} + push: + paths: + - 'packages/resource-base-interface/**' + - '.github/workflows/build-test-resource-base-interface.yaml' + - '.github/workflows/base-build-job.yaml' + - '.github/workflows/base-test-job.yaml' + pull_request: + paths: + - 'packages/resource-base-interface/**' + - '.github/workflows/build-test-resource-base-interface.yaml' + - '.github/workflows/base-build-job.yaml' + - '.github/workflows/base-test-job.yaml' + +jobs: + build: + uses: ./.github/workflows/base-build-job.yaml + with: + package: resource-base-interface + + test: + needs: build + uses: ./.github/workflows/base-test-job.yaml + with: + package: resource-base-interface + coveralls: true + system: true + backing-only: true diff --git 
a/package-lock.json b/package-lock.json index 8c209285..e7dfa0cd 100644 --- a/package-lock.json +++ b/package-lock.json @@ -2333,6 +2333,23 @@ "node": ">=18" } }, + "node_modules/@esbuild/netbsd-arm64": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.9.tgz", + "integrity": "sha512-9jNJl6FqaUG+COdQMjSCGW4QiMHH88xWbvZ+kRVblZsWrkXlABuGdFJ1E9L7HK+T0Yqd4akKNa/lO0+jDxQD4Q==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, "node_modules/@esbuild/netbsd-x64": { "version": "0.23.1", "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.23.1.tgz", @@ -2381,6 +2398,23 @@ "node": ">=18" } }, + "node_modules/@esbuild/openharmony-arm64": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.25.9.tgz", + "integrity": "sha512-4Xd0xNiMVXKh6Fa7HEJQbrpP3m3DDn43jKxMjxLLRjWnRsfxjORYJlXPO4JNcXtOyfajXorRKY9NkOpTHptErg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ], + "engines": { + "node": ">=18" + } + }, "node_modules/@esbuild/sunos-x64": { "version": "0.23.1", "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.23.1.tgz", @@ -2446,9 +2480,10 @@ } }, "node_modules/@eslint-community/eslint-utils": { - "version": "4.4.1", - "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.4.1.tgz", - "integrity": "sha512-s3O3waFUrMV8P/XaF/+ZTp1X9XBZW1a4B97ZnjQF2KYWaFD2A8KyFBsrsfSjEmjn3RGWAIuvlneuZm3CUK3jbA==", + "version": "4.9.0", + "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.9.0.tgz", + "integrity": "sha512-ayVFHdtZ+hsq1t2Dy24wCmGXGe4q9Gu3smhLYALJrr473ZH27MsnSL+LKUlimp4BWJqMDMLmPpx/Q9R3OAlL4g==", + "license": "MIT", "dependencies": { "eslint-visitor-keys": "^3.4.3" }, @@ -2530,6 +2565,16 @@ 
"node": "*" } }, + "node_modules/@eslint/config-helpers": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/@eslint/config-helpers/-/config-helpers-0.3.1.tgz", + "integrity": "sha512-xR93k9WhrDYpXHORXpxVL5oHj3Era7wo6k/Wd8/IsQNnZUTzkGS29lyn3nAT05v6ltUuTFVCCYDEGfy2Or/sPA==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, "node_modules/@eslint/core": { "version": "0.7.0", "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.7.0.tgz", @@ -2620,9 +2665,10 @@ } }, "node_modules/@eslint/object-schema": { - "version": "2.1.4", - "resolved": "https://registry.npmjs.org/@eslint/object-schema/-/object-schema-2.1.4.tgz", - "integrity": "sha512-BsWiH1yFGjXXS2yvrf5LyuoSIIbPrGUWob917o+BTKuZ7qJdxX8aJLRxs1fS9n6r7vESrq1OUqb68dANcFXuQQ==", + "version": "2.1.6", + "resolved": "https://registry.npmjs.org/@eslint/object-schema/-/object-schema-2.1.6.tgz", + "integrity": "sha512-RBMg5FRL0I0gs51M/guSAj5/e14VQ4tpZnQNWwuDT66P14I43ItmPfIZRhO9fUVIPOAQXU47atlywZ/czoqFPA==", + "license": "Apache-2.0", "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" } @@ -2876,9 +2922,10 @@ } }, "node_modules/@grpc/grpc-js": { - "version": "1.12.2", - "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.12.2.tgz", - "integrity": "sha512-bgxdZmgTrJZX50OjyVwz3+mNEnCTNkh3cIqGPWVNeW9jX6bn1ZkU80uPd+67/ZpIJIjRQ9qaHCjhavyoWYxumg==", + "version": "1.13.4", + "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.13.4.tgz", + "integrity": "sha512-GsFaMXCkMqkKIvwCQjCrwH+GHbPKBjhwo/8ZuUkWHqbI73Kky9I+pQltrlT0+MWpedCoosda53lgjYfyEPgxBg==", + "license": "Apache-2.0", "dependencies": { "@grpc/proto-loader": "^0.7.13", "@js-sdsl/ordered-map": "^4.4.2" @@ -2971,6 +3018,46 @@ "url": "https://github.com/sponsors/nzakas" } }, + "node_modules/@humanwhocodes/config-array": { + "version": "0.13.0", + "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.13.0.tgz", + "integrity": 
"sha512-DZLEEqFWQFiyK6h5YIeynKx7JlvCYWL0cImfSRXZ9l4Sg2efkFGTuFf6vzXjK1cq6IYkU+Eg/JizXw+TD2vRNw==", + "deprecated": "Use @eslint/config-array instead", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@humanwhocodes/object-schema": "^2.0.3", + "debug": "^4.3.1", + "minimatch": "^3.0.5" + }, + "engines": { + "node": ">=10.10.0" + } + }, + "node_modules/@humanwhocodes/config-array/node_modules/brace-expansion": { + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/@humanwhocodes/config-array/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, "node_modules/@humanwhocodes/module-importer": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", @@ -2983,10 +3070,19 @@ "url": "https://github.com/sponsors/nzakas" } }, + "node_modules/@humanwhocodes/object-schema": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-2.0.3.tgz", + "integrity": "sha512-93zYdMES/c1D69yZiKDBj0V24vqNzB/koF26KPaagAfd3P/4gUlh3Dys5ogAK+Exi9QyzlD8x/08Zt7wIKcDcA==", + "deprecated": "Use @eslint/object-schema instead", + "dev": true, + "license": "BSD-3-Clause" + }, "node_modules/@humanwhocodes/retry": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.4.0.tgz", - "integrity": 
"sha512-xnRgu9DxZbkWak/te3fcytNyp8MTbuiZIaueg2rgEvBuN55n04nwLYLU9TX/VVlusc9L2ZNXi99nUFNkHXtr5g==", + "version": "0.4.3", + "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.4.3.tgz", + "integrity": "sha512-bV0Tgo9K4hfPCek+aMAn81RppFKv2ySDQeMoSZuvTASywNTnVJCArCZE2FWqpvIatKu7VMRLWlR1EazvVhDyhQ==", + "license": "Apache-2.0", "engines": { "node": ">=18.18" }, @@ -3328,6 +3424,12 @@ "url": "https://opencollective.com/js-sdsl" } }, + "node_modules/@keyv/serialize": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@keyv/serialize/-/serialize-1.1.1.tgz", + "integrity": "sha512-dXn3FZhPv0US+7dtJsIi2R+c7qWYiReoEh5zUntWCf4oSpMNib8FDhSoed6m3QyZdx5hK7iLFkYk3rNxwt8vTA==", + "license": "MIT" + }, "node_modules/@koa/cors": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/@koa/cors/-/cors-5.0.0.tgz", @@ -5341,6 +5443,10 @@ "resolved": "packages/acs-client", "link": true }, + "node_modules/@restorecommerce/chassis-srv": { + "resolved": "packages/chassis-srv", + "link": true + }, "node_modules/@restorecommerce/cluster-service": { "resolved": "packages/cluster-service", "link": true @@ -5393,6 +5499,10 @@ "resolved": "packages/rc-grpc-clients", "link": true }, + "node_modules/@restorecommerce/resource-base-interface": { + "resolved": "packages/resource-base-interface", + "link": true + }, "node_modules/@restorecommerce/scs-jobs": { "resolved": "packages/scs-jobs", "link": true @@ -5759,6 +5869,13 @@ "node": ">=14.16" } }, + "node_modules/@tokenizer/token": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/@tokenizer/token/-/token-0.3.0.tgz", + "integrity": "sha512-OvjF+z51L3ov0OyAU0duzsYuvO01PH7x4t6DJx+guahgTnBHkhJdG7soQeTSFLWN3efnHyibZ4Z8l2EuWwJN3A==", + "dev": true, + "license": "MIT" + }, "node_modules/@tootallnate/once": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-2.0.0.tgz", @@ -5842,6 +5959,16 @@ "integrity": 
"sha512-8iHVLHsCCOBKjCF2KwFe0p9Z3rfM9mL+sSP8btyR5vTjJRAqpBYD28/ZLgXPf0pjG1VxOvtCV/BgXkQbpSe8Hw==", "dev": true }, + "node_modules/@types/async-retry": { + "version": "1.4.9", + "resolved": "https://registry.npmjs.org/@types/async-retry/-/async-retry-1.4.9.tgz", + "integrity": "sha512-s1ciZQJzRh3708X/m3vPExr5KJlzlZJvXsKpbtE2luqNcbROr64qU+3KpJsYHqWMeaxI839OvXf9PrUSw1Xtyg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/retry": "*" + } + }, "node_modules/@types/aws-sdk2-types": { "version": "0.0.5", "resolved": "https://registry.npmjs.org/@types/aws-sdk2-types/-/aws-sdk2-types-0.0.5.tgz", @@ -5868,6 +5995,13 @@ "@types/node": "*" } }, + "node_modules/@types/cache-manager": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/@types/cache-manager/-/cache-manager-4.0.6.tgz", + "integrity": "sha512-8qL93MF05/xrzFm/LSPtzNEOE1eQF3VwGHAcQEylgp5hDSTe41jtFwbSYAPfyYcVa28y1vYSjIt0c1fLLUiC/Q==", + "dev": true, + "license": "MIT" + }, "node_modules/@types/chai": { "version": "5.2.2", "resolved": "https://registry.npmjs.org/@types/chai/-/chai-5.2.2.tgz", @@ -6207,6 +6341,16 @@ "dev": true, "license": "MIT" }, + "node_modules/@types/nedb": { + "version": "1.8.16", + "resolved": "https://registry.npmjs.org/@types/nedb/-/nedb-1.8.16.tgz", + "integrity": "sha512-ND+uzwAZk7ZI9byOvHGOcZe2R9XUcLF698yDJKn00trFvh+GaemkX3gQKCSKtObjDpv8Uuou+k8v4x4scPr4TA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, "node_modules/@types/node": { "version": "22.8.7", "resolved": "https://registry.npmjs.org/@types/node/-/node-22.8.7.tgz", @@ -6507,20 +6651,21 @@ } }, "node_modules/@typescript-eslint/eslint-plugin": { - "version": "8.12.2", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.12.2.tgz", - "integrity": "sha512-gQxbxM8mcxBwaEmWdtLCIGLfixBMHhQjBqR8sVWNTPpcj45WlYL2IObS/DNMLH1DBP0n8qz+aiiLTGfopPEebw==", + "version": "8.43.0", + "resolved": 
"https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.43.0.tgz", + "integrity": "sha512-8tg+gt7ENL7KewsKMKDHXR1vm8tt9eMxjJBYINf6swonlWgkYn5NwyIgXpbbDxTNU5DgpDFfj95prcTq2clIQQ==", "dev": true, + "license": "MIT", "dependencies": { "@eslint-community/regexpp": "^4.10.0", - "@typescript-eslint/scope-manager": "8.12.2", - "@typescript-eslint/type-utils": "8.12.2", - "@typescript-eslint/utils": "8.12.2", - "@typescript-eslint/visitor-keys": "8.12.2", + "@typescript-eslint/scope-manager": "8.43.0", + "@typescript-eslint/type-utils": "8.43.0", + "@typescript-eslint/utils": "8.43.0", + "@typescript-eslint/visitor-keys": "8.43.0", "graphemer": "^1.4.0", - "ignore": "^5.3.1", + "ignore": "^7.0.0", "natural-compare": "^1.4.0", - "ts-api-utils": "^1.3.0" + "ts-api-utils": "^2.1.0" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -6530,25 +6675,45 @@ "url": "https://opencollective.com/typescript-eslint" }, "peerDependencies": { - "@typescript-eslint/parser": "^8.0.0 || ^8.0.0-alpha.0", - "eslint": "^8.57.0 || ^9.0.0" + "@typescript-eslint/parser": "^8.43.0", + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <6.0.0" + } + }, + "node_modules/@typescript-eslint/eslint-plugin/node_modules/ignore": { + "version": "7.0.5", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-7.0.5.tgz", + "integrity": "sha512-Hs59xBNfUIunMFgWAbGX5cq6893IbWg4KnrjbYwX3tx0ztorVgTDA6B2sxf8ejHJ4wz8BqGUMYlnzNBer5NvGg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, + "node_modules/@typescript-eslint/eslint-plugin/node_modules/ts-api-utils": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.1.0.tgz", + "integrity": "sha512-CUgTZL1irw8u29bzrOD/nH85jqyc74D6SshFgujOIA7osm2Rz7dYH77agkx7H4FBNxDq7Cjf+IjaX/8zwFW+ZQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18.12" }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } + "peerDependencies": { + 
"typescript": ">=4.8.4" } }, "node_modules/@typescript-eslint/parser": { - "version": "8.12.2", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.12.2.tgz", - "integrity": "sha512-MrvlXNfGPLH3Z+r7Tk+Z5moZAc0dzdVjTgUgwsdGweH7lydysQsnSww3nAmsq8blFuRD5VRlAr9YdEFw3e6PBw==", + "version": "8.43.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.43.0.tgz", + "integrity": "sha512-B7RIQiTsCBBmY+yW4+ILd6mF5h1FUwJsVvpqkrgpszYifetQ2Ke+Z4u6aZh0CblkUGIdR59iYVyXqqZGkZ3aBw==", "dev": true, + "license": "MIT", "dependencies": { - "@typescript-eslint/scope-manager": "8.12.2", - "@typescript-eslint/types": "8.12.2", - "@typescript-eslint/typescript-estree": "8.12.2", - "@typescript-eslint/visitor-keys": "8.12.2", + "@typescript-eslint/scope-manager": "8.43.0", + "@typescript-eslint/types": "8.43.0", + "@typescript-eslint/typescript-estree": "8.43.0", + "@typescript-eslint/visitor-keys": "8.43.0", "debug": "^4.3.4" }, "engines": { @@ -6559,40 +6724,76 @@ "url": "https://opencollective.com/typescript-eslint" }, "peerDependencies": { - "eslint": "^8.57.0 || ^9.0.0" + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <6.0.0" + } + }, + "node_modules/@typescript-eslint/project-service": { + "version": "8.43.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.43.0.tgz", + "integrity": "sha512-htB/+D/BIGoNTQYffZw4uM4NzzuolCoaA/BusuSIcC8YjmBYQioew5VUZAYdAETPjeed0hqCaW7EHg+Robq8uw==", + "license": "MIT", + "dependencies": { + "@typescript-eslint/tsconfig-utils": "^8.43.0", + "@typescript-eslint/types": "^8.43.0", + "debug": "^4.3.4" }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "typescript": ">=4.8.4 <6.0.0" } }, "node_modules/@typescript-eslint/scope-manager": { 
- "version": "8.12.2", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.12.2.tgz", - "integrity": "sha512-gPLpLtrj9aMHOvxJkSbDBmbRuYdtiEbnvO25bCMza3DhMjTQw0u7Y1M+YR5JPbMsXXnSPuCf5hfq0nEkQDL/JQ==", + "version": "8.43.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.43.0.tgz", + "integrity": "sha512-daSWlQ87ZhsjrbMLvpuuMAt3y4ba57AuvadcR7f3nl8eS3BjRc8L9VLxFLk92RL5xdXOg6IQ+qKjjqNEimGuAg==", + "license": "MIT", "dependencies": { - "@typescript-eslint/types": "8.12.2", - "@typescript-eslint/visitor-keys": "8.12.2" + "@typescript-eslint/types": "8.43.0", + "@typescript-eslint/visitor-keys": "8.43.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/tsconfig-utils": { + "version": "8.43.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.43.0.tgz", + "integrity": "sha512-ALC2prjZcj2YqqL5X/bwWQmHA2em6/94GcbB/KKu5SX3EBDOsqztmmX1kMkvAJHzxk7TazKzJfFiEIagNV3qEA==", + "license": "MIT", "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "typescript": ">=4.8.4 <6.0.0" } }, "node_modules/@typescript-eslint/type-utils": { - "version": "8.12.2", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.12.2.tgz", - "integrity": "sha512-bwuU4TAogPI+1q/IJSKuD4shBLc/d2vGcRT588q+jzayQyjVK2X6v/fbR4InY2U2sgf8MEvVCqEWUzYzgBNcGQ==", + "version": "8.43.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.43.0.tgz", + "integrity": "sha512-qaH1uLBpBuBBuRf8c1mLJ6swOfzCXryhKND04Igr4pckzSEW9JX5Aw9AgW00kwfjWJF0kk0ps9ExKTfvXfw4Qg==", "dev": true, + "license": "MIT", "dependencies": { - 
"@typescript-eslint/typescript-estree": "8.12.2", - "@typescript-eslint/utils": "8.12.2", + "@typescript-eslint/types": "8.43.0", + "@typescript-eslint/typescript-estree": "8.43.0", + "@typescript-eslint/utils": "8.43.0", "debug": "^4.3.4", - "ts-api-utils": "^1.3.0" + "ts-api-utils": "^2.1.0" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -6601,16 +6802,29 @@ "type": "opencollective", "url": "https://opencollective.com/typescript-eslint" }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <6.0.0" + } + }, + "node_modules/@typescript-eslint/type-utils/node_modules/ts-api-utils": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.1.0.tgz", + "integrity": "sha512-CUgTZL1irw8u29bzrOD/nH85jqyc74D6SshFgujOIA7osm2Rz7dYH77agkx7H4FBNxDq7Cjf+IjaX/8zwFW+ZQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18.12" + }, + "peerDependencies": { + "typescript": ">=4.8.4" } }, "node_modules/@typescript-eslint/types": { - "version": "8.12.2", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.12.2.tgz", - "integrity": "sha512-VwDwMF1SZ7wPBUZwmMdnDJ6sIFk4K4s+ALKLP6aIQsISkPv8jhiw65sAK6SuWODN/ix+m+HgbYDkH+zLjrzvOA==", + "version": "8.43.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.43.0.tgz", + "integrity": "sha512-vQ2FZaxJpydjSZJKiSW/LJsabFFvV7KgLC5DiLhkBcykhQj8iK9BOaDmQt74nnKdLvceM5xmhaTF+pLekrxEkw==", + "license": "MIT", "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" }, @@ -6620,18 +6834,21 @@ } }, "node_modules/@typescript-eslint/typescript-estree": { - "version": "8.12.2", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.12.2.tgz", - "integrity": "sha512-mME5MDwGe30Pq9zKPvyduyU86PH7aixwqYR2grTglAdB+AN8xXQ1vFGpYaUSJ5o5P/5znsSBeNcs5g5/2aQwow==", + "version": "8.43.0", + "resolved": 
"https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.43.0.tgz", + "integrity": "sha512-7Vv6zlAhPb+cvEpP06WXXy/ZByph9iL6BQRBDj4kmBsW98AqEeQHlj/13X+sZOrKSo9/rNKH4Ul4f6EICREFdw==", + "license": "MIT", "dependencies": { - "@typescript-eslint/types": "8.12.2", - "@typescript-eslint/visitor-keys": "8.12.2", + "@typescript-eslint/project-service": "8.43.0", + "@typescript-eslint/tsconfig-utils": "8.43.0", + "@typescript-eslint/types": "8.43.0", + "@typescript-eslint/visitor-keys": "8.43.0", "debug": "^4.3.4", "fast-glob": "^3.3.2", "is-glob": "^4.0.3", "minimatch": "^9.0.4", "semver": "^7.6.0", - "ts-api-utils": "^1.3.0" + "ts-api-utils": "^2.1.0" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -6640,16 +6857,15 @@ "type": "opencollective", "url": "https://opencollective.com/typescript-eslint" }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } + "peerDependencies": { + "typescript": ">=4.8.4 <6.0.0" } }, "node_modules/@typescript-eslint/typescript-estree/node_modules/semver": { - "version": "7.6.3", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", - "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", + "version": "7.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", + "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", + "license": "ISC", "bin": { "semver": "bin/semver.js" }, @@ -6657,15 +6873,28 @@ "node": ">=10" } }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/ts-api-utils": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.1.0.tgz", + "integrity": "sha512-CUgTZL1irw8u29bzrOD/nH85jqyc74D6SshFgujOIA7osm2Rz7dYH77agkx7H4FBNxDq7Cjf+IjaX/8zwFW+ZQ==", + "license": "MIT", + "engines": { + "node": ">=18.12" + }, + "peerDependencies": { + "typescript": ">=4.8.4" + } + }, 
"node_modules/@typescript-eslint/utils": { - "version": "8.12.2", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.12.2.tgz", - "integrity": "sha512-UTTuDIX3fkfAz6iSVa5rTuSfWIYZ6ATtEocQ/umkRSyC9O919lbZ8dcH7mysshrCdrAM03skJOEYaBugxN+M6A==", + "version": "8.43.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.43.0.tgz", + "integrity": "sha512-S1/tEmkUeeswxd0GGcnwuVQPFWo8NzZTOMxCvw8BX7OMxnNae+i8Tm7REQen/SwUIPoPqfKn7EaZ+YLpiB3k9g==", + "license": "MIT", "dependencies": { - "@eslint-community/eslint-utils": "^4.4.0", - "@typescript-eslint/scope-manager": "8.12.2", - "@typescript-eslint/types": "8.12.2", - "@typescript-eslint/typescript-estree": "8.12.2" + "@eslint-community/eslint-utils": "^4.7.0", + "@typescript-eslint/scope-manager": "8.43.0", + "@typescript-eslint/types": "8.43.0", + "@typescript-eslint/typescript-estree": "8.43.0" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -6675,16 +6904,18 @@ "url": "https://opencollective.com/typescript-eslint" }, "peerDependencies": { - "eslint": "^8.57.0 || ^9.0.0" + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <6.0.0" } }, "node_modules/@typescript-eslint/visitor-keys": { - "version": "8.12.2", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.12.2.tgz", - "integrity": "sha512-PChz8UaKQAVNHghsHcPyx1OMHoFRUEA7rJSK/mDhdq85bk+PLsUHUBqTQTFt18VJZbmxBovM65fezlheQRsSDA==", + "version": "8.43.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.43.0.tgz", + "integrity": "sha512-T+S1KqRD4sg/bHfLwrpF/K3gQLBM1n7Rp7OjjikjTEssI2YJzQpi5WXoynOaQ93ERIuq3O8RBTOUYDKszUCEHw==", + "license": "MIT", "dependencies": { - "@typescript-eslint/types": "8.12.2", - "eslint-visitor-keys": "^3.4.3" + "@typescript-eslint/types": "8.43.0", + "eslint-visitor-keys": "^4.2.1" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -6695,16 +6926,24 @@ } }, 
"node_modules/@typescript-eslint/visitor-keys/node_modules/eslint-visitor-keys": { - "version": "3.4.3", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", - "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.1.tgz", + "integrity": "sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==", + "license": "Apache-2.0", "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" }, "funding": { "url": "https://opencollective.com/eslint" } }, + "node_modules/@ungap/structured-clone": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.3.0.tgz", + "integrity": "sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==", + "dev": true, + "license": "ISC" + }, "node_modules/@vitest/coverage-v8": { "version": "2.1.4", "resolved": "https://registry.npmjs.org/@vitest/coverage-v8/-/coverage-v8-2.1.4.tgz", @@ -6992,9 +7231,10 @@ } }, "node_modules/acorn": { - "version": "8.14.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.14.0.tgz", - "integrity": "sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA==", + "version": "8.15.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz", + "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==", + "license": "MIT", "bin": { "acorn": "bin/acorn" }, @@ -7170,6 +7410,33 @@ "integrity": "sha512-lYe4Gx7QT+MKGbDsA+Z+he/Wtef0BiwDOlK/XkBrdfsh9J/jPPXbX0tE9x9cl27Tmu5gg3QUbUrQYa/y+KOHPQ==", "dev": true }, + "node_modules/arangojs": { + "version": "9.3.0", + "resolved": "https://registry.npmjs.org/arangojs/-/arangojs-9.3.0.tgz", + 
"integrity": "sha512-+z/TxumH8ywsXAN0oyQAxMnBMtDbd4tjFhGXzygAHD8+YDzEC705STLBh38KGbwxWeekalu2XEHUtrJ/NONgTQ==", + "license": "Apache-2.0", + "dependencies": { + "@types/node": "^20.11.26" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/arangojs/node_modules/@types/node": { + "version": "20.19.13", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.19.13.tgz", + "integrity": "sha512-yCAeZl7a0DxgNVteXFHt9+uyFbqXGy/ShC4BlcHkoE0AfGXYv/BUiplV72DjMYXHDBXFjhvr6DD1NiRVfB4j8g==", + "license": "MIT", + "dependencies": { + "undici-types": "~6.21.0" + } + }, + "node_modules/arangojs/node_modules/undici-types": { + "version": "6.21.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz", + "integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==", + "license": "MIT" + }, "node_modules/arg": { "version": "4.1.3", "resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz", @@ -7689,6 +7956,14 @@ "integrity": "sha512-nbE1WxOTTrUWIfsfZ4aHGYu5DOuNkbxGokjV6Z2kxfJK3uaAb8zNK1muzOeipoLHZjInT4Br88BHpzevc681xA==", "optional": true }, + "node_modules/binary-search-tree": { + "version": "0.2.5", + "resolved": "https://registry.npmjs.org/binary-search-tree/-/binary-search-tree-0.2.5.tgz", + "integrity": "sha512-CvNVKS6iXagL1uGwLagSXz1hzSMezxOuGnFi5FHGKqaTO3nPPWrAbyALUzK640j+xOTVm7lzD9YP8W1f/gvUdw==", + "dependencies": { + "underscore": "~1.4.4" + } + }, "node_modules/bindings": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz", @@ -8105,6 +8380,24 @@ "node": ">= 6.0.0" } }, + "node_modules/cache-manager": { + "version": "6.4.3", + "resolved": "https://registry.npmjs.org/cache-manager/-/cache-manager-6.4.3.tgz", + "integrity": "sha512-VV5eq/QQ5rIVix7/aICO4JyvSeEv9eIQuKL5iFwgM2BrcYoE0A/D1mNsAHJAsB0WEbNdBlKkn6Tjz6fKzh/cKQ==", + "license": "MIT", + "dependencies": { + "keyv": "^5.3.3" + } + }, + "node_modules/cache-manager/node_modules/keyv": 
{ + "version": "5.5.1", + "resolved": "https://registry.npmjs.org/keyv/-/keyv-5.5.1.tgz", + "integrity": "sha512-eF3cHZ40bVsjdlRi/RvKAuB0+B61Q1xWvohnrJrnaQslM3h1n79IV+mc9EGag4nrA9ZOlNyr3TUzW5c8uy8vNA==", + "license": "MIT", + "dependencies": { + "@keyv/serialize": "^1.1.1" + } + }, "node_modules/cacheable-lookup": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/cacheable-lookup/-/cacheable-lookup-7.0.0.tgz", @@ -9494,9 +9787,10 @@ } }, "node_modules/cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "license": "MIT", "dependencies": { "path-key": "^3.1.0", "shebang-command": "^2.0.0", @@ -10004,6 +10298,19 @@ "node": ">=8" } }, + "node_modules/doctrine": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", + "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "esutils": "^2.0.2" + }, + "engines": { + "node": ">=6.0.0" + } + }, "node_modules/dom-serializer": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-2.0.0.tgz", @@ -10017,6 +10324,12 @@ "url": "https://github.com/cheeriojs/dom-serializer?sponsor=1" } }, + "node_modules/dom-walk": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/dom-walk/-/dom-walk-0.1.2.tgz", + "integrity": "sha512-6QvTW9mrGeIegrFXdtQi9pk7O/nSK6lSdXW2eqUspN5LWD7UTji2Fqw5V2YLjBpHEoU9Xl/eUWNpDeZvoyOv2w==", + "dev": true + }, "node_modules/domelementtype": { "version": "2.3.0", "resolved": 
"https://registry.npmjs.org/domelementtype/-/domelementtype-2.3.0.tgz", @@ -11517,6 +11830,24 @@ "node": ">=16.0.0" } }, + "node_modules/file-type": { + "version": "16.5.4", + "resolved": "https://registry.npmjs.org/file-type/-/file-type-16.5.4.tgz", + "integrity": "sha512-/yFHK0aGjFEgDJjEKP0pWCplsPFPhwyfwevf/pVxiN0tmE4L9LmwWxWukdJSHdoCli4VgQLehjJtwQBnqmsKcw==", + "dev": true, + "license": "MIT", + "dependencies": { + "readable-web-to-node-stream": "^3.0.0", + "strtok3": "^6.2.4", + "token-types": "^4.1.1" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sindresorhus/file-type?sponsor=1" + } + }, "node_modules/file-uri-to-path": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz", @@ -12718,10 +13049,21 @@ "node": "*" } }, - "node_modules/global-directory": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/global-directory/-/global-directory-4.0.1.tgz", - "integrity": "sha512-wHTUcDUoZ1H5/0iVqEudYW4/kAlN5cZ3j/bXn0Dpbizl9iaUVeWSHqiOjsgk6OW2bkLclbBjzewBz6weQ1zA2Q==", + "node_modules/global": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/global/-/global-4.4.0.tgz", + "integrity": "sha512-wv/LAoHdRE3BeTGz53FAamhGlPLhlssK45usmGFThIi4XqnBmjKQ16u+RNbP7WvigRZDxUsM0J3gcQ5yicaL0w==", + "dev": true, + "license": "MIT", + "dependencies": { + "min-document": "^2.19.0", + "process": "^0.11.10" + } + }, + "node_modules/global-directory": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/global-directory/-/global-directory-4.0.1.tgz", + "integrity": "sha512-wHTUcDUoZ1H5/0iVqEudYW4/kAlN5cZ3j/bXn0Dpbizl9iaUVeWSHqiOjsgk6OW2bkLclbBjzewBz6weQ1zA2Q==", "dev": true, "optional": true, "dependencies": { @@ -13522,6 +13864,12 @@ "node": "^14.17.0 || ^16.13.0 || >=18.0.0" } }, + "node_modules/immediate": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/immediate/-/immediate-3.0.6.tgz", + "integrity": 
"sha512-XXOFtyqDjNDAQxVfYxuF7g9Il/IbWmmlQg2MYKOH8ExIT1qg6xc4zyS3HaEEATgs1btfzxq15ciUiY7gjSXRGQ==", + "license": "MIT" + }, "node_modules/immutable": { "version": "3.7.6", "resolved": "https://registry.npmjs.org/immutable/-/immutable-3.7.6.tgz", @@ -14130,6 +14478,13 @@ "node": ">=8" } }, + "node_modules/is-function": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-function/-/is-function-1.0.2.tgz", + "integrity": "sha512-lw7DUp0aWXYg+CBCN+JKkcE0Q2RayZnSvnZBlwgxHBQhqt5pZNVy4Ri7H9GmmXkdu7LUthszM+Tor1u/2iBcpQ==", + "dev": true, + "license": "MIT" + }, "node_modules/is-generator": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/is-generator/-/is-generator-1.0.3.tgz", @@ -14284,6 +14639,16 @@ "node": ">=8" } }, + "node_modules/is-path-inside": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz", + "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, "node_modules/is-plain-obj": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-1.1.0.tgz", @@ -15057,6 +15422,15 @@ "integrity": "sha512-OYTthRfSh55WOItVqwpefPtNt2VdKsq5AnAK6apdtR6yCH8pr0CmSr710J0Mf+WdQy7K/OzMy7K2MgAfdQURDw==", "dev": true }, + "node_modules/kafkajs": { + "version": "2.2.4", + "resolved": "https://registry.npmjs.org/kafkajs/-/kafkajs-2.2.4.tgz", + "integrity": "sha512-j/YeapB1vfPT2iOIUn/vxdyKEuhuY2PxMBvf5JWux6iSaukAccrMtXEY/Lb7OvavDhOWME589bpLrEdnVHjfjA==", + "license": "MIT", + "engines": { + "node": ">=14.0.0" + } + }, "node_modules/keygrip": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/keygrip/-/keygrip-1.1.0.tgz", @@ -15737,6 +16111,15 @@ "node": ">=10" } }, + "node_modules/lie": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/lie/-/lie-3.1.1.tgz", + "integrity": 
"sha512-RiNhHysUjhrDQntfYSfY4MU24coXXdEOgw9WGcKHNeEwffDYbF//u87M1EWaMGzuFoSbqW0C9C6lEEhDOAswfw==", + "license": "MIT", + "dependencies": { + "immediate": "~3.0.5" + } + }, "node_modules/lines-and-columns": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-2.0.3.tgz", @@ -15776,6 +16159,15 @@ "node": ">=8" } }, + "node_modules/localforage": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/localforage/-/localforage-1.10.0.tgz", + "integrity": "sha512-14/H1aX7hzBBmmh7sGPd+AOMkkIrHM3Z1PAyGgZigA1H1p5O5ANnMyWzvpAETtG68/dC4pC0ncy3+PPGzXZHPg==", + "license": "Apache-2.0", + "dependencies": { + "lie": "3.1.1" + } + }, "node_modules/locate-path": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", @@ -16582,6 +16974,20 @@ "node": ">= 0.6" } }, + "node_modules/mime-kind": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/mime-kind/-/mime-kind-4.0.0.tgz", + "integrity": "sha512-qQvglvSpS5mABi30beNFd+uHKtKkxD3dxAmhi2e589XKx+WfVqhg5i5P5LBcVgwwv3BiDpNMBWrHqU+JexW4aA==", + "dev": true, + "license": "MIT", + "dependencies": { + "file-type": "^16.5.4", + "mime-types": "^2.1.24" + }, + "engines": { + "node": ">=10" + } + }, "node_modules/mime-types": { "version": "2.1.35", "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", @@ -16613,6 +17019,15 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/min-document": { + "version": "2.19.0", + "resolved": "https://registry.npmjs.org/min-document/-/min-document-2.19.0.tgz", + "integrity": "sha512-9Wy1B3m3f66bPPmU5hdA4DR4PB2OfDU/+GS3yAB7IQozE3tqXaVv2zOjgla7MEGSRv95+ILmOuvhLkOK6wJtCQ==", + "dev": true, + "dependencies": { + "dom-walk": "^0.1.0" + } + }, "node_modules/min-indent": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/min-indent/-/min-indent-1.0.1.tgz", @@ -17074,6 +17489,30 @@ "node": ">=12" } }, + "node_modules/multi-part": { + "version": 
"4.0.0", + "resolved": "https://registry.npmjs.org/multi-part/-/multi-part-4.0.0.tgz", + "integrity": "sha512-YT/CS0PAe62kT8EoQXcQj8yIcSu18HhYv0s6ShdAFsoFly3oV5QaxODnkj0u7zH0/RFyH47cdcMVpcGXliEFVA==", + "dev": true, + "license": "MIT", + "dependencies": { + "mime-kind": "^4.0.0", + "multi-part-lite": "^1.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/multi-part-lite": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/multi-part-lite/-/multi-part-lite-1.0.0.tgz", + "integrity": "sha512-KxIRbBZZ45hoKX1ROD/19wJr0ql1bef1rE8Y1PCwD3PuNXV42pp7Wo8lEHYuAajoT4vfAFcd3rPjlkyEEyt1nw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8.3.0" + } + }, "node_modules/multimatch": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/multimatch/-/multimatch-5.0.0.tgz", @@ -17302,6 +17741,36 @@ "node": ">=10" } }, + "node_modules/nedb": { + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/nedb/-/nedb-1.8.0.tgz", + "integrity": "sha512-ip7BJdyb5m+86ZbSb4y10FCCW9g35+U8bDRrZlAfCI6m4dKwEsQ5M52grcDcVK4Vm/vnPlDLywkyo3GliEkb5A==", + "license": "SEE LICENSE IN LICENSE", + "dependencies": { + "async": "0.2.10", + "binary-search-tree": "0.2.5", + "localforage": "^1.3.0", + "mkdirp": "~0.5.1", + "underscore": "~1.4.4" + } + }, + "node_modules/nedb/node_modules/async": { + "version": "0.2.10", + "resolved": "https://registry.npmjs.org/async/-/async-0.2.10.tgz", + "integrity": "sha512-eAkdoKxU6/LkKDBzLpT+t6Ff5EtfSF4wx1WfJiPEEV7WNLnDaRXk0oVysiEPm262roaachGexwUv94WhSgN5TQ==" + }, + "node_modules/nedb/node_modules/mkdirp": { + "version": "0.5.6", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.6.tgz", + "integrity": "sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==", + "license": "MIT", + "dependencies": { + "minimist": "^1.2.6" + }, + "bin": { + "mkdirp": "bin/cmd.js" + } + }, "node_modules/negotiator": { "version": "0.6.4", "resolved": 
"https://registry.npmjs.org/negotiator/-/negotiator-0.6.4.tgz", @@ -17322,11 +17791,12 @@ "optional": true }, "node_modules/nice-grpc": { - "version": "2.1.10", - "resolved": "https://registry.npmjs.org/nice-grpc/-/nice-grpc-2.1.10.tgz", - "integrity": "sha512-Nujs/4wWJvE5OSxWPp3M5H+zHJAgsWMo38bMNfKQP1VDeCChp7MiKTkhJBV5JZvrBIkPhYQCLIbfvVqEoSuTuA==", + "version": "2.1.12", + "resolved": "https://registry.npmjs.org/nice-grpc/-/nice-grpc-2.1.12.tgz", + "integrity": "sha512-J1n4Wg+D3IhRhGQb+iqh2OpiM0GzTve/kf2lnlW4S+xczmIEd0aHUDV1OsJ5a3q8GSTqJf+s4Rgg1M8uJltarw==", + "license": "MIT", "dependencies": { - "@grpc/grpc-js": "^1.10.8", + "@grpc/grpc-js": "^1.13.1", "abort-controller-x": "^0.4.0", "nice-grpc-common": "^2.0.2" } @@ -17356,6 +17826,17 @@ "ts-error": "^1.0.6" } }, + "node_modules/nice-grpc-server-reflection": { + "version": "2.0.14", + "resolved": "https://registry.npmjs.org/nice-grpc-server-reflection/-/nice-grpc-server-reflection-2.0.14.tgz", + "integrity": "sha512-IOVHgJgUeiaqXTCiwQr8+njuKZR8EWqe1ugvsrZJue2j8jqnyX93BQL4DIShZvNL9rjG1RWr1RbKERFRHsq3+A==", + "license": "MIT", + "dependencies": { + "@types/google-protobuf": "^3.7.4", + "google-protobuf": "^3.15.6", + "nice-grpc": "^2.1.12" + } + }, "node_modules/nice-try": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/nice-try/-/nice-try-1.0.5.tgz", @@ -19063,6 +19544,13 @@ "node": ">=0.8" } }, + "node_modules/parse-headers": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/parse-headers/-/parse-headers-2.0.6.tgz", + "integrity": "sha512-Tz11t3uKztEW5FEVZnj1ox8GKblWn+PvHY9TmJV5Mll2uHEwRdR/5Li1OlXoECjLYkApdhWy44ocONwXLiKO5A==", + "dev": true, + "license": "MIT" + }, "node_modules/parse-json": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", @@ -19177,6 +19665,13 @@ "node": ">=0.10.0" } }, + "node_modules/path-browserify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-browserify/-/path-browserify-1.0.1.tgz", + 
"integrity": "sha512-b7uo2UCUOYZcnF/3ID0lulOJi/bafxa1xPe7ZPsammBSpjSWQkjNxlt635YGS2MiR9GjvuXCtz2emr3jbsz98g==", + "dev": true, + "license": "MIT" + }, "node_modules/path-case": { "version": "3.0.4", "resolved": "https://registry.npmjs.org/path-case/-/path-case-3.0.4.tgz", @@ -19298,6 +19793,20 @@ "through": "~2.3" } }, + "node_modules/peek-readable": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/peek-readable/-/peek-readable-4.1.0.tgz", + "integrity": "sha512-ZI3LnwUv5nOGbQzD9c2iDG6toheuXSZP5esSHBjopsXH4dg19soufvpUGA3uohi5anFtGb2lhAVdHzH6R/Evvg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/Borewit" + } + }, "node_modules/picocolors": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", @@ -20189,6 +20698,65 @@ "node": ">= 6" } }, + "node_modules/readable-web-to-node-stream": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/readable-web-to-node-stream/-/readable-web-to-node-stream-3.0.4.tgz", + "integrity": "sha512-9nX56alTf5bwXQ3ZDipHJhusu9NTQJ/CVPtb/XHAJCXihZeitfJvIRS4GqQ/mfIoOE3IelHMrpayVrosdHBuLw==", + "dev": true, + "license": "MIT", + "dependencies": { + "readable-stream": "^4.7.0" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/Borewit" + } + }, + "node_modules/readable-web-to-node-stream/node_modules/buffer": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", + "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "dependencies": { + 
"base64-js": "^1.3.1", + "ieee754": "^1.2.1" + } + }, + "node_modules/readable-web-to-node-stream/node_modules/readable-stream": { + "version": "4.7.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-4.7.0.tgz", + "integrity": "sha512-oIGGmcpTLwPga8Bn6/Z75SVaH1z5dUut2ibSyAMVhmUggWpmDn2dapB0n7f8nwaSiRtepAsfJyfXIO5DCVAODg==", + "dev": true, + "license": "MIT", + "dependencies": { + "abort-controller": "^3.0.0", + "buffer": "^6.0.3", + "events": "^3.3.0", + "process": "^0.11.10", + "string_decoder": "^1.3.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, "node_modules/readdirp": { "version": "4.0.2", "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-4.0.2.tgz", @@ -21867,6 +22435,24 @@ "node": ">=4" } }, + "node_modules/strtok3": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/strtok3/-/strtok3-6.3.0.tgz", + "integrity": "sha512-fZtbhtvI9I48xDSywd/somNqgUHl2L2cstmXCCif0itOf96jeW18MBSyrLuNicYQVkvpOxkZtkzujiTJ9LW5Jw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@tokenizer/token": "^0.3.0", + "peek-readable": "^4.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/Borewit" + } + }, "node_modules/superagent": { "version": "9.0.2", "resolved": "https://registry.npmjs.org/superagent/-/superagent-9.0.2.tgz", @@ -22334,6 +22920,24 @@ "node": ">=0.6" } }, + "node_modules/token-types": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/token-types/-/token-types-4.2.1.tgz", + "integrity": "sha512-6udB24Q737UD/SDsKAHI9FCRP7Bqc9D/MQUV02ORQg5iskjtLJlZJNdN4kKtcdtwCeWIwIHDGaUsTsCCAa8sFQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@tokenizer/token": "^0.3.0", + "ieee754": "^1.2.1" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/Borewit" + } + }, "node_modules/touch": { "version": "3.1.1", "resolved": 
"https://registry.npmjs.org/touch/-/touch-3.1.1.tgz", @@ -23039,6 +23643,11 @@ "integrity": "sha512-WxONCrssBM8TSPRqN5EmsjVrsv4A8X12J4ArBiiayv3DyyG3ZlIg6yysuuSYdZsVz3TKcTg2fd//Ujd4CHV1iA==", "dev": true }, + "node_modules/underscore": { + "version": "1.4.4", + "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.4.4.tgz", + "integrity": "sha512-ZqGrAgaqqZM7LGRzNjLnw5elevWb5M8LEoDMadxIW3OWbcv72wMMgKdwOKpd5Fqxe8choLD8HN3iSj3TUh/giQ==" + }, "node_modules/undici": { "version": "6.20.1", "resolved": "https://registry.npmjs.org/undici/-/undici-6.20.1.tgz", @@ -24595,6 +25204,29 @@ } } }, + "node_modules/x3-linkedlist": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/x3-linkedlist/-/x3-linkedlist-1.2.0.tgz", + "integrity": "sha512-mH/YwxpYSKNa8bDNF1yOuZCMuV+K80LtDN8vcLDUAwNazCxptDNsYt+zA/EJeYiGbdtKposhKLZjErGVOR8mag==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + } + }, + "node_modules/xhr": { + "version": "2.6.0", + "resolved": "https://registry.npmjs.org/xhr/-/xhr-2.6.0.tgz", + "integrity": "sha512-/eCGLb5rxjx5e3mF1A7s+pLlR6CGyqWN91fv1JgER5mVWg1MZmlhBvy9kjcsOdRk8RrIujotWyJamfyrp+WIcA==", + "dev": true, + "license": "MIT", + "dependencies": { + "global": "~4.4.0", + "is-function": "^1.0.1", + "parse-headers": "^2.0.0", + "xtend": "^4.0.0" + } + }, "node_modules/xtend": { "version": "4.0.2", "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", @@ -24885,236 +25517,163 @@ "vitest": "^2.1.4" } }, - "packages/cluster-service": { - "name": "@restorecommerce/cluster-service", - "version": "1.0.4", + "packages/chassis-srv": { + "name": "@restorecommerce/chassis-srv", + "version": "1.6.8", "license": "MIT", "dependencies": { - "forever-monitor": "^3.0.3", - "recluster": "^1.0.0" + "@restorecommerce/grpc-client": "^2.2.7", + "@restorecommerce/kafka-client": "^1.2.28", + "@restorecommerce/logger": "^1.3.3", + "@restorecommerce/rc-grpc-clients": "^5.1.50", + "@restorecommerce/service-config": "^1.1.1", + 
"abort-controller-x": "^0.4.3", + "arangojs": "^9.1.0", + "async": "^3.2.6", + "async-retry": "^1.3.3", + "cache-manager": "^6.1.3", + "cls-hooked": "^4.2.2", + "cls-rtracer": "^2.6.3", + "google-protobuf": "^3.21.4", + "kafkajs": "^2.2.4", + "keyv": "^5.2.1", + "lodash": "^4.17.21", + "long": "^5.2.3", + "lru-cache": "^11.0.2", + "nedb": "^1.8.0", + "nice-grpc": "^2.1.10", + "nice-grpc-server-reflection": "^2.0.12", + "protobufjs": "^7.4.0", + "redis": "^4.7.0" }, "devDependencies": { - "is-generator": "^1.0.3", - "should": "^13.2.3", - "vitest": "^2.1.4" - }, - "engines": { - "node": ">= 10.0.0" - } - }, - "packages/dev": { - "name": "@restorecommerce/dev", - "version": "0.0.13", - "license": "MIT", - "dependencies": { - "@eslint/compat": "^1.0.3", - "@stylistic/eslint-plugin": "^2.10.1", - "@typescript-eslint/eslint-plugin": "8.10.0", - "@typescript-eslint/parser": "8.10.0", + "@restorecommerce/dev": "^0.0.13", + "@types/async": "^3.2.6", + "@types/async-retry": "^1.4.9", + "@types/cache-manager": "^4.0.6", + "@types/lodash": "^4.17.13", + "@types/nedb": "^1.8.16", + "@types/node": "^22.9.1", + "@typescript-eslint/parser": "^8.15.0", "@vitest/coverage-v8": "^3.2.4", - "eslint": "^9.12.0", - "eslint-plugin-file-extension-in-import-ts": "^2.1.0", - "eslint-plugin-prefer-arrow-functions": "^3.4.1", - "eslint-plugin-unicorn": "56.0.0", - "typescript-eslint": "8.11.0", + "cross-env": "^7.0.3", + "eslint": "^9.15.0", + "npm-run-all": "^4.1.5", + "rimraf": "^6.0.1", + "rxjs": "^7.8.1", + "should": "^13.2.3", + "typescript": "^5.6.3", + "typescript-eslint": "^8.15.0", "vitest": "^3.2.4" }, "engines": { - "node": ">=20" - }, - "peerDependencies": { - "@types/node": ">=18.15.3", - "eslint": ">=8.42.0", - "typescript": ">=5.0.0" + "node": ">= 20.0.0" } }, - "packages/dev/node_modules/@bcoe/v8-coverage": { + "packages/chassis-srv/node_modules/@bcoe/v8-coverage": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/@bcoe/v8-coverage/-/v8-coverage-1.0.2.tgz", 
"integrity": "sha512-6zABk/ECA/QYSCQ1NGiVwwbQerUCZ+TQbp64Q3AgmfNvurHH0j8TtXa1qbShXA6qqkpAj4V5W8pP6mLe1mcMqA==", + "dev": true, "license": "MIT", "engines": { "node": ">=18" } }, - "packages/dev/node_modules/@typescript-eslint/eslint-plugin": { - "version": "8.10.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.10.0.tgz", - "integrity": "sha512-phuB3hoP7FFKbRXxjl+DRlQDuJqhpOnm5MmtROXyWi3uS/Xg2ZXqiQfcG2BJHiN4QKyzdOJi3NEn/qTnjUlkmQ==", + "packages/chassis-srv/node_modules/@eslint/config-array": { + "version": "0.21.0", + "resolved": "https://registry.npmjs.org/@eslint/config-array/-/config-array-0.21.0.tgz", + "integrity": "sha512-ENIdc4iLu0d93HeYirvKmrzshzofPw6VkZRKQGe9Nv46ZnWUzcF1xV01dcvEg/1wXUR61OmmlSfyeyO7EvjLxQ==", + "dev": true, + "license": "Apache-2.0", "dependencies": { - "@eslint-community/regexpp": "^4.10.0", - "@typescript-eslint/scope-manager": "8.10.0", - "@typescript-eslint/type-utils": "8.10.0", - "@typescript-eslint/utils": "8.10.0", - "@typescript-eslint/visitor-keys": "8.10.0", - "graphemer": "^1.4.0", - "ignore": "^5.3.1", - "natural-compare": "^1.4.0", - "ts-api-utils": "^1.3.0" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependencies": { - "@typescript-eslint/parser": "^8.0.0 || ^8.0.0-alpha.0", - "eslint": "^8.57.0 || ^9.0.0" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "packages/dev/node_modules/@typescript-eslint/parser": { - "version": "8.10.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.10.0.tgz", - "integrity": "sha512-E24l90SxuJhytWJ0pTQydFT46Nk0Z+bsLKo/L8rtQSL93rQ6byd1V/QbDpHUTdLPOMsBCcYXZweADNCfOCmOAg==", - "dependencies": { - "@typescript-eslint/scope-manager": "8.10.0", - "@typescript-eslint/types": "8.10.0", - "@typescript-eslint/typescript-estree": "8.10.0", - 
"@typescript-eslint/visitor-keys": "8.10.0", - "debug": "^4.3.4" + "@eslint/object-schema": "^2.1.6", + "debug": "^4.3.1", + "minimatch": "^3.1.2" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependencies": { - "eslint": "^8.57.0 || ^9.0.0" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } } }, - "packages/dev/node_modules/@typescript-eslint/scope-manager": { - "version": "8.10.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.10.0.tgz", - "integrity": "sha512-AgCaEjhfql9MDKjMUxWvH7HjLeBqMCBfIaBbzzIcBbQPZE7CPh1m6FF+L75NUMJFMLYhCywJXIDEMa3//1A0dw==", + "packages/chassis-srv/node_modules/@eslint/core": { + "version": "0.15.2", + "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.15.2.tgz", + "integrity": "sha512-78Md3/Rrxh83gCxoUc0EiciuOHsIITzLy53m3d9UyiW8y9Dj2D29FeETqyKA+BRK76tnTp6RXWb3pCay8Oyomg==", + "dev": true, + "license": "Apache-2.0", "dependencies": { - "@typescript-eslint/types": "8.10.0", - "@typescript-eslint/visitor-keys": "8.10.0" + "@types/json-schema": "^7.0.15" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" } }, - "packages/dev/node_modules/@typescript-eslint/type-utils": { - "version": "8.10.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.10.0.tgz", - "integrity": "sha512-PCpUOpyQSpxBn230yIcK+LeCQaXuxrgCm2Zk1S+PTIRJsEfU6nJ0TtwyH8pIwPK/vJoA+7TZtzyAJSGBz+s/dg==", + "packages/chassis-srv/node_modules/@eslint/eslintrc": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-3.3.1.tgz", + "integrity": "sha512-gtF186CXhIl1p4pJNGZw8Yc6RlshoePRvE0X91oPGb3vZ8pM3qOS9W9NGPat9LziaBV7XrJWGylNQXkGcnM3IQ==", + "dev": true, + "license": "MIT", "dependencies": { - 
"@typescript-eslint/typescript-estree": "8.10.0", - "@typescript-eslint/utils": "8.10.0", - "debug": "^4.3.4", - "ts-api-utils": "^1.3.0" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" + "ajv": "^6.12.4", + "debug": "^4.3.2", + "espree": "^10.0.1", + "globals": "^14.0.0", + "ignore": "^5.2.0", + "import-fresh": "^3.2.1", + "js-yaml": "^4.1.0", + "minimatch": "^3.1.2", + "strip-json-comments": "^3.1.1" }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "packages/dev/node_modules/@typescript-eslint/types": { - "version": "8.10.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.10.0.tgz", - "integrity": "sha512-k/E48uzsfJCRRbGLapdZgrX52csmWJ2rcowwPvOZ8lwPUv3xW6CcFeJAXgx4uJm+Ge4+a4tFOkdYvSpxhRhg1w==", "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" }, "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" + "url": "https://opencollective.com/eslint" } }, - "packages/dev/node_modules/@typescript-eslint/typescript-estree": { - "version": "8.10.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.10.0.tgz", - "integrity": "sha512-3OE0nlcOHaMvQ8Xu5gAfME3/tWVDpb/HxtpUZ1WeOAksZ/h/gwrBzCklaGzwZT97/lBbbxJ16dMA98JMEngW4w==", - "dependencies": { - "@typescript-eslint/types": "8.10.0", - "@typescript-eslint/visitor-keys": "8.10.0", - "debug": "^4.3.4", - "fast-glob": "^3.3.2", - "is-glob": "^4.0.3", - "minimatch": "^9.0.4", - "semver": "^7.6.0", - "ts-api-utils": "^1.3.0" - }, + "packages/chassis-srv/node_modules/@eslint/js": { + "version": "9.35.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.35.0.tgz", + "integrity": "sha512-30iXE9whjlILfWobBkNerJo+TXYsgVM5ERQwMcMKCHckHflCmf7wXDAHlARoWnh0s1U72WqlbeyE7iAcCzuCPw==", + "dev": true, + "license": "MIT", "engines": { "node": "^18.18.0 || 
^20.9.0 || >=21.1.0" }, "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } + "url": "https://eslint.org/donate" } }, - "packages/dev/node_modules/@typescript-eslint/utils": { - "version": "8.10.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.10.0.tgz", - "integrity": "sha512-Oq4uZ7JFr9d1ZunE/QKy5egcDRXT/FrS2z/nlxzPua2VHFtmMvFNDvpq1m/hq0ra+T52aUezfcjGRIB7vNJF9w==", + "packages/chassis-srv/node_modules/@eslint/plugin-kit": { + "version": "0.3.5", + "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.3.5.tgz", + "integrity": "sha512-Z5kJ+wU3oA7MMIqVR9tyZRtjYPr4OC004Q4Rw7pgOKUOKkJfZ3O24nz3WYfGRpMDNmcOi3TwQOmgm7B7Tpii0w==", + "dev": true, + "license": "Apache-2.0", "dependencies": { - "@eslint-community/eslint-utils": "^4.4.0", - "@typescript-eslint/scope-manager": "8.10.0", - "@typescript-eslint/types": "8.10.0", - "@typescript-eslint/typescript-estree": "8.10.0" + "@eslint/core": "^0.15.2", + "levn": "^0.4.1" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependencies": { - "eslint": "^8.57.0 || ^9.0.0" } }, - "packages/dev/node_modules/@typescript-eslint/visitor-keys": { - "version": "8.10.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.10.0.tgz", - "integrity": "sha512-k8nekgqwr7FadWk548Lfph6V3r9OVqjzAIVskE7orMZR23cGJjAOVazsZSJW+ElyjfTM4wx/1g88Mi70DDtG9A==", + "packages/chassis-srv/node_modules/@types/node": { + "version": "22.18.1", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.18.1.tgz", + "integrity": "sha512-rzSDyhn4cYznVG+PCzGe1lwuMYJrcBS1fc3JqSa2PvtABwWo+dZ1ij5OVok3tqfpEBCBoaR4d7upFJk73HRJDw==", + "dev": true, + "license": "MIT", "dependencies": { - "@typescript-eslint/types": "8.10.0", - 
"eslint-visitor-keys": "^3.4.3" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" + "undici-types": "~6.21.0" } }, - "packages/dev/node_modules/@vitest/coverage-v8": { + "packages/chassis-srv/node_modules/@vitest/coverage-v8": { "version": "3.2.4", "resolved": "https://registry.npmjs.org/@vitest/coverage-v8/-/coverage-v8-3.2.4.tgz", "integrity": "sha512-EyF9SXU6kS5Ku/U82E259WSnvg6c8KTjppUncuNdm5QHpe17mwREHnjDzozC8x9MZ0xfBUFSaLkRv4TMA75ALQ==", + "dev": true, "license": "MIT", "dependencies": { "@ampproject/remapping": "^2.3.0", @@ -25144,10 +25703,11 @@ } } }, - "packages/dev/node_modules/@vitest/expect": { + "packages/chassis-srv/node_modules/@vitest/expect": { "version": "3.2.4", "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-3.2.4.tgz", "integrity": "sha512-Io0yyORnB6sikFlt8QW5K7slY4OjqNX9jmJQ02QDda8lyM6B5oNgVWoSoKPac8/kgnCUzuHQKrSLtu/uOqqrig==", + "dev": true, "license": "MIT", "dependencies": { "@types/chai": "^5.2.2", @@ -25160,10 +25720,38 @@ "url": "https://opencollective.com/vitest" } }, - "packages/dev/node_modules/@vitest/pretty-format": { + "packages/chassis-srv/node_modules/@vitest/mocker": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-3.2.4.tgz", + "integrity": "sha512-46ryTE9RZO/rfDd7pEqFl7etuyzekzEhUbTW3BvmeO/BcCMEgq59BKhek3dXDWgAj4oMK6OZi+vRr1wPW6qjEQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/spy": "3.2.4", + "estree-walker": "^3.0.3", + "magic-string": "^0.30.17" + }, + "funding": { + "url": "https://opencollective.com/vitest" + }, + "peerDependencies": { + "msw": "^2.4.9", + "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0" + }, + "peerDependenciesMeta": { + "msw": { + "optional": true + }, + "vite": { + "optional": true + } + } + }, + "packages/chassis-srv/node_modules/@vitest/pretty-format": { "version": "3.2.4", "resolved": 
"https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-3.2.4.tgz", "integrity": "sha512-IVNZik8IVRJRTr9fxlitMKeJeXFFFN0JaB9PHPGQ8NKQbGpfjlTx9zO4RefN8gp7eqjNy8nyK3NZmBzOPeIxtA==", + "dev": true, "license": "MIT", "dependencies": { "tinyrainbow": "^2.0.0" @@ -25172,10 +25760,11 @@ "url": "https://opencollective.com/vitest" } }, - "packages/dev/node_modules/@vitest/runner": { + "packages/chassis-srv/node_modules/@vitest/runner": { "version": "3.2.4", "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-3.2.4.tgz", "integrity": "sha512-oukfKT9Mk41LreEW09vt45f8wx7DordoWUZMYdY/cyAk7w5TWkTRCNZYF7sX7n2wB7jyGAl74OxgwhPgKaqDMQ==", + "dev": true, "license": "MIT", "dependencies": { "@vitest/utils": "3.2.4", @@ -25186,10 +25775,11 @@ "url": "https://opencollective.com/vitest" } }, - "packages/dev/node_modules/@vitest/snapshot": { + "packages/chassis-srv/node_modules/@vitest/snapshot": { "version": "3.2.4", "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-3.2.4.tgz", "integrity": "sha512-dEYtS7qQP2CjU27QBC5oUOxLE/v5eLkGqPE0ZKEIDGMs4vKWe7IjgLOeauHsR0D5YuuycGRO5oSRXnwnmA78fQ==", + "dev": true, "license": "MIT", "dependencies": { "@vitest/pretty-format": "3.2.4", @@ -25200,10 +25790,11 @@ "url": "https://opencollective.com/vitest" } }, - "packages/dev/node_modules/@vitest/spy": { + "packages/chassis-srv/node_modules/@vitest/spy": { "version": "3.2.4", "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-3.2.4.tgz", "integrity": "sha512-vAfasCOe6AIK70iP5UD11Ac4siNUNJ9i/9PZ3NKx07sG6sUxeag1LWdNrMWeKKYBLlzuK+Gn65Yd5nyL6ds+nw==", + "dev": true, "license": "MIT", "dependencies": { "tinyspy": "^4.0.3" @@ -25212,10 +25803,11 @@ "url": "https://opencollective.com/vitest" } }, - "packages/dev/node_modules/@vitest/utils": { + "packages/chassis-srv/node_modules/@vitest/utils": { "version": "3.2.4", "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-3.2.4.tgz", "integrity": 
"sha512-fB2V0JFrQSMsCo9HiSq3Ezpdv4iYaXRG1Sx8edX3MwxfyNn83mKiGzOcH+Fkxt4MHxr3y42fQi1oeAInqgX2QA==", + "dev": true, "license": "MIT", "dependencies": { "@vitest/pretty-format": "3.2.4", @@ -25226,56 +25818,343 @@ "url": "https://opencollective.com/vitest" } }, - "packages/dev/node_modules/eslint-visitor-keys": { - "version": "3.4.3", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", - "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + "packages/chassis-srv/node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" }, "funding": { - "url": "https://opencollective.com/eslint" + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" } }, - "packages/dev/node_modules/pathe": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", - "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", - "license": "MIT" - }, - "packages/dev/node_modules/semver": { - "version": "7.6.3", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", - "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", - "bin": { - "semver": "bin/semver.js" + "packages/chassis-srv/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + 
"dev": true, + "license": "MIT", + "dependencies": { + "color-convert": "^2.0.1" }, "engines": { - "node": ">=10" + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "packages/dev/node_modules/tinyrainbow": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/tinyrainbow/-/tinyrainbow-2.0.0.tgz", - "integrity": "sha512-op4nsTR47R6p0vMUUoYl/a+ljLFVtlfaXkLQmqfLR1qHma1h/ysYk4hEXZ880bf2CYgTskvTa/e196Vd5dDQXw==", + "packages/chassis-srv/node_modules/brace-expansion": { + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "dev": true, "license": "MIT", - "engines": { - "node": ">=14.0.0" + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" } }, - "packages/dev/node_modules/tinyspy": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/tinyspy/-/tinyspy-4.0.3.tgz", - "integrity": "sha512-t2T/WLB2WRgZ9EpE4jgPJ9w+i66UZfDc8wHh0xrwiRNN+UwH98GIJkTeZqX9rg0i0ptwzqW+uYeIF0T4F8LR7A==", + "packages/chassis-srv/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "packages/chassis-srv/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "color-name": "~1.1.4" + }, + 
"engines": { + "node": ">=7.0.0" + } + }, + "packages/chassis-srv/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true, + "license": "MIT" + }, + "packages/chassis-srv/node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "packages/chassis-srv/node_modules/eslint": { + "version": "9.35.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.35.0.tgz", + "integrity": "sha512-QePbBFMJFjgmlE+cXAlbHZbHpdFVS2E/6vzCy7aKlebddvl1vadiC4JFV5u/wqTkNUwEV8WrQi257jf5f06hrg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@eslint-community/eslint-utils": "^4.8.0", + "@eslint-community/regexpp": "^4.12.1", + "@eslint/config-array": "^0.21.0", + "@eslint/config-helpers": "^0.3.1", + "@eslint/core": "^0.15.2", + "@eslint/eslintrc": "^3.3.1", + "@eslint/js": "9.35.0", + "@eslint/plugin-kit": "^0.3.5", + "@humanfs/node": "^0.16.6", + "@humanwhocodes/module-importer": "^1.0.1", + "@humanwhocodes/retry": "^0.4.2", + "@types/estree": "^1.0.6", + "@types/json-schema": "^7.0.15", + "ajv": "^6.12.4", + "chalk": "^4.0.0", + "cross-spawn": "^7.0.6", + "debug": "^4.3.2", + "escape-string-regexp": "^4.0.0", + "eslint-scope": "^8.4.0", + "eslint-visitor-keys": "^4.2.1", + "espree": "^10.4.0", + "esquery": "^1.5.0", + "esutils": "^2.0.2", + "fast-deep-equal": "^3.1.3", + "file-entry-cache": "^8.0.0", + "find-up": "^5.0.0", + "glob-parent": "^6.0.2", + "ignore": "^5.2.0", + "imurmurhash": "^0.1.4", + "is-glob": 
"^4.0.0", + "json-stable-stringify-without-jsonify": "^1.0.1", + "lodash.merge": "^4.6.2", + "minimatch": "^3.1.2", + "natural-compare": "^1.4.0", + "optionator": "^0.9.3" + }, + "bin": { + "eslint": "bin/eslint.js" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://eslint.org/donate" + }, + "peerDependencies": { + "jiti": "*" + }, + "peerDependenciesMeta": { + "jiti": { + "optional": true + } + } + }, + "packages/chassis-srv/node_modules/eslint-scope": { + "version": "8.4.0", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-8.4.0.tgz", + "integrity": "sha512-sNXOfKCn74rt8RICKMvJS7XKV/Xk9kA7DyJr8mJik3S7Cwgy3qlkkmyS2uQB3jiJg6VNdZd/pDBJu0nvG2NlTg==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "esrecurse": "^4.3.0", + "estraverse": "^5.2.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "packages/chassis-srv/node_modules/eslint-visitor-keys": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.1.tgz", + "integrity": "sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "packages/chassis-srv/node_modules/espree": { + "version": "10.4.0", + "resolved": "https://registry.npmjs.org/espree/-/espree-10.4.0.tgz", + "integrity": "sha512-j6PAQ2uUr79PZhBjP5C5fhl8e39FmRnOjsD5lGnWrFU8i2G776tBK7+nP8KuQUTTyAZUwfQqXAgrVH5MbH9CYQ==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "acorn": "^8.15.0", + "acorn-jsx": "^5.3.2", + "eslint-visitor-keys": "^4.2.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + 
"packages/chassis-srv/node_modules/globals": { + "version": "14.0.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-14.0.0.tgz", + "integrity": "sha512-oahGvuMGQlPw/ivIYBjVSrWAfWLBeku5tpPE2fOPLi+WHffIWbuh2tCjhyQhTBPMf5E9jDEH4FOmTYgYwbKwtQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "packages/chassis-srv/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "packages/chassis-srv/node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true, + "license": "MIT" + }, + "packages/chassis-srv/node_modules/keyv": { + "version": "5.5.1", + "resolved": "https://registry.npmjs.org/keyv/-/keyv-5.5.1.tgz", + "integrity": "sha512-eF3cHZ40bVsjdlRi/RvKAuB0+B61Q1xWvohnrJrnaQslM3h1n79IV+mc9EGag4nrA9ZOlNyr3TUzW5c8uy8vNA==", + "license": "MIT", + "dependencies": { + "@keyv/serialize": "^1.1.1" + } + }, + "packages/chassis-srv/node_modules/long": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/long/-/long-5.3.2.tgz", + "integrity": "sha512-mNAgZ1GmyNhD7AuqnTG3/VQ26o760+ZYBPKjPvugO8+nLbYfX6TVpJPseBvopbdY+qpZ/lKUnmEc1LeZYS3QAA==", + "license": "Apache-2.0" + }, + "packages/chassis-srv/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "license": "ISC", + "dependencies": { 
+ "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "packages/chassis-srv/node_modules/pathe": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", + "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", + "dev": true, + "license": "MIT" + }, + "packages/chassis-srv/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "packages/chassis-srv/node_modules/tinyrainbow": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/tinyrainbow/-/tinyrainbow-2.0.0.tgz", + "integrity": "sha512-op4nsTR47R6p0vMUUoYl/a+ljLFVtlfaXkLQmqfLR1qHma1h/ysYk4hEXZ880bf2CYgTskvTa/e196Vd5dDQXw==", + "dev": true, "license": "MIT", "engines": { "node": ">=14.0.0" } }, - "packages/dev/node_modules/vite-node": { + "packages/chassis-srv/node_modules/tinyspy": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/tinyspy/-/tinyspy-4.0.3.tgz", + "integrity": "sha512-t2T/WLB2WRgZ9EpE4jgPJ9w+i66UZfDc8wHh0xrwiRNN+UwH98GIJkTeZqX9rg0i0ptwzqW+uYeIF0T4F8LR7A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14.0.0" + } + }, + "packages/chassis-srv/node_modules/typescript-eslint": { + "version": "8.43.0", + "resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.43.0.tgz", + "integrity": "sha512-FyRGJKUGvcFekRRcBKFBlAhnp4Ng8rhe8tuvvkR9OiU0gfd4vyvTRQHEckO6VDlH57jbeUQem2IpqPq9kLJH+w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/eslint-plugin": "8.43.0", + "@typescript-eslint/parser": "8.43.0", + "@typescript-eslint/typescript-estree": "8.43.0", + "@typescript-eslint/utils": 
"8.43.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <6.0.0" + } + }, + "packages/chassis-srv/node_modules/undici-types": { + "version": "6.21.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz", + "integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==", + "dev": true, + "license": "MIT" + }, + "packages/chassis-srv/node_modules/vite-node": { "version": "3.2.4", "resolved": "https://registry.npmjs.org/vite-node/-/vite-node-3.2.4.tgz", "integrity": "sha512-EbKSKh+bh1E1IFxeO0pg1n4dvoOTt0UDiXMd/qn++r98+jPO1xtJilvXldeuQ8giIB5IkpjCgMleHMNEsGH6pg==", + "dev": true, "license": "MIT", "dependencies": { "cac": "^6.7.14", @@ -25294,10 +26173,11 @@ "url": "https://opencollective.com/vitest" } }, - "packages/dev/node_modules/vitest": { + "packages/chassis-srv/node_modules/vitest": { "version": "3.2.4", "resolved": "https://registry.npmjs.org/vitest/-/vitest-3.2.4.tgz", "integrity": "sha512-LUCP5ev3GURDysTWiP47wRRUpLKMOfPh+yKTx3kVIEiu5KOMeqzpnYNsKyOoVrULivR8tLcks4+lga33Whn90A==", + "dev": true, "license": "MIT", "dependencies": { "@types/chai": "^5.2.2", @@ -25366,38 +26246,519 @@ } } }, - "packages/dev/node_modules/vitest/node_modules/@vitest/mocker": { - "version": "3.2.4", - "resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-3.2.4.tgz", - "integrity": "sha512-46ryTE9RZO/rfDd7pEqFl7etuyzekzEhUbTW3BvmeO/BcCMEgq59BKhek3dXDWgAj4oMK6OZi+vRr1wPW6qjEQ==", + "packages/cluster-service": { + "name": "@restorecommerce/cluster-service", + "version": "1.0.4", "license": "MIT", "dependencies": { - "@vitest/spy": "3.2.4", - "estree-walker": "^3.0.3", - "magic-string": "^0.30.17" - }, - "funding": { - "url": "https://opencollective.com/vitest" + "forever-monitor": "^3.0.3", + 
"recluster": "^1.0.0" }, - "peerDependencies": { - "msw": "^2.4.9", - "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0" + "devDependencies": { + "is-generator": "^1.0.3", + "should": "^13.2.3", + "vitest": "^2.1.4" }, - "peerDependenciesMeta": { - "msw": { - "optional": true - }, - "vite": { - "optional": true - } + "engines": { + "node": ">= 10.0.0" } }, - "packages/facade": { - "name": "@restorecommerce/facade", - "version": "2.2.23", - "hasInstallScript": true, - "license": "MIT", - "dependencies": { + "packages/dev": { + "name": "@restorecommerce/dev", + "version": "0.0.13", + "license": "MIT", + "dependencies": { + "@eslint/compat": "^1.0.3", + "@stylistic/eslint-plugin": "^2.10.1", + "@typescript-eslint/eslint-plugin": "8.10.0", + "@typescript-eslint/parser": "8.10.0", + "@vitest/coverage-v8": "^3.2.4", + "eslint": "^9.12.0", + "eslint-plugin-file-extension-in-import-ts": "^2.1.0", + "eslint-plugin-prefer-arrow-functions": "^3.4.1", + "eslint-plugin-unicorn": "56.0.0", + "typescript-eslint": "8.11.0", + "vitest": "^3.2.4" + }, + "engines": { + "node": ">=20" + }, + "peerDependencies": { + "@types/node": ">=18.15.3", + "eslint": ">=8.42.0", + "typescript": ">=5.0.0" + } + }, + "packages/dev/node_modules/@bcoe/v8-coverage": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@bcoe/v8-coverage/-/v8-coverage-1.0.2.tgz", + "integrity": "sha512-6zABk/ECA/QYSCQ1NGiVwwbQerUCZ+TQbp64Q3AgmfNvurHH0j8TtXa1qbShXA6qqkpAj4V5W8pP6mLe1mcMqA==", + "license": "MIT", + "engines": { + "node": ">=18" + } + }, + "packages/dev/node_modules/@typescript-eslint/eslint-plugin": { + "version": "8.10.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.10.0.tgz", + "integrity": "sha512-phuB3hoP7FFKbRXxjl+DRlQDuJqhpOnm5MmtROXyWi3uS/Xg2ZXqiQfcG2BJHiN4QKyzdOJi3NEn/qTnjUlkmQ==", + "dependencies": { + "@eslint-community/regexpp": "^4.10.0", + "@typescript-eslint/scope-manager": "8.10.0", + "@typescript-eslint/type-utils": "8.10.0", + 
"@typescript-eslint/utils": "8.10.0", + "@typescript-eslint/visitor-keys": "8.10.0", + "graphemer": "^1.4.0", + "ignore": "^5.3.1", + "natural-compare": "^1.4.0", + "ts-api-utils": "^1.3.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "@typescript-eslint/parser": "^8.0.0 || ^8.0.0-alpha.0", + "eslint": "^8.57.0 || ^9.0.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "packages/dev/node_modules/@typescript-eslint/parser": { + "version": "8.10.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.10.0.tgz", + "integrity": "sha512-E24l90SxuJhytWJ0pTQydFT46Nk0Z+bsLKo/L8rtQSL93rQ6byd1V/QbDpHUTdLPOMsBCcYXZweADNCfOCmOAg==", + "dependencies": { + "@typescript-eslint/scope-manager": "8.10.0", + "@typescript-eslint/types": "8.10.0", + "@typescript-eslint/typescript-estree": "8.10.0", + "@typescript-eslint/visitor-keys": "8.10.0", + "debug": "^4.3.4" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "packages/dev/node_modules/@typescript-eslint/scope-manager": { + "version": "8.10.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.10.0.tgz", + "integrity": "sha512-AgCaEjhfql9MDKjMUxWvH7HjLeBqMCBfIaBbzzIcBbQPZE7CPh1m6FF+L75NUMJFMLYhCywJXIDEMa3//1A0dw==", + "dependencies": { + "@typescript-eslint/types": "8.10.0", + "@typescript-eslint/visitor-keys": "8.10.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + 
"packages/dev/node_modules/@typescript-eslint/type-utils": { + "version": "8.10.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.10.0.tgz", + "integrity": "sha512-PCpUOpyQSpxBn230yIcK+LeCQaXuxrgCm2Zk1S+PTIRJsEfU6nJ0TtwyH8pIwPK/vJoA+7TZtzyAJSGBz+s/dg==", + "dependencies": { + "@typescript-eslint/typescript-estree": "8.10.0", + "@typescript-eslint/utils": "8.10.0", + "debug": "^4.3.4", + "ts-api-utils": "^1.3.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "packages/dev/node_modules/@typescript-eslint/types": { + "version": "8.10.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.10.0.tgz", + "integrity": "sha512-k/E48uzsfJCRRbGLapdZgrX52csmWJ2rcowwPvOZ8lwPUv3xW6CcFeJAXgx4uJm+Ge4+a4tFOkdYvSpxhRhg1w==", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "packages/dev/node_modules/@typescript-eslint/typescript-estree": { + "version": "8.10.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.10.0.tgz", + "integrity": "sha512-3OE0nlcOHaMvQ8Xu5gAfME3/tWVDpb/HxtpUZ1WeOAksZ/h/gwrBzCklaGzwZT97/lBbbxJ16dMA98JMEngW4w==", + "dependencies": { + "@typescript-eslint/types": "8.10.0", + "@typescript-eslint/visitor-keys": "8.10.0", + "debug": "^4.3.4", + "fast-glob": "^3.3.2", + "is-glob": "^4.0.3", + "minimatch": "^9.0.4", + "semver": "^7.6.0", + "ts-api-utils": "^1.3.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + 
"packages/dev/node_modules/@typescript-eslint/utils": { + "version": "8.10.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.10.0.tgz", + "integrity": "sha512-Oq4uZ7JFr9d1ZunE/QKy5egcDRXT/FrS2z/nlxzPua2VHFtmMvFNDvpq1m/hq0ra+T52aUezfcjGRIB7vNJF9w==", + "dependencies": { + "@eslint-community/eslint-utils": "^4.4.0", + "@typescript-eslint/scope-manager": "8.10.0", + "@typescript-eslint/types": "8.10.0", + "@typescript-eslint/typescript-estree": "8.10.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0" + } + }, + "packages/dev/node_modules/@typescript-eslint/visitor-keys": { + "version": "8.10.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.10.0.tgz", + "integrity": "sha512-k8nekgqwr7FadWk548Lfph6V3r9OVqjzAIVskE7orMZR23cGJjAOVazsZSJW+ElyjfTM4wx/1g88Mi70DDtG9A==", + "dependencies": { + "@typescript-eslint/types": "8.10.0", + "eslint-visitor-keys": "^3.4.3" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "packages/dev/node_modules/@vitest/coverage-v8": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/coverage-v8/-/coverage-v8-3.2.4.tgz", + "integrity": "sha512-EyF9SXU6kS5Ku/U82E259WSnvg6c8KTjppUncuNdm5QHpe17mwREHnjDzozC8x9MZ0xfBUFSaLkRv4TMA75ALQ==", + "license": "MIT", + "dependencies": { + "@ampproject/remapping": "^2.3.0", + "@bcoe/v8-coverage": "^1.0.2", + "ast-v8-to-istanbul": "^0.3.3", + "debug": "^4.4.1", + "istanbul-lib-coverage": "^3.2.2", + "istanbul-lib-report": "^3.0.1", + "istanbul-lib-source-maps": "^5.0.6", + "istanbul-reports": "^3.1.7", + "magic-string": "^0.30.17", + "magicast": "^0.3.5", + "std-env": "^3.9.0", + "test-exclude": "^7.0.1", + 
"tinyrainbow": "^2.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + }, + "peerDependencies": { + "@vitest/browser": "3.2.4", + "vitest": "3.2.4" + }, + "peerDependenciesMeta": { + "@vitest/browser": { + "optional": true + } + } + }, + "packages/dev/node_modules/@vitest/expect": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-3.2.4.tgz", + "integrity": "sha512-Io0yyORnB6sikFlt8QW5K7slY4OjqNX9jmJQ02QDda8lyM6B5oNgVWoSoKPac8/kgnCUzuHQKrSLtu/uOqqrig==", + "license": "MIT", + "dependencies": { + "@types/chai": "^5.2.2", + "@vitest/spy": "3.2.4", + "@vitest/utils": "3.2.4", + "chai": "^5.2.0", + "tinyrainbow": "^2.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "packages/dev/node_modules/@vitest/pretty-format": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-3.2.4.tgz", + "integrity": "sha512-IVNZik8IVRJRTr9fxlitMKeJeXFFFN0JaB9PHPGQ8NKQbGpfjlTx9zO4RefN8gp7eqjNy8nyK3NZmBzOPeIxtA==", + "license": "MIT", + "dependencies": { + "tinyrainbow": "^2.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "packages/dev/node_modules/@vitest/runner": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-3.2.4.tgz", + "integrity": "sha512-oukfKT9Mk41LreEW09vt45f8wx7DordoWUZMYdY/cyAk7w5TWkTRCNZYF7sX7n2wB7jyGAl74OxgwhPgKaqDMQ==", + "license": "MIT", + "dependencies": { + "@vitest/utils": "3.2.4", + "pathe": "^2.0.3", + "strip-literal": "^3.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "packages/dev/node_modules/@vitest/snapshot": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-3.2.4.tgz", + "integrity": "sha512-dEYtS7qQP2CjU27QBC5oUOxLE/v5eLkGqPE0ZKEIDGMs4vKWe7IjgLOeauHsR0D5YuuycGRO5oSRXnwnmA78fQ==", + "license": "MIT", + "dependencies": { + "@vitest/pretty-format": "3.2.4", + "magic-string": 
"^0.30.17", + "pathe": "^2.0.3" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "packages/dev/node_modules/@vitest/spy": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-3.2.4.tgz", + "integrity": "sha512-vAfasCOe6AIK70iP5UD11Ac4siNUNJ9i/9PZ3NKx07sG6sUxeag1LWdNrMWeKKYBLlzuK+Gn65Yd5nyL6ds+nw==", + "license": "MIT", + "dependencies": { + "tinyspy": "^4.0.3" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "packages/dev/node_modules/@vitest/utils": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-3.2.4.tgz", + "integrity": "sha512-fB2V0JFrQSMsCo9HiSq3Ezpdv4iYaXRG1Sx8edX3MwxfyNn83mKiGzOcH+Fkxt4MHxr3y42fQi1oeAInqgX2QA==", + "license": "MIT", + "dependencies": { + "@vitest/pretty-format": "3.2.4", + "loupe": "^3.1.4", + "tinyrainbow": "^2.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "packages/dev/node_modules/eslint-visitor-keys": { + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", + "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "packages/dev/node_modules/pathe": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", + "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", + "license": "MIT" + }, + "packages/dev/node_modules/semver": { + "version": "7.6.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", + "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + 
"packages/dev/node_modules/tinyrainbow": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/tinyrainbow/-/tinyrainbow-2.0.0.tgz", + "integrity": "sha512-op4nsTR47R6p0vMUUoYl/a+ljLFVtlfaXkLQmqfLR1qHma1h/ysYk4hEXZ880bf2CYgTskvTa/e196Vd5dDQXw==", + "license": "MIT", + "engines": { + "node": ">=14.0.0" + } + }, + "packages/dev/node_modules/tinyspy": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/tinyspy/-/tinyspy-4.0.3.tgz", + "integrity": "sha512-t2T/WLB2WRgZ9EpE4jgPJ9w+i66UZfDc8wHh0xrwiRNN+UwH98GIJkTeZqX9rg0i0ptwzqW+uYeIF0T4F8LR7A==", + "license": "MIT", + "engines": { + "node": ">=14.0.0" + } + }, + "packages/dev/node_modules/vite-node": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/vite-node/-/vite-node-3.2.4.tgz", + "integrity": "sha512-EbKSKh+bh1E1IFxeO0pg1n4dvoOTt0UDiXMd/qn++r98+jPO1xtJilvXldeuQ8giIB5IkpjCgMleHMNEsGH6pg==", + "license": "MIT", + "dependencies": { + "cac": "^6.7.14", + "debug": "^4.4.1", + "es-module-lexer": "^1.7.0", + "pathe": "^2.0.3", + "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0" + }, + "bin": { + "vite-node": "vite-node.mjs" + }, + "engines": { + "node": "^18.0.0 || ^20.0.0 || >=22.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "packages/dev/node_modules/vitest": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/vitest/-/vitest-3.2.4.tgz", + "integrity": "sha512-LUCP5ev3GURDysTWiP47wRRUpLKMOfPh+yKTx3kVIEiu5KOMeqzpnYNsKyOoVrULivR8tLcks4+lga33Whn90A==", + "license": "MIT", + "dependencies": { + "@types/chai": "^5.2.2", + "@vitest/expect": "3.2.4", + "@vitest/mocker": "3.2.4", + "@vitest/pretty-format": "^3.2.4", + "@vitest/runner": "3.2.4", + "@vitest/snapshot": "3.2.4", + "@vitest/spy": "3.2.4", + "@vitest/utils": "3.2.4", + "chai": "^5.2.0", + "debug": "^4.4.1", + "expect-type": "^1.2.1", + "magic-string": "^0.30.17", + "pathe": "^2.0.3", + "picomatch": "^4.0.2", + "std-env": "^3.9.0", + "tinybench": "^2.9.0", + "tinyexec": "^0.3.2", + 
"tinyglobby": "^0.2.14", + "tinypool": "^1.1.1", + "tinyrainbow": "^2.0.0", + "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0", + "vite-node": "3.2.4", + "why-is-node-running": "^2.3.0" + }, + "bin": { + "vitest": "vitest.mjs" + }, + "engines": { + "node": "^18.0.0 || ^20.0.0 || >=22.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + }, + "peerDependencies": { + "@edge-runtime/vm": "*", + "@types/debug": "^4.1.12", + "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", + "@vitest/browser": "3.2.4", + "@vitest/ui": "3.2.4", + "happy-dom": "*", + "jsdom": "*" + }, + "peerDependenciesMeta": { + "@edge-runtime/vm": { + "optional": true + }, + "@types/debug": { + "optional": true + }, + "@types/node": { + "optional": true + }, + "@vitest/browser": { + "optional": true + }, + "@vitest/ui": { + "optional": true + }, + "happy-dom": { + "optional": true + }, + "jsdom": { + "optional": true + } + } + }, + "packages/dev/node_modules/vitest/node_modules/@vitest/mocker": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-3.2.4.tgz", + "integrity": "sha512-46ryTE9RZO/rfDd7pEqFl7etuyzekzEhUbTW3BvmeO/BcCMEgq59BKhek3dXDWgAj4oMK6OZi+vRr1wPW6qjEQ==", + "license": "MIT", + "dependencies": { + "@vitest/spy": "3.2.4", + "estree-walker": "^3.0.3", + "magic-string": "^0.30.17" + }, + "funding": { + "url": "https://opencollective.com/vitest" + }, + "peerDependencies": { + "msw": "^2.4.9", + "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0" + }, + "peerDependenciesMeta": { + "msw": { + "optional": true + }, + "vite": { + "optional": true + } + } + }, + "packages/facade": { + "name": "@restorecommerce/facade", + "version": "2.2.23", + "hasInstallScript": true, + "license": "MIT", + "dependencies": { "@apollo/gateway": "^2.9.3", "@apollo/server": "^4.11.2", "@apollo/subgraph": "^2.9.3", @@ -25411,267 +26772,1067 @@ "@restorecommerce/kafka-client": "^1.2.36", "@restorecommerce/logger": "^1.3.4", "@restorecommerce/rc-grpc-clients": "^5.1.58", - 
"@restorecommerce/service-config": "^1.1.3", - "array.prototype.flat": "^1.3.1", - "co": "^4.6.0", - "debug": "^4.3.5", - "es-dirname": "^0.1.0", - "graphql": "^16.9.0", - "graphql-upload": "^17.0.0", - "graphql-ws": "^5.16.0", - "handlebars": "^4.7.7", - "into-stream": "^8.0.0", - "jose": "^5.4.0", - "koa": "^2.15.3", - "koa-body": "^6.0.1", - "koa-bodyparser": "^4.4.1", - "koa-compose": "^4.1.0", - "koa-helmet": "^8.0.1", - "koa-mount": "^4.0.0", - "koa-router": "^13.0.1", - "koa-websocket": "^7.0.0", + "@restorecommerce/service-config": "^1.1.3", + "array.prototype.flat": "^1.3.1", + "co": "^4.6.0", + "debug": "^4.3.5", + "es-dirname": "^0.1.0", + "graphql": "^16.9.0", + "graphql-upload": "^17.0.0", + "graphql-ws": "^5.16.0", + "handlebars": "^4.7.7", + "into-stream": "^8.0.0", + "jose": "^5.4.0", + "koa": "^2.15.3", + "koa-body": "^6.0.1", + "koa-bodyparser": "^4.4.1", + "koa-compose": "^4.1.0", + "koa-helmet": "^8.0.1", + "koa-mount": "^4.0.0", + "koa-router": "^13.0.1", + "koa-websocket": "^7.0.0", + "lodash": "^4.17.21", + "lru-cache": "11.0.2", + "nanoid": "^5.0.7", + "nice-grpc": "^2.1.7", + "node-fetch": "^3.3.1", + "oauth": "^0.10.0", + "oidc-provider": "^8.4.6", + "request-ip": "^3.3.0", + "rxjs": "^7.8.1", + "ts-proto-descriptors": "2.0.0", + "ua-parser-js": "^2.0.0", + "uuid": "11.0.2", + "winston": "^3.9.0", + "ws": "^8.17.0" + }, + "devDependencies": { + "@graphql-codegen/core": "^4.0.2", + "@graphql-codegen/typescript": "^4.1.1", + "@graphql-codegen/typescript-resolvers": "^4.4.0", + "@types/array.prototype.flat": "^1.2.1", + "@types/co": "^4.6.3", + "@types/debug": "^4.1.8", + "@types/ejs": "^3.1.2", + "@types/express": "5.0.0", + "@types/express-serve-static-core": "5.0.1", + "@types/graphql-upload": "^16.0.7", + "@types/koa": "^2.15.0", + "@types/koa__cors": "^5.0.0", + "@types/koa-bodyparser": "^4.3.12", + "@types/koa-helmet": "^6.0.8", + "@types/koa-mount": "^4.0.5", + "@types/koa-router": "^7.4.8", + "@types/koa-websocket": "^5.0.10", + 
"@types/lodash": "^4.14.195", + "@types/lru-cache": "^7.10.10", + "@types/node": "22.8.6", + "@types/oauth": "^0.9.5", + "@types/oidc-provider": "^8.4.4", + "@types/request-ip": "^0.0.41", + "@types/supertest": "^6.0.2", + "@types/useragent": "^2.3.1", + "@types/uuid": "10.0.0", + "@types/ws": "^8.5.4", + "eslint": "^9.4.0", + "nodemon": "^3.1.3", + "npm-run-all": "^4.1.5", + "rimraf": "6.0.1", + "supertest": "^7.0.0", + "tsconfig-paths": "^4.2.0", + "tsx": "^4.13.2", + "typescript": "^5.3.3", + "vitest": "^2.1.4" + } + }, + "packages/facade/node_modules/@types/koa-bodyparser": { + "version": "4.3.12", + "resolved": "https://registry.npmjs.org/@types/koa-bodyparser/-/koa-bodyparser-4.3.12.tgz", + "integrity": "sha512-hKMmRMVP889gPIdLZmmtou/BijaU1tHPyMNmcK7FAHAdATnRcGQQy78EqTTxLH1D4FTsrxIzklAQCso9oGoebQ==", + "dev": true, + "dependencies": { + "@types/koa": "*" + } + }, + "packages/facade/node_modules/@types/node": { + "version": "22.8.6", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.8.6.tgz", + "integrity": "sha512-tosuJYKrIqjQIlVCM4PEGxOmyg3FCPa/fViuJChnGeEIhjA46oy8FMVoF9su1/v8PNs2a8Q0iFNyOx0uOF91nw==", + "dev": true, + "dependencies": { + "undici-types": "~6.19.8" + } + }, + "packages/facade/node_modules/helmet": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/helmet/-/helmet-8.0.0.tgz", + "integrity": "sha512-VyusHLEIIO5mjQPUI1wpOAEu+wl6Q0998jzTxqUYGE45xCIcAxy3MsbEK/yyJUJ3ADeMoB6MornPH6GMWAf+Pw==", + "peer": true, + "engines": { + "node": ">=18.0.0" + } + }, + "packages/facade/node_modules/koa-helmet": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/koa-helmet/-/koa-helmet-8.0.1.tgz", + "integrity": "sha512-lXDqTqNLgqyOKEuCSF3MFReJmEQQ0GD0NzkdSe9dKH87NSMc5GxAA7H5mYaAT+UJypYkIS1lPNRqTuEUgl+l3Q==", + "engines": { + "node": ">= 18.0.0" + }, + "peerDependencies": { + "helmet": ">= 6" + } + }, + "packages/facade/node_modules/ua-parser-js": { + "version": "2.0.0", + "resolved": 
"https://registry.npmjs.org/ua-parser-js/-/ua-parser-js-2.0.0.tgz", + "integrity": "sha512-SASgD4RlB7+SCMmlVNqrhPw0f/2pGawWBzJ2+LwGTD0GgNnrKGzPJDiraGHJDwW9Zm5DH2lTmUpqDpbZjJY4+Q==", + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/ua-parser-js" + }, + { + "type": "paypal", + "url": "https://paypal.me/faisalman" + }, + { + "type": "github", + "url": "https://github.com/sponsors/faisalman" + } + ], + "dependencies": { + "detect-europe-js": "^0.1.2", + "is-standalone-pwa": "^0.1.1", + "ua-is-frozen": "^0.1.2" + }, + "bin": { + "ua-parser-js": "script/cli.js" + }, + "engines": { + "node": "*" + } + }, + "packages/gql-bot": { + "name": "@restorecommerce/gql-bot", + "version": "1.0.9", + "license": "MIT", + "dependencies": { + "@apollo/client": "^3.11.8", + "chalk": "^5.3.0", + "color-hash": "^2.0.2", + "graphql": "^16.9.0", + "js-yaml": "^4.1.0", + "lodash": "^4.17.21", + "node-fetch": "^3.3.2", + "promise-streams": "^2.1.1", + "readdirp": "^4.0.2", + "through2": "^4.0.2", + "yaml-document-stream": "^1.1.0" + }, + "devDependencies": { + "@restorecommerce/dev": "^0.0.13", + "@types/color-hash": "^2.0.0", + "@types/node": "^22.7.9", + "@types/through2": "^2.0.41", + "@typescript-eslint/eslint-plugin": "^8.11.0", + "@typescript-eslint/parser": "^8.11.0", + "@vitest/coverage-v8": "^2.1.4", + "eslint": "^9.13.0", + "eslint-plugin-prefer-arrow-functions": "^3.1.4", + "msw": "^2.5.2", + "nock": "^13.5.5", + "npm-run-all": "^4.1.5", + "ts-node": "^10.9.1", + "typescript": "^5.1.6", + "vitest": "^2.1.4" + }, + "engines": { + "node": ">= 12.0.0" + } + }, + "packages/gql-bot/node_modules/chalk": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.3.0.tgz", + "integrity": "sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w==", + "engines": { + "node": "^12.17.0 || ^14.13 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + 
"packages/grpc-client": { + "name": "@restorecommerce/grpc-client", + "version": "2.2.9", + "license": "MIT", + "dependencies": { + "@grpc/grpc-js": "^1.10.10", + "@grpc/proto-loader": "^0.7.13", + "@restorecommerce/logger": "^1.3.4", + "cls-rtracer": "^2.6.3", + "nice-grpc": "^2.1.9", + "nice-grpc-client-middleware-deadline": "^2.0.12", + "nice-grpc-client-middleware-retry": "^3.1.8", + "protobufjs": "^7.3.2", + "retry": "^0.13.1", + "rxjs": "^7.8.1", + "winston": "^3.13.0" + }, + "devDependencies": { + "@restorecommerce/dev": "^0.0.13", + "@types/lodash": "^4.17.5", + "@types/node": "22.8.4", + "@types/retry": "^0.12.5", + "@types/uuid": "^10.0.0", + "npm-run-all": "^4.1.5", + "rimraf": "6.0.1", + "ts-proto": "2.2.5", + "ts-proto-descriptors": "2.0.0", + "typescript": "^5.5.2", + "vitest": "^2.1.4" + }, + "engines": { + "node": ">= 20.0.0" + } + }, + "packages/grpc-client/node_modules/@types/node": { + "version": "22.8.4", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.8.4.tgz", + "integrity": "sha512-SpNNxkftTJOPk0oN+y2bIqurEXHTA2AOZ3EJDDKeJ5VzkvvORSvmQXGQarcOzWV1ac7DCaPBEdMDxBsM+d8jWw==", + "dev": true, + "dependencies": { + "undici-types": "~6.19.8" + } + }, + "packages/handlebars-helperized": { + "name": "@restorecommerce/handlebars-helperized", + "version": "2.0.0", + "license": "MIT", + "dependencies": { + "handlebars": "^4.7.8", + "handlebars-layouts": "^3.1.4", + "juice": "11.0.0", + "moment-timezone": "^0.6.0" + }, + "devDependencies": { + "@restorecommerce/dev": "^0.0.13", + "@types/moment-timezone": "^0.5.13", + "@types/node": "^24.3.0", + "@vitest/coverage-v8": "^2.1.4", + "assert": "^2.1.0", + "eslint": "9.13.0", + "eslint-plugin-prefer-arrow-functions": "^3.2.4", + "is-generator": "^1.0.3", + "npm-run-all": "^4.1.5", + "typescript": "^5.2.2", + "vitest": "^2.1.4" + }, + "engines": { + "node": ">= 20.0.0" + } + }, + "packages/handlebars-helperized/node_modules/@eslint/js": { + "version": "9.13.0", + "resolved": 
"https://registry.npmjs.org/@eslint/js/-/js-9.13.0.tgz", + "integrity": "sha512-IFLyoY4d72Z5y/6o/BazFBezupzI/taV8sGumxTAVw3lXG9A6md1Dc34T9s1FoD/an9pJH8RHbAxsaEbBed9lA==", + "dev": true, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "packages/handlebars-helperized/node_modules/@humanwhocodes/retry": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.3.1.tgz", + "integrity": "sha512-JBxkERygn7Bv/GbN5Rv8Ul6LVknS+5Bp6RgDC/O8gEBU/yeH5Ui5C/OlWrTb6qct7LjjfT6Re2NxB0ln0yYybA==", + "dev": true, + "engines": { + "node": ">=18.18" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/nzakas" + } + }, + "packages/handlebars-helperized/node_modules/@types/node": { + "version": "24.3.0", + "resolved": "https://registry.npmjs.org/@types/node/-/node-24.3.0.tgz", + "integrity": "sha512-aPTXCrfwnDLj4VvXrm+UUCQjNEvJgNA8s5F1cvwQU+3KNltTOkBm1j30uNLyqqPNe7gE3KFzImYoZEfLhp4Yow==", + "dev": true, + "license": "MIT", + "dependencies": { + "undici-types": "~7.10.0" + } + }, + "packages/handlebars-helperized/node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "packages/handlebars-helperized/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": 
"https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "packages/handlebars-helperized/node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "packages/handlebars-helperized/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "packages/handlebars-helperized/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "packages/handlebars-helperized/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "packages/handlebars-helperized/node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": 
"https://github.com/sponsors/sindresorhus" + } + }, + "packages/handlebars-helperized/node_modules/eslint": { + "version": "9.13.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.13.0.tgz", + "integrity": "sha512-EYZK6SX6zjFHST/HRytOdA/zE72Cq/bfw45LSyuwrdvcclb/gqV8RRQxywOBEWO2+WDpva6UZa4CcDeJKzUCFA==", + "dev": true, + "dependencies": { + "@eslint-community/eslint-utils": "^4.2.0", + "@eslint-community/regexpp": "^4.11.0", + "@eslint/config-array": "^0.18.0", + "@eslint/core": "^0.7.0", + "@eslint/eslintrc": "^3.1.0", + "@eslint/js": "9.13.0", + "@eslint/plugin-kit": "^0.2.0", + "@humanfs/node": "^0.16.5", + "@humanwhocodes/module-importer": "^1.0.1", + "@humanwhocodes/retry": "^0.3.1", + "@types/estree": "^1.0.6", + "@types/json-schema": "^7.0.15", + "ajv": "^6.12.4", + "chalk": "^4.0.0", + "cross-spawn": "^7.0.2", + "debug": "^4.3.2", + "escape-string-regexp": "^4.0.0", + "eslint-scope": "^8.1.0", + "eslint-visitor-keys": "^4.1.0", + "espree": "^10.2.0", + "esquery": "^1.5.0", + "esutils": "^2.0.2", + "fast-deep-equal": "^3.1.3", + "file-entry-cache": "^8.0.0", + "find-up": "^5.0.0", + "glob-parent": "^6.0.2", + "ignore": "^5.2.0", + "imurmurhash": "^0.1.4", + "is-glob": "^4.0.0", + "json-stable-stringify-without-jsonify": "^1.0.1", + "lodash.merge": "^4.6.2", + "minimatch": "^3.1.2", + "natural-compare": "^1.4.0", + "optionator": "^0.9.3", + "text-table": "^0.2.0" + }, + "bin": { + "eslint": "bin/eslint.js" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://eslint.org/donate" + }, + "peerDependencies": { + "jiti": "*" + }, + "peerDependenciesMeta": { + "jiti": { + "optional": true + } + } + }, + "packages/handlebars-helperized/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": 
">=8" + } + }, + "packages/handlebars-helperized/node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true + }, + "packages/handlebars-helperized/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "packages/handlebars-helperized/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "packages/handlebars-helperized/node_modules/undici-types": { + "version": "7.10.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.10.0.tgz", + "integrity": "sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag==", + "dev": true, + "license": "MIT" + }, + "packages/kafka-client": { + "name": "@restorecommerce/kafka-client", + "version": "1.2.36", + "license": "MIT", + "dependencies": { + "@platformatic/kafka": "^1.11.0", + "@restorecommerce/logger": "^1.3.4", + "@restorecommerce/rc-grpc-clients": "^5.1.58", + "async": "^3.2.6", + "cls-rtracer": "^2.6.3", + "events": "^3.3.0", "lodash": "^4.17.21", - "lru-cache": "11.0.2", - "nanoid": "^5.0.7", - "nice-grpc": "^2.1.7", - "node-fetch": "^3.3.1", - "oauth": "^0.10.0", - "oidc-provider": "^8.4.6", - "request-ip": "^3.3.0", - "rxjs": "^7.8.1", - 
"ts-proto-descriptors": "2.0.0", - "ua-parser-js": "^2.0.0", - "uuid": "11.0.2", - "winston": "^3.9.0", - "ws": "^8.17.0" + "protobufjs": "^7.4.0", + "retry": "^0.13.1" + }, + "devDependencies": { + "@restorecommerce/dev": "^0.0.13", + "@types/async": "^3.2.23", + "@types/node": "^22.7.9", + "@vitest/coverage-v8": "^3.2.4", + "cross-env": "^7.0.3", + "eslint": "9.13.0", + "eslint-plugin-prefer-arrow-functions": "^3.2.4", + "npm-run-all": "^4.1.5", + "rimraf": "^6.0.1", + "ts-node": "^10.9.1", + "typescript": "^5.2.2", + "vitest": "^3.2.4" + }, + "engines": { + "node": ">= 12.0.0" + } + }, + "packages/kafka-client/node_modules/@bcoe/v8-coverage": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@bcoe/v8-coverage/-/v8-coverage-1.0.2.tgz", + "integrity": "sha512-6zABk/ECA/QYSCQ1NGiVwwbQerUCZ+TQbp64Q3AgmfNvurHH0j8TtXa1qbShXA6qqkpAj4V5W8pP6mLe1mcMqA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + } + }, + "packages/kafka-client/node_modules/@eslint/js": { + "version": "9.13.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.13.0.tgz", + "integrity": "sha512-IFLyoY4d72Z5y/6o/BazFBezupzI/taV8sGumxTAVw3lXG9A6md1Dc34T9s1FoD/an9pJH8RHbAxsaEbBed9lA==", + "dev": true, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "packages/kafka-client/node_modules/@humanwhocodes/retry": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.3.1.tgz", + "integrity": "sha512-JBxkERygn7Bv/GbN5Rv8Ul6LVknS+5Bp6RgDC/O8gEBU/yeH5Ui5C/OlWrTb6qct7LjjfT6Re2NxB0ln0yYybA==", + "dev": true, + "engines": { + "node": ">=18.18" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/nzakas" + } + }, + "packages/kafka-client/node_modules/@vitest/coverage-v8": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/coverage-v8/-/coverage-v8-3.2.4.tgz", + "integrity": 
"sha512-EyF9SXU6kS5Ku/U82E259WSnvg6c8KTjppUncuNdm5QHpe17mwREHnjDzozC8x9MZ0xfBUFSaLkRv4TMA75ALQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@ampproject/remapping": "^2.3.0", + "@bcoe/v8-coverage": "^1.0.2", + "ast-v8-to-istanbul": "^0.3.3", + "debug": "^4.4.1", + "istanbul-lib-coverage": "^3.2.2", + "istanbul-lib-report": "^3.0.1", + "istanbul-lib-source-maps": "^5.0.6", + "istanbul-reports": "^3.1.7", + "magic-string": "^0.30.17", + "magicast": "^0.3.5", + "std-env": "^3.9.0", + "test-exclude": "^7.0.1", + "tinyrainbow": "^2.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + }, + "peerDependencies": { + "@vitest/browser": "3.2.4", + "vitest": "3.2.4" }, - "devDependencies": { - "@graphql-codegen/core": "^4.0.2", - "@graphql-codegen/typescript": "^4.1.1", - "@graphql-codegen/typescript-resolvers": "^4.4.0", - "@types/array.prototype.flat": "^1.2.1", - "@types/co": "^4.6.3", - "@types/debug": "^4.1.8", - "@types/ejs": "^3.1.2", - "@types/express": "5.0.0", - "@types/express-serve-static-core": "5.0.1", - "@types/graphql-upload": "^16.0.7", - "@types/koa": "^2.15.0", - "@types/koa__cors": "^5.0.0", - "@types/koa-bodyparser": "^4.3.12", - "@types/koa-helmet": "^6.0.8", - "@types/koa-mount": "^4.0.5", - "@types/koa-router": "^7.4.8", - "@types/koa-websocket": "^5.0.10", - "@types/lodash": "^4.14.195", - "@types/lru-cache": "^7.10.10", - "@types/node": "22.8.6", - "@types/oauth": "^0.9.5", - "@types/oidc-provider": "^8.4.4", - "@types/request-ip": "^0.0.41", - "@types/supertest": "^6.0.2", - "@types/useragent": "^2.3.1", - "@types/uuid": "10.0.0", - "@types/ws": "^8.5.4", - "eslint": "^9.4.0", - "nodemon": "^3.1.3", - "npm-run-all": "^4.1.5", - "rimraf": "6.0.1", - "supertest": "^7.0.0", - "tsconfig-paths": "^4.2.0", - "tsx": "^4.13.2", - "typescript": "^5.3.3", - "vitest": "^2.1.4" + "peerDependenciesMeta": { + "@vitest/browser": { + "optional": true + } + } + }, + "packages/kafka-client/node_modules/@vitest/expect": { + "version": 
"3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-3.2.4.tgz", + "integrity": "sha512-Io0yyORnB6sikFlt8QW5K7slY4OjqNX9jmJQ02QDda8lyM6B5oNgVWoSoKPac8/kgnCUzuHQKrSLtu/uOqqrig==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/chai": "^5.2.2", + "@vitest/spy": "3.2.4", + "@vitest/utils": "3.2.4", + "chai": "^5.2.0", + "tinyrainbow": "^2.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "packages/kafka-client/node_modules/@vitest/pretty-format": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-3.2.4.tgz", + "integrity": "sha512-IVNZik8IVRJRTr9fxlitMKeJeXFFFN0JaB9PHPGQ8NKQbGpfjlTx9zO4RefN8gp7eqjNy8nyK3NZmBzOPeIxtA==", + "dev": true, + "license": "MIT", + "dependencies": { + "tinyrainbow": "^2.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "packages/kafka-client/node_modules/@vitest/runner": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-3.2.4.tgz", + "integrity": "sha512-oukfKT9Mk41LreEW09vt45f8wx7DordoWUZMYdY/cyAk7w5TWkTRCNZYF7sX7n2wB7jyGAl74OxgwhPgKaqDMQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/utils": "3.2.4", + "pathe": "^2.0.3", + "strip-literal": "^3.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "packages/kafka-client/node_modules/@vitest/snapshot": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-3.2.4.tgz", + "integrity": "sha512-dEYtS7qQP2CjU27QBC5oUOxLE/v5eLkGqPE0ZKEIDGMs4vKWe7IjgLOeauHsR0D5YuuycGRO5oSRXnwnmA78fQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/pretty-format": "3.2.4", + "magic-string": "^0.30.17", + "pathe": "^2.0.3" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "packages/kafka-client/node_modules/@vitest/spy": { + "version": "3.2.4", + "resolved": 
"https://registry.npmjs.org/@vitest/spy/-/spy-3.2.4.tgz", + "integrity": "sha512-vAfasCOe6AIK70iP5UD11Ac4siNUNJ9i/9PZ3NKx07sG6sUxeag1LWdNrMWeKKYBLlzuK+Gn65Yd5nyL6ds+nw==", + "dev": true, + "license": "MIT", + "dependencies": { + "tinyspy": "^4.0.3" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "packages/kafka-client/node_modules/@vitest/utils": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-3.2.4.tgz", + "integrity": "sha512-fB2V0JFrQSMsCo9HiSq3Ezpdv4iYaXRG1Sx8edX3MwxfyNn83mKiGzOcH+Fkxt4MHxr3y42fQi1oeAInqgX2QA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/pretty-format": "3.2.4", + "loupe": "^3.1.4", + "tinyrainbow": "^2.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "packages/kafka-client/node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "packages/kafka-client/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "packages/kafka-client/node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": 
"sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "packages/kafka-client/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "packages/kafka-client/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "packages/kafka-client/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "packages/kafka-client/node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "packages/kafka-client/node_modules/eslint": { + "version": "9.13.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.13.0.tgz", + "integrity": 
"sha512-EYZK6SX6zjFHST/HRytOdA/zE72Cq/bfw45LSyuwrdvcclb/gqV8RRQxywOBEWO2+WDpva6UZa4CcDeJKzUCFA==", + "dev": true, + "dependencies": { + "@eslint-community/eslint-utils": "^4.2.0", + "@eslint-community/regexpp": "^4.11.0", + "@eslint/config-array": "^0.18.0", + "@eslint/core": "^0.7.0", + "@eslint/eslintrc": "^3.1.0", + "@eslint/js": "9.13.0", + "@eslint/plugin-kit": "^0.2.0", + "@humanfs/node": "^0.16.5", + "@humanwhocodes/module-importer": "^1.0.1", + "@humanwhocodes/retry": "^0.3.1", + "@types/estree": "^1.0.6", + "@types/json-schema": "^7.0.15", + "ajv": "^6.12.4", + "chalk": "^4.0.0", + "cross-spawn": "^7.0.2", + "debug": "^4.3.2", + "escape-string-regexp": "^4.0.0", + "eslint-scope": "^8.1.0", + "eslint-visitor-keys": "^4.1.0", + "espree": "^10.2.0", + "esquery": "^1.5.0", + "esutils": "^2.0.2", + "fast-deep-equal": "^3.1.3", + "file-entry-cache": "^8.0.0", + "find-up": "^5.0.0", + "glob-parent": "^6.0.2", + "ignore": "^5.2.0", + "imurmurhash": "^0.1.4", + "is-glob": "^4.0.0", + "json-stable-stringify-without-jsonify": "^1.0.1", + "lodash.merge": "^4.6.2", + "minimatch": "^3.1.2", + "natural-compare": "^1.4.0", + "optionator": "^0.9.3", + "text-table": "^0.2.0" + }, + "bin": { + "eslint": "bin/eslint.js" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://eslint.org/donate" + }, + "peerDependencies": { + "jiti": "*" + }, + "peerDependenciesMeta": { + "jiti": { + "optional": true + } } }, - "packages/facade/node_modules/@types/koa-bodyparser": { - "version": "4.3.12", - "resolved": "https://registry.npmjs.org/@types/koa-bodyparser/-/koa-bodyparser-4.3.12.tgz", - "integrity": "sha512-hKMmRMVP889gPIdLZmmtou/BijaU1tHPyMNmcK7FAHAdATnRcGQQy78EqTTxLH1D4FTsrxIzklAQCso9oGoebQ==", + "packages/kafka-client/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": 
"sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "packages/kafka-client/node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true + }, + "packages/kafka-client/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", "dev": true, "dependencies": { - "@types/koa": "*" + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" } }, - "packages/facade/node_modules/@types/node": { - "version": "22.8.6", - "resolved": "https://registry.npmjs.org/@types/node/-/node-22.8.6.tgz", - "integrity": "sha512-tosuJYKrIqjQIlVCM4PEGxOmyg3FCPa/fViuJChnGeEIhjA46oy8FMVoF9su1/v8PNs2a8Q0iFNyOx0uOF91nw==", + "packages/kafka-client/node_modules/pathe": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", + "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", + "dev": true, + "license": "MIT" + }, + "packages/kafka-client/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, "dependencies": { - "undici-types": "~6.19.8" + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" } }, - "packages/facade/node_modules/helmet": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/helmet/-/helmet-8.0.0.tgz", - "integrity": 
"sha512-VyusHLEIIO5mjQPUI1wpOAEu+wl6Q0998jzTxqUYGE45xCIcAxy3MsbEK/yyJUJ3ADeMoB6MornPH6GMWAf+Pw==", - "peer": true, + "packages/kafka-client/node_modules/tinyrainbow": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/tinyrainbow/-/tinyrainbow-2.0.0.tgz", + "integrity": "sha512-op4nsTR47R6p0vMUUoYl/a+ljLFVtlfaXkLQmqfLR1qHma1h/ysYk4hEXZ880bf2CYgTskvTa/e196Vd5dDQXw==", + "dev": true, + "license": "MIT", "engines": { - "node": ">=18.0.0" + "node": ">=14.0.0" } }, - "packages/facade/node_modules/koa-helmet": { - "version": "8.0.1", - "resolved": "https://registry.npmjs.org/koa-helmet/-/koa-helmet-8.0.1.tgz", - "integrity": "sha512-lXDqTqNLgqyOKEuCSF3MFReJmEQQ0GD0NzkdSe9dKH87NSMc5GxAA7H5mYaAT+UJypYkIS1lPNRqTuEUgl+l3Q==", + "packages/kafka-client/node_modules/tinyspy": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/tinyspy/-/tinyspy-4.0.3.tgz", + "integrity": "sha512-t2T/WLB2WRgZ9EpE4jgPJ9w+i66UZfDc8wHh0xrwiRNN+UwH98GIJkTeZqX9rg0i0ptwzqW+uYeIF0T4F8LR7A==", + "dev": true, + "license": "MIT", "engines": { - "node": ">= 18.0.0" - }, - "peerDependencies": { - "helmet": ">= 6" + "node": ">=14.0.0" } }, - "packages/facade/node_modules/ua-parser-js": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ua-parser-js/-/ua-parser-js-2.0.0.tgz", - "integrity": "sha512-SASgD4RlB7+SCMmlVNqrhPw0f/2pGawWBzJ2+LwGTD0GgNnrKGzPJDiraGHJDwW9Zm5DH2lTmUpqDpbZjJY4+Q==", - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/ua-parser-js" - }, - { - "type": "paypal", - "url": "https://paypal.me/faisalman" - }, - { - "type": "github", - "url": "https://github.com/sponsors/faisalman" - } - ], + "packages/kafka-client/node_modules/vite-node": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/vite-node/-/vite-node-3.2.4.tgz", + "integrity": "sha512-EbKSKh+bh1E1IFxeO0pg1n4dvoOTt0UDiXMd/qn++r98+jPO1xtJilvXldeuQ8giIB5IkpjCgMleHMNEsGH6pg==", + "dev": true, + "license": "MIT", "dependencies": { - 
"detect-europe-js": "^0.1.2", - "is-standalone-pwa": "^0.1.1", - "ua-is-frozen": "^0.1.2" + "cac": "^6.7.14", + "debug": "^4.4.1", + "es-module-lexer": "^1.7.0", + "pathe": "^2.0.3", + "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0" }, "bin": { - "ua-parser-js": "script/cli.js" + "vite-node": "vite-node.mjs" }, "engines": { - "node": "*" + "node": "^18.0.0 || ^20.0.0 || >=22.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" } }, - "packages/gql-bot": { - "name": "@restorecommerce/gql-bot", - "version": "1.0.9", + "packages/kafka-client/node_modules/vitest": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/vitest/-/vitest-3.2.4.tgz", + "integrity": "sha512-LUCP5ev3GURDysTWiP47wRRUpLKMOfPh+yKTx3kVIEiu5KOMeqzpnYNsKyOoVrULivR8tLcks4+lga33Whn90A==", + "dev": true, "license": "MIT", "dependencies": { - "@apollo/client": "^3.11.8", - "chalk": "^5.3.0", - "color-hash": "^2.0.2", - "graphql": "^16.9.0", - "js-yaml": "^4.1.0", - "lodash": "^4.17.21", - "node-fetch": "^3.3.2", - "promise-streams": "^2.1.1", - "readdirp": "^4.0.2", - "through2": "^4.0.2", - "yaml-document-stream": "^1.1.0" + "@types/chai": "^5.2.2", + "@vitest/expect": "3.2.4", + "@vitest/mocker": "3.2.4", + "@vitest/pretty-format": "^3.2.4", + "@vitest/runner": "3.2.4", + "@vitest/snapshot": "3.2.4", + "@vitest/spy": "3.2.4", + "@vitest/utils": "3.2.4", + "chai": "^5.2.0", + "debug": "^4.4.1", + "expect-type": "^1.2.1", + "magic-string": "^0.30.17", + "pathe": "^2.0.3", + "picomatch": "^4.0.2", + "std-env": "^3.9.0", + "tinybench": "^2.9.0", + "tinyexec": "^0.3.2", + "tinyglobby": "^0.2.14", + "tinypool": "^1.1.1", + "tinyrainbow": "^2.0.0", + "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0", + "vite-node": "3.2.4", + "why-is-node-running": "^2.3.0" }, - "devDependencies": { - "@restorecommerce/dev": "^0.0.13", - "@types/color-hash": "^2.0.0", - "@types/node": "^22.7.9", - "@types/through2": "^2.0.41", - "@typescript-eslint/eslint-plugin": "^8.11.0", - "@typescript-eslint/parser": "^8.11.0", 
- "@vitest/coverage-v8": "^2.1.4", - "eslint": "^9.13.0", - "eslint-plugin-prefer-arrow-functions": "^3.1.4", - "msw": "^2.5.2", - "nock": "^13.5.5", - "npm-run-all": "^4.1.5", - "ts-node": "^10.9.1", - "typescript": "^5.1.6", - "vitest": "^2.1.4" + "bin": { + "vitest": "vitest.mjs" }, "engines": { - "node": ">= 12.0.0" + "node": "^18.0.0 || ^20.0.0 || >=22.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + }, + "peerDependencies": { + "@edge-runtime/vm": "*", + "@types/debug": "^4.1.12", + "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", + "@vitest/browser": "3.2.4", + "@vitest/ui": "3.2.4", + "happy-dom": "*", + "jsdom": "*" + }, + "peerDependenciesMeta": { + "@edge-runtime/vm": { + "optional": true + }, + "@types/debug": { + "optional": true + }, + "@types/node": { + "optional": true + }, + "@vitest/browser": { + "optional": true + }, + "@vitest/ui": { + "optional": true + }, + "happy-dom": { + "optional": true + }, + "jsdom": { + "optional": true + } } }, - "packages/gql-bot/node_modules/chalk": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.3.0.tgz", - "integrity": "sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w==", - "engines": { - "node": "^12.17.0 || ^14.13 || >=16.0.0" + "packages/kafka-client/node_modules/vitest/node_modules/@vitest/mocker": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-3.2.4.tgz", + "integrity": "sha512-46ryTE9RZO/rfDd7pEqFl7etuyzekzEhUbTW3BvmeO/BcCMEgq59BKhek3dXDWgAj4oMK6OZi+vRr1wPW6qjEQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/spy": "3.2.4", + "estree-walker": "^3.0.3", + "magic-string": "^0.30.17" + }, + "funding": { + "url": "https://opencollective.com/vitest" + }, + "peerDependencies": { + "msw": "^2.4.9", + "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0" }, - "funding": { - "url": "https://github.com/chalk/chalk?sponsor=1" + "peerDependenciesMeta": { + "msw": { + 
"optional": true + }, + "vite": { + "optional": true + } } }, - "packages/grpc-client": { - "name": "@restorecommerce/grpc-client", - "version": "2.2.9", + "packages/koa-health-check": { + "name": "@restorecommerce/koa-health-check", + "version": "1.0.9", "license": "MIT", "dependencies": { - "@grpc/grpc-js": "^1.10.10", - "@grpc/proto-loader": "^0.7.13", - "@restorecommerce/logger": "^1.3.4", - "cls-rtracer": "^2.6.3", - "nice-grpc": "^2.1.9", - "nice-grpc-client-middleware-deadline": "^2.0.12", - "nice-grpc-client-middleware-retry": "^3.1.8", - "protobufjs": "^7.3.2", - "retry": "^0.13.1", - "rxjs": "^7.8.1", - "winston": "^3.13.0" + "@cloudnative/health": "^2.1.2", + "connect": "^3.7.0", + "koa": "^2.15.3", + "npm-run-all": "^4.1.5", + "rimraf": "6.0.1", + "typescript": "^5.2.2" }, "devDependencies": { "@restorecommerce/dev": "^0.0.13", - "@types/lodash": "^4.17.5", - "@types/node": "22.8.4", - "@types/retry": "^0.12.5", - "@types/uuid": "^10.0.0", - "npm-run-all": "^4.1.5", - "rimraf": "6.0.1", - "ts-proto": "2.2.5", - "ts-proto-descriptors": "2.0.0", - "typescript": "^5.5.2", + "@types/koa": "^2.13.11", + "@types/node": "^22.7.9", + "@types/sinon": "^17.0.1", + "eslint": "^9.13.0", + "eslint-plugin-prefer-arrow-functions": "^3.2.4", + "source-map-support": "^0.5.21", + "ts-node": "^10.9.1", "vitest": "^2.1.4" }, "engines": { - "node": ">= 20.0.0" - } - }, - "packages/grpc-client/node_modules/@types/node": { - "version": "22.8.4", - "resolved": "https://registry.npmjs.org/@types/node/-/node-22.8.4.tgz", - "integrity": "sha512-SpNNxkftTJOPk0oN+y2bIqurEXHTA2AOZ3EJDDKeJ5VzkvvORSvmQXGQarcOzWV1ac7DCaPBEdMDxBsM+d8jWw==", - "dev": true, - "dependencies": { - "undici-types": "~6.19.8" + "node": ">= 8.0.0" } }, - "packages/handlebars-helperized": { - "name": "@restorecommerce/handlebars-helperized", - "version": "2.0.0", + "packages/koa-req-res-logger": { + "name": "@restorecommerce/koa-req-res-logger", + "version": "1.1.10", "license": "MIT", "dependencies": { - 
"handlebars": "^4.7.8", - "handlebars-layouts": "^3.1.4", - "juice": "11.0.0", - "moment-timezone": "^0.6.0" + "debug": "^4.3.5", + "winston": "^3.11.0" }, "devDependencies": { - "@restorecommerce/dev": "^0.0.13", - "@types/moment-timezone": "^0.5.13", - "@types/node": "^24.3.0", - "@vitest/coverage-v8": "^2.1.4", - "assert": "^2.1.0", + "@types/node": "^22.7.9", "eslint": "9.13.0", "eslint-plugin-prefer-arrow-functions": "^3.2.4", "is-generator": "^1.0.3", + "isomorphic-fetch": "^3.0.0", + "koa": "^2.15.3", + "koa-bodyparser": "^4.4.1", "npm-run-all": "^4.1.5", + "should": "^13.2.3", "typescript": "^5.2.2", "vitest": "^2.1.4" }, "engines": { - "node": ">= 20.0.0" + "node": ">= 8.0.0" } }, - "packages/handlebars-helperized/node_modules/@eslint/js": { + "packages/koa-req-res-logger/node_modules/@eslint/js": { "version": "9.13.0", "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.13.0.tgz", "integrity": "sha512-IFLyoY4d72Z5y/6o/BazFBezupzI/taV8sGumxTAVw3lXG9A6md1Dc34T9s1FoD/an9pJH8RHbAxsaEbBed9lA==", @@ -25680,7 +27841,7 @@ "node": "^18.18.0 || ^20.9.0 || >=21.1.0" } }, - "packages/handlebars-helperized/node_modules/@humanwhocodes/retry": { + "packages/koa-req-res-logger/node_modules/@humanwhocodes/retry": { "version": "0.3.1", "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.3.1.tgz", "integrity": "sha512-JBxkERygn7Bv/GbN5Rv8Ul6LVknS+5Bp6RgDC/O8gEBU/yeH5Ui5C/OlWrTb6qct7LjjfT6Re2NxB0ln0yYybA==", @@ -25693,17 +27854,7 @@ "url": "https://github.com/sponsors/nzakas" } }, - "packages/handlebars-helperized/node_modules/@types/node": { - "version": "24.3.0", - "resolved": "https://registry.npmjs.org/@types/node/-/node-24.3.0.tgz", - "integrity": "sha512-aPTXCrfwnDLj4VvXrm+UUCQjNEvJgNA8s5F1cvwQU+3KNltTOkBm1j30uNLyqqPNe7gE3KFzImYoZEfLhp4Yow==", - "dev": true, - "license": "MIT", - "dependencies": { - "undici-types": "~7.10.0" - } - }, - "packages/handlebars-helperized/node_modules/ajv": { + "packages/koa-req-res-logger/node_modules/ajv": { 
"version": "6.12.6", "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", @@ -25719,7 +27870,7 @@ "url": "https://github.com/sponsors/epoberezkin" } }, - "packages/handlebars-helperized/node_modules/ansi-styles": { + "packages/koa-req-res-logger/node_modules/ansi-styles": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", @@ -25734,7 +27885,7 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "packages/handlebars-helperized/node_modules/brace-expansion": { + "packages/koa-req-res-logger/node_modules/brace-expansion": { "version": "1.1.11", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", @@ -25744,7 +27895,7 @@ "concat-map": "0.0.1" } }, - "packages/handlebars-helperized/node_modules/chalk": { + "packages/koa-req-res-logger/node_modules/chalk": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", @@ -25760,7 +27911,7 @@ "url": "https://github.com/chalk/chalk?sponsor=1" } }, - "packages/handlebars-helperized/node_modules/color-convert": { + "packages/koa-req-res-logger/node_modules/color-convert": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", @@ -25772,13 +27923,13 @@ "node": ">=7.0.0" } }, - "packages/handlebars-helperized/node_modules/color-name": { + "packages/koa-req-res-logger/node_modules/color-name": { 
"version": "1.1.4", "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "dev": true }, - "packages/handlebars-helperized/node_modules/escape-string-regexp": { + "packages/koa-req-res-logger/node_modules/escape-string-regexp": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", @@ -25790,7 +27941,7 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "packages/handlebars-helperized/node_modules/eslint": { + "packages/koa-req-res-logger/node_modules/eslint": { "version": "9.13.0", "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.13.0.tgz", "integrity": "sha512-EYZK6SX6zjFHST/HRytOdA/zE72Cq/bfw45LSyuwrdvcclb/gqV8RRQxywOBEWO2+WDpva6UZa4CcDeJKzUCFA==", @@ -25850,7 +28001,7 @@ } } }, - "packages/handlebars-helperized/node_modules/has-flag": { + "packages/koa-req-res-logger/node_modules/has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", @@ -25859,13 +28010,13 @@ "node": ">=8" } }, - "packages/handlebars-helperized/node_modules/json-schema-traverse": { + "packages/koa-req-res-logger/node_modules/json-schema-traverse": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", "dev": true }, - "packages/handlebars-helperized/node_modules/minimatch": { + "packages/koa-req-res-logger/node_modules/minimatch": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", "integrity": 
"sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", @@ -25877,7 +28028,7 @@ "node": "*" } }, - "packages/handlebars-helperized/node_modules/supports-color": { + "packages/koa-req-res-logger/node_modules/supports-color": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", @@ -25889,201 +28040,88 @@ "node": ">=8" } }, - "packages/handlebars-helperized/node_modules/undici-types": { - "version": "7.10.0", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.10.0.tgz", - "integrity": "sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag==", - "dev": true, - "license": "MIT" - }, - "packages/kafka-client": { - "name": "@restorecommerce/kafka-client", - "version": "1.2.36", + "packages/logger": { + "name": "@restorecommerce/logger", + "version": "1.3.4", "license": "MIT", "dependencies": { - "@platformatic/kafka": "^1.11.0", - "@restorecommerce/logger": "^1.3.4", - "@restorecommerce/rc-grpc-clients": "^5.1.58", - "async": "^3.2.6", + "cls-hooked": "^4.2.2", "cls-rtracer": "^2.6.3", - "events": "^3.3.0", "lodash": "^4.17.21", - "protobufjs": "^7.4.0", - "retry": "^0.13.1" + "source-map-support": "^0.5.21", + "winston": "^3.15.0", + "winston-elasticsearch": "^0.19.0" }, "devDependencies": { "@restorecommerce/dev": "^0.0.13", - "@types/async": "^3.2.23", + "@types/lodash": "^4.17.12", "@types/node": "^22.7.9", - "@vitest/coverage-v8": "^3.2.4", - "cross-env": "^7.0.3", - "eslint": "9.13.0", - "eslint-plugin-prefer-arrow-functions": "^3.2.4", + "@types/should": "^13.0.0", + "eslint": "^9.12.0", + "eslint-plugin-prefer-arrow-functions": "^3.4.1", "npm-run-all": "^4.1.5", - "rimraf": "^6.0.1", - "ts-node": "^10.9.1", - "typescript": "^5.2.2", - "vitest": "^3.2.4" - }, - "engines": { - "node": ">= 12.0.0" - } - 
}, - "packages/kafka-client/node_modules/@bcoe/v8-coverage": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/@bcoe/v8-coverage/-/v8-coverage-1.0.2.tgz", - "integrity": "sha512-6zABk/ECA/QYSCQ1NGiVwwbQerUCZ+TQbp64Q3AgmfNvurHH0j8TtXa1qbShXA6qqkpAj4V5W8pP6mLe1mcMqA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=18" - } - }, - "packages/kafka-client/node_modules/@eslint/js": { - "version": "9.13.0", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.13.0.tgz", - "integrity": "sha512-IFLyoY4d72Z5y/6o/BazFBezupzI/taV8sGumxTAVw3lXG9A6md1Dc34T9s1FoD/an9pJH8RHbAxsaEbBed9lA==", - "dev": true, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - } - }, - "packages/kafka-client/node_modules/@humanwhocodes/retry": { - "version": "0.3.1", - "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.3.1.tgz", - "integrity": "sha512-JBxkERygn7Bv/GbN5Rv8Ul6LVknS+5Bp6RgDC/O8gEBU/yeH5Ui5C/OlWrTb6qct7LjjfT6Re2NxB0ln0yYybA==", - "dev": true, - "engines": { - "node": ">=18.18" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/nzakas" - } - }, - "packages/kafka-client/node_modules/@vitest/coverage-v8": { - "version": "3.2.4", - "resolved": "https://registry.npmjs.org/@vitest/coverage-v8/-/coverage-v8-3.2.4.tgz", - "integrity": "sha512-EyF9SXU6kS5Ku/U82E259WSnvg6c8KTjppUncuNdm5QHpe17mwREHnjDzozC8x9MZ0xfBUFSaLkRv4TMA75ALQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@ampproject/remapping": "^2.3.0", - "@bcoe/v8-coverage": "^1.0.2", - "ast-v8-to-istanbul": "^0.3.3", - "debug": "^4.4.1", - "istanbul-lib-coverage": "^3.2.2", - "istanbul-lib-report": "^3.0.1", - "istanbul-lib-source-maps": "^5.0.6", - "istanbul-reports": "^3.1.7", - "magic-string": "^0.30.17", - "magicast": "^0.3.5", - "std-env": "^3.9.0", - "test-exclude": "^7.0.1", - "tinyrainbow": "^2.0.0" - }, - "funding": { - "url": "https://opencollective.com/vitest" - }, - "peerDependencies": { - "@vitest/browser": "3.2.4", - 
"vitest": "3.2.4" - }, - "peerDependenciesMeta": { - "@vitest/browser": { - "optional": true - } - } - }, - "packages/kafka-client/node_modules/@vitest/expect": { - "version": "3.2.4", - "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-3.2.4.tgz", - "integrity": "sha512-Io0yyORnB6sikFlt8QW5K7slY4OjqNX9jmJQ02QDda8lyM6B5oNgVWoSoKPac8/kgnCUzuHQKrSLtu/uOqqrig==", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/chai": "^5.2.2", - "@vitest/spy": "3.2.4", - "@vitest/utils": "3.2.4", - "chai": "^5.2.0", - "tinyrainbow": "^2.0.0" - }, - "funding": { - "url": "https://opencollective.com/vitest" - } - }, - "packages/kafka-client/node_modules/@vitest/pretty-format": { - "version": "3.2.4", - "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-3.2.4.tgz", - "integrity": "sha512-IVNZik8IVRJRTr9fxlitMKeJeXFFFN0JaB9PHPGQ8NKQbGpfjlTx9zO4RefN8gp7eqjNy8nyK3NZmBzOPeIxtA==", - "dev": true, - "license": "MIT", - "dependencies": { - "tinyrainbow": "^2.0.0" + "rimraf": "^6.0.1", + "typescript": "^5.6.3", + "vitest": "^2.1.4" }, - "funding": { - "url": "https://opencollective.com/vitest" + "engines": { + "node": ">= 12.18.0" } }, - "packages/kafka-client/node_modules/@vitest/runner": { - "version": "3.2.4", - "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-3.2.4.tgz", - "integrity": "sha512-oukfKT9Mk41LreEW09vt45f8wx7DordoWUZMYdY/cyAk7w5TWkTRCNZYF7sX7n2wB7jyGAl74OxgwhPgKaqDMQ==", - "dev": true, + "packages/mailer": { + "name": "@restorecommerce/mailer", + "version": "1.0.12", "license": "MIT", "dependencies": { - "@vitest/utils": "3.2.4", - "pathe": "^2.0.3", - "strip-literal": "^3.0.0" + "nodemailer": "^6.9.15", + "nodemailer-html-to-text": "^3.2.0", + "nodemailer-mandrill-transport": "^1.2.0", + "nodemailer-stub-transport": "^1.1.0", + "nodemailer-wellknown": "^0.2.3", + "winston": "^3.11.0" }, - "funding": { - "url": "https://opencollective.com/vitest" - } - }, - 
"packages/kafka-client/node_modules/@vitest/snapshot": { - "version": "3.2.4", - "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-3.2.4.tgz", - "integrity": "sha512-dEYtS7qQP2CjU27QBC5oUOxLE/v5eLkGqPE0ZKEIDGMs4vKWe7IjgLOeauHsR0D5YuuycGRO5oSRXnwnmA78fQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@vitest/pretty-format": "3.2.4", - "magic-string": "^0.30.17", - "pathe": "^2.0.3" + "devDependencies": { + "@restorecommerce/dev": "^0.0.13", + "@types/node": "^22.7.9", + "@types/nodemailer": "^6.4.16", + "@types/nodemailer-html-to-text": "^3.1.3", + "@types/nodemailer-stub-transport": "^1.1.8", + "assert": "^2.1.0", + "eslint": "9.13.0", + "eslint-plugin-prefer-arrow-functions": "^3.2.4", + "is-generator": "^1.0.3", + "npm-run-all": "^4.1.5", + "should": "^13.2.3", + "typescript": "^5.2.2", + "vitest": "^2.1.4" }, - "funding": { - "url": "https://opencollective.com/vitest" + "engines": { + "node": ">= 10.0.0" } }, - "packages/kafka-client/node_modules/@vitest/spy": { - "version": "3.2.4", - "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-3.2.4.tgz", - "integrity": "sha512-vAfasCOe6AIK70iP5UD11Ac4siNUNJ9i/9PZ3NKx07sG6sUxeag1LWdNrMWeKKYBLlzuK+Gn65Yd5nyL6ds+nw==", + "packages/mailer/node_modules/@eslint/js": { + "version": "9.13.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.13.0.tgz", + "integrity": "sha512-IFLyoY4d72Z5y/6o/BazFBezupzI/taV8sGumxTAVw3lXG9A6md1Dc34T9s1FoD/an9pJH8RHbAxsaEbBed9lA==", "dev": true, - "license": "MIT", - "dependencies": { - "tinyspy": "^4.0.3" - }, - "funding": { - "url": "https://opencollective.com/vitest" + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" } }, - "packages/kafka-client/node_modules/@vitest/utils": { - "version": "3.2.4", - "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-3.2.4.tgz", - "integrity": "sha512-fB2V0JFrQSMsCo9HiSq3Ezpdv4iYaXRG1Sx8edX3MwxfyNn83mKiGzOcH+Fkxt4MHxr3y42fQi1oeAInqgX2QA==", + 
"packages/mailer/node_modules/@humanwhocodes/retry": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.3.1.tgz", + "integrity": "sha512-JBxkERygn7Bv/GbN5Rv8Ul6LVknS+5Bp6RgDC/O8gEBU/yeH5Ui5C/OlWrTb6qct7LjjfT6Re2NxB0ln0yYybA==", "dev": true, - "license": "MIT", - "dependencies": { - "@vitest/pretty-format": "3.2.4", - "loupe": "^3.1.4", - "tinyrainbow": "^2.0.0" + "engines": { + "node": ">=18.18" }, "funding": { - "url": "https://opencollective.com/vitest" + "type": "github", + "url": "https://github.com/sponsors/nzakas" } }, - "packages/kafka-client/node_modules/ajv": { + "packages/mailer/node_modules/ajv": { "version": "6.12.6", "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", @@ -26099,7 +28137,7 @@ "url": "https://github.com/sponsors/epoberezkin" } }, - "packages/kafka-client/node_modules/ansi-styles": { + "packages/mailer/node_modules/ansi-styles": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", @@ -26114,7 +28152,7 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "packages/kafka-client/node_modules/brace-expansion": { + "packages/mailer/node_modules/brace-expansion": { "version": "1.1.11", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", @@ -26124,7 +28162,7 @@ "concat-map": "0.0.1" } }, - "packages/kafka-client/node_modules/chalk": { + "packages/mailer/node_modules/chalk": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", "integrity": 
"sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", @@ -26140,7 +28178,7 @@ "url": "https://github.com/chalk/chalk?sponsor=1" } }, - "packages/kafka-client/node_modules/color-convert": { + "packages/mailer/node_modules/color-convert": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", @@ -26152,13 +28190,13 @@ "node": ">=7.0.0" } }, - "packages/kafka-client/node_modules/color-name": { + "packages/mailer/node_modules/color-name": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "dev": true }, - "packages/kafka-client/node_modules/escape-string-regexp": { + "packages/mailer/node_modules/escape-string-regexp": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", @@ -26170,7 +28208,7 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "packages/kafka-client/node_modules/eslint": { + "packages/mailer/node_modules/eslint": { "version": "9.13.0", "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.13.0.tgz", "integrity": "sha512-EYZK6SX6zjFHST/HRytOdA/zE72Cq/bfw45LSyuwrdvcclb/gqV8RRQxywOBEWO2+WDpva6UZa4CcDeJKzUCFA==", @@ -26230,541 +28268,743 @@ } } }, - "packages/kafka-client/node_modules/has-flag": { + "packages/mailer/node_modules/has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true, "engines": { - "node": ">=8" + "node": ">=8" + } + }, + 
"packages/mailer/node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true + }, + "packages/mailer/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "packages/mailer/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "packages/protos": { + "name": "@restorecommerce/protos", + "version": "6.11.9", + "license": "MIT" + }, + "packages/rc-grpc-clients": { + "name": "@restorecommerce/rc-grpc-clients", + "version": "5.1.58", + "license": "MIT", + "dependencies": { + "@grpc/grpc-js": "^1.12.2", + "@restorecommerce/grpc-client": "^2.2.9", + "@types/google-protobuf": "^3.15.10", + "google-protobuf": "^3.21.4", + "nice-grpc": "^2.1.7", + "nice-grpc-common": "^2.0.2", + "protobufjs": "^7.2.5", + "rxjs": "^7.8.1", + "ts-proto-descriptors": "^2.0.0" + }, + "devDependencies": { + "@restorecommerce/dev": "^0.0.13", + "@restorecommerce/protos": "^6.11.9", + "@types/node": "^22.7.9", + "npm-run-all": "^4.1.5", + "protoc": "^1.1.3", + "rimraf": "6.0.1", + "ts-proto": "^2.2.5", + "typescript": "^5.2.2", + "vitest": "^2.1.4" + }, + "engines": { + "node": ">= 12.18.0" + } + }, + "packages/resource-base-interface": { + "name": 
"@restorecommerce/resource-base-interface", + "version": "1.8.0", + "license": "MIT", + "dependencies": { + "@restorecommerce/acs-client": "^3.0.23", + "@restorecommerce/chassis-srv": "^1.6.8", + "@restorecommerce/grpc-client": "^2.2.8", + "@restorecommerce/kafka-client": "1.2.36", + "@restorecommerce/rc-grpc-clients": "5.1.55", + "@restorecommerce/scs-jobs": "^0.1.63", + "@restorecommerce/service-config": "^1.1.3", + "lodash": "^4.17.21", + "nice-grpc-server-reflection": "^2.0.12", + "redis": "^4.7.0" + }, + "devDependencies": { + "@restorecommerce/dev": "^0.0.13", + "@types/lodash": "^4.17.5", + "@types/nconf": "^0.10.7", + "@types/redis": "^4.0.11", + "@vitest/coverage-v8": "^3.2.4", + "arangojs": "^8.7.0", + "cross-env": "^7.0.3", + "esbuild": "^0.25.9", + "eslint": "^8.57.0", + "npm-run-all": "^4.1.5", + "rimraf": "^6.0.1", + "should": "^13.2.3", + "typescript": "^5.6.3", + "vitest": "^3.2.4" + }, + "engines": { + "node": ">= 20.0.0" + } + }, + "packages/resource-base-interface/node_modules/@bcoe/v8-coverage": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@bcoe/v8-coverage/-/v8-coverage-1.0.2.tgz", + "integrity": "sha512-6zABk/ECA/QYSCQ1NGiVwwbQerUCZ+TQbp64Q3AgmfNvurHH0j8TtXa1qbShXA6qqkpAj4V5W8pP6mLe1mcMqA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + } + }, + "packages/resource-base-interface/node_modules/@esbuild/aix-ppc64": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.9.tgz", + "integrity": "sha512-OaGtL73Jck6pBKjNIe24BnFE6agGl+6KxDtTfHhy1HmhthfKouEcOhqpSL64K4/0WCtbKFLOdzD/44cJ4k9opA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=18" + } + }, + "packages/resource-base-interface/node_modules/@esbuild/android-arm": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.9.tgz", + "integrity": 
"sha512-5WNI1DaMtxQ7t7B6xa572XMXpHAaI/9Hnhk8lcxF4zVN4xstUgTlvuGDorBguKEnZO70qwEcLpfifMLoxiPqHQ==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "packages/resource-base-interface/node_modules/@esbuild/android-arm64": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.9.tgz", + "integrity": "sha512-IDrddSmpSv51ftWslJMvl3Q2ZT98fUSL2/rlUXuVqRXHCs5EUF1/f+jbjF5+NG9UffUDMCiTyh8iec7u8RlTLg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "packages/resource-base-interface/node_modules/@esbuild/android-x64": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.9.tgz", + "integrity": "sha512-I853iMZ1hWZdNllhVZKm34f4wErd4lMyeV7BLzEExGEIZYsOzqDWDf+y082izYUE8gtJnYHdeDpN/6tUdwvfiw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" } }, - "packages/kafka-client/node_modules/json-schema-traverse": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", - "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", - "dev": true + "packages/resource-base-interface/node_modules/@esbuild/darwin-arm64": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.9.tgz", + "integrity": "sha512-XIpIDMAjOELi/9PB30vEbVMs3GV1v2zkkPnuyRRURbhqjyzIINwj+nbQATh4H9GxUgH1kFsEyQMxwiLFKUS6Rg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } }, - "packages/kafka-client/node_modules/minimatch": { - "version": "3.1.2", - "resolved": 
"https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", - "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "packages/resource-base-interface/node_modules/@esbuild/darwin-x64": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.9.tgz", + "integrity": "sha512-jhHfBzjYTA1IQu8VyrjCX4ApJDnH+ez+IYVEoJHeqJm9VhG9Dh2BYaJritkYK3vMaXrf7Ogr/0MQ8/MeIefsPQ==", + "cpu": [ + "x64" + ], "dev": true, - "dependencies": { - "brace-expansion": "^1.1.7" - }, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], "engines": { - "node": "*" + "node": ">=18" } }, - "packages/kafka-client/node_modules/pathe": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", - "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", + "packages/resource-base-interface/node_modules/@esbuild/freebsd-arm64": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.9.tgz", + "integrity": "sha512-z93DmbnY6fX9+KdD4Ue/H6sYs+bhFQJNCPZsi4XWJoYblUqT06MQUdBCpcSfuiN72AbqeBFu5LVQTjfXDE2A6Q==", + "cpu": [ + "arm64" + ], "dev": true, - "license": "MIT" + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } }, - "packages/kafka-client/node_modules/supports-color": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", - "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "packages/resource-base-interface/node_modules/@esbuild/freebsd-x64": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.9.tgz", + "integrity": "sha512-mrKX6H/vOyo5v71YfXWJxLVxgy1kyt1MQaD8wZJgJfG4gq4DpQGpgTB74e5yBeQdyMTbgxp0YtNj7NuHN0PoZg==", + "cpu": [ + 
"x64" + ], "dev": true, - "dependencies": { - "has-flag": "^4.0.0" - }, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], "engines": { - "node": ">=8" + "node": ">=18" } }, - "packages/kafka-client/node_modules/tinyrainbow": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/tinyrainbow/-/tinyrainbow-2.0.0.tgz", - "integrity": "sha512-op4nsTR47R6p0vMUUoYl/a+ljLFVtlfaXkLQmqfLR1qHma1h/ysYk4hEXZ880bf2CYgTskvTa/e196Vd5dDQXw==", + "packages/resource-base-interface/node_modules/@esbuild/linux-arm": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.9.tgz", + "integrity": "sha512-HBU2Xv78SMgaydBmdor38lg8YDnFKSARg1Q6AT0/y2ezUAKiZvc211RDFHlEZRFNRVhcMamiToo7bDx3VEOYQw==", + "cpu": [ + "arm" + ], "dev": true, "license": "MIT", + "optional": true, + "os": [ + "linux" + ], "engines": { - "node": ">=14.0.0" + "node": ">=18" } }, - "packages/kafka-client/node_modules/tinyspy": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/tinyspy/-/tinyspy-4.0.3.tgz", - "integrity": "sha512-t2T/WLB2WRgZ9EpE4jgPJ9w+i66UZfDc8wHh0xrwiRNN+UwH98GIJkTeZqX9rg0i0ptwzqW+uYeIF0T4F8LR7A==", + "packages/resource-base-interface/node_modules/@esbuild/linux-arm64": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.9.tgz", + "integrity": "sha512-BlB7bIcLT3G26urh5Dmse7fiLmLXnRlopw4s8DalgZ8ef79Jj4aUcYbk90g8iCa2467HX8SAIidbL7gsqXHdRw==", + "cpu": [ + "arm64" + ], "dev": true, "license": "MIT", + "optional": true, + "os": [ + "linux" + ], "engines": { - "node": ">=14.0.0" + "node": ">=18" } }, - "packages/kafka-client/node_modules/vite-node": { - "version": "3.2.4", - "resolved": "https://registry.npmjs.org/vite-node/-/vite-node-3.2.4.tgz", - "integrity": "sha512-EbKSKh+bh1E1IFxeO0pg1n4dvoOTt0UDiXMd/qn++r98+jPO1xtJilvXldeuQ8giIB5IkpjCgMleHMNEsGH6pg==", + "packages/resource-base-interface/node_modules/@esbuild/linux-ia32": { + "version": "0.25.9", + 
"resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.9.tgz", + "integrity": "sha512-e7S3MOJPZGp2QW6AK6+Ly81rC7oOSerQ+P8L0ta4FhVi+/j/v2yZzx5CqqDaWjtPFfYz21Vi1S0auHrap3Ma3A==", + "cpu": [ + "ia32" + ], "dev": true, "license": "MIT", - "dependencies": { - "cac": "^6.7.14", - "debug": "^4.4.1", - "es-module-lexer": "^1.7.0", - "pathe": "^2.0.3", - "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0" - }, - "bin": { - "vite-node": "vite-node.mjs" - }, + "optional": true, + "os": [ + "linux" + ], "engines": { - "node": "^18.0.0 || ^20.0.0 || >=22.0.0" - }, - "funding": { - "url": "https://opencollective.com/vitest" + "node": ">=18" } }, - "packages/kafka-client/node_modules/vitest": { - "version": "3.2.4", - "resolved": "https://registry.npmjs.org/vitest/-/vitest-3.2.4.tgz", - "integrity": "sha512-LUCP5ev3GURDysTWiP47wRRUpLKMOfPh+yKTx3kVIEiu5KOMeqzpnYNsKyOoVrULivR8tLcks4+lga33Whn90A==", + "packages/resource-base-interface/node_modules/@esbuild/linux-loong64": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.9.tgz", + "integrity": "sha512-Sbe10Bnn0oUAB2AalYztvGcK+o6YFFA/9829PhOCUS9vkJElXGdphz0A3DbMdP8gmKkqPmPcMJmJOrI3VYB1JQ==", + "cpu": [ + "loong64" + ], "dev": true, "license": "MIT", - "dependencies": { - "@types/chai": "^5.2.2", - "@vitest/expect": "3.2.4", - "@vitest/mocker": "3.2.4", - "@vitest/pretty-format": "^3.2.4", - "@vitest/runner": "3.2.4", - "@vitest/snapshot": "3.2.4", - "@vitest/spy": "3.2.4", - "@vitest/utils": "3.2.4", - "chai": "^5.2.0", - "debug": "^4.4.1", - "expect-type": "^1.2.1", - "magic-string": "^0.30.17", - "pathe": "^2.0.3", - "picomatch": "^4.0.2", - "std-env": "^3.9.0", - "tinybench": "^2.9.0", - "tinyexec": "^0.3.2", - "tinyglobby": "^0.2.14", - "tinypool": "^1.1.1", - "tinyrainbow": "^2.0.0", - "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0", - "vite-node": "3.2.4", - "why-is-node-running": "^2.3.0" - }, - "bin": { - "vitest": "vitest.mjs" - }, + "optional": true, + "os": [ + 
"linux" + ], "engines": { - "node": "^18.0.0 || ^20.0.0 || >=22.0.0" - }, - "funding": { - "url": "https://opencollective.com/vitest" - }, - "peerDependencies": { - "@edge-runtime/vm": "*", - "@types/debug": "^4.1.12", - "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", - "@vitest/browser": "3.2.4", - "@vitest/ui": "3.2.4", - "happy-dom": "*", - "jsdom": "*" - }, - "peerDependenciesMeta": { - "@edge-runtime/vm": { - "optional": true - }, - "@types/debug": { - "optional": true - }, - "@types/node": { - "optional": true - }, - "@vitest/browser": { - "optional": true - }, - "@vitest/ui": { - "optional": true - }, - "happy-dom": { - "optional": true - }, - "jsdom": { - "optional": true - } + "node": ">=18" } }, - "packages/kafka-client/node_modules/vitest/node_modules/@vitest/mocker": { - "version": "3.2.4", - "resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-3.2.4.tgz", - "integrity": "sha512-46ryTE9RZO/rfDd7pEqFl7etuyzekzEhUbTW3BvmeO/BcCMEgq59BKhek3dXDWgAj4oMK6OZi+vRr1wPW6qjEQ==", + "packages/resource-base-interface/node_modules/@esbuild/linux-mips64el": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.9.tgz", + "integrity": "sha512-YcM5br0mVyZw2jcQeLIkhWtKPeVfAerES5PvOzaDxVtIyZ2NUBZKNLjC5z3/fUlDgT6w89VsxP2qzNipOaaDyA==", + "cpu": [ + "mips64el" + ], "dev": true, "license": "MIT", - "dependencies": { - "@vitest/spy": "3.2.4", - "estree-walker": "^3.0.3", - "magic-string": "^0.30.17" - }, - "funding": { - "url": "https://opencollective.com/vitest" - }, - "peerDependencies": { - "msw": "^2.4.9", - "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0" - }, - "peerDependenciesMeta": { - "msw": { - "optional": true - }, - "vite": { - "optional": true - } + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" } }, - "packages/koa-health-check": { - "name": "@restorecommerce/koa-health-check", - "version": "1.0.9", + "packages/resource-base-interface/node_modules/@esbuild/linux-ppc64": { + 
"version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.9.tgz", + "integrity": "sha512-++0HQvasdo20JytyDpFvQtNrEsAgNG2CY1CLMwGXfFTKGBGQT3bOeLSYE2l1fYdvML5KUuwn9Z8L1EWe2tzs1w==", + "cpu": [ + "ppc64" + ], + "dev": true, "license": "MIT", - "dependencies": { - "@cloudnative/health": "^2.1.2", - "connect": "^3.7.0", - "koa": "^2.15.3", - "npm-run-all": "^4.1.5", - "rimraf": "6.0.1", - "typescript": "^5.2.2" - }, - "devDependencies": { - "@restorecommerce/dev": "^0.0.13", - "@types/koa": "^2.13.11", - "@types/node": "^22.7.9", - "@types/sinon": "^17.0.1", - "eslint": "^9.13.0", - "eslint-plugin-prefer-arrow-functions": "^3.2.4", - "source-map-support": "^0.5.21", - "ts-node": "^10.9.1", - "vitest": "^2.1.4" - }, + "optional": true, + "os": [ + "linux" + ], "engines": { - "node": ">= 8.0.0" + "node": ">=18" } }, - "packages/koa-req-res-logger": { - "name": "@restorecommerce/koa-req-res-logger", - "version": "1.1.10", + "packages/resource-base-interface/node_modules/@esbuild/linux-riscv64": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.9.tgz", + "integrity": "sha512-uNIBa279Y3fkjV+2cUjx36xkx7eSjb8IvnL01eXUKXez/CBHNRw5ekCGMPM0BcmqBxBcdgUWuUXmVWwm4CH9kg==", + "cpu": [ + "riscv64" + ], + "dev": true, "license": "MIT", - "dependencies": { - "debug": "^4.3.5", - "winston": "^3.11.0" - }, - "devDependencies": { - "@types/node": "^22.7.9", - "eslint": "9.13.0", - "eslint-plugin-prefer-arrow-functions": "^3.2.4", - "is-generator": "^1.0.3", - "isomorphic-fetch": "^3.0.0", - "koa": "^2.15.3", - "koa-bodyparser": "^4.4.1", - "npm-run-all": "^4.1.5", - "should": "^13.2.3", - "typescript": "^5.2.2", - "vitest": "^2.1.4" - }, + "optional": true, + "os": [ + "linux" + ], "engines": { - "node": ">= 8.0.0" + "node": ">=18" } }, - "packages/koa-req-res-logger/node_modules/@eslint/js": { - "version": "9.13.0", - "resolved": 
"https://registry.npmjs.org/@eslint/js/-/js-9.13.0.tgz", - "integrity": "sha512-IFLyoY4d72Z5y/6o/BazFBezupzI/taV8sGumxTAVw3lXG9A6md1Dc34T9s1FoD/an9pJH8RHbAxsaEbBed9lA==", + "packages/resource-base-interface/node_modules/@esbuild/linux-s390x": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.9.tgz", + "integrity": "sha512-Mfiphvp3MjC/lctb+7D287Xw1DGzqJPb/J2aHHcHxflUo+8tmN/6d4k6I2yFR7BVo5/g7x2Monq4+Yew0EHRIA==", + "cpu": [ + "s390x" + ], "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + "node": ">=18" } }, - "packages/koa-req-res-logger/node_modules/@humanwhocodes/retry": { - "version": "0.3.1", - "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.3.1.tgz", - "integrity": "sha512-JBxkERygn7Bv/GbN5Rv8Ul6LVknS+5Bp6RgDC/O8gEBU/yeH5Ui5C/OlWrTb6qct7LjjfT6Re2NxB0ln0yYybA==", + "packages/resource-base-interface/node_modules/@esbuild/linux-x64": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.9.tgz", + "integrity": "sha512-iSwByxzRe48YVkmpbgoxVzn76BXjlYFXC7NvLYq+b+kDjyyk30J0JY47DIn8z1MO3K0oSl9fZoRmZPQI4Hklzg==", + "cpu": [ + "x64" + ], "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], "engines": { - "node": ">=18.18" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/nzakas" + "node": ">=18" } }, - "packages/koa-req-res-logger/node_modules/ajv": { - "version": "6.12.6", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", - "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "packages/resource-base-interface/node_modules/@esbuild/netbsd-x64": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.9.tgz", + "integrity": 
"sha512-RLLdkflmqRG8KanPGOU7Rpg829ZHu8nFy5Pqdi9U01VYtG9Y0zOG6Vr2z4/S+/3zIyOxiK6cCeYNWOFR9QP87g==", + "cpu": [ + "x64" + ], "dev": true, - "dependencies": { - "fast-deep-equal": "^3.1.1", - "fast-json-stable-stringify": "^2.0.0", - "json-schema-traverse": "^0.4.1", - "uri-js": "^4.2.2" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/epoberezkin" + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" } }, - "packages/koa-req-res-logger/node_modules/ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "packages/resource-base-interface/node_modules/@esbuild/openbsd-arm64": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.9.tgz", + "integrity": "sha512-YaFBlPGeDasft5IIM+CQAhJAqS3St3nJzDEgsgFixcfZeyGPCd6eJBWzke5piZuZ7CtL656eOSYKk4Ls2C0FRQ==", + "cpu": [ + "arm64" + ], "dev": true, - "dependencies": { - "color-convert": "^2.0.1" - }, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" + "node": ">=18" } }, - "packages/koa-req-res-logger/node_modules/brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "packages/resource-base-interface/node_modules/@esbuild/openbsd-x64": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.9.tgz", + "integrity": "sha512-1MkgTCuvMGWuqVtAvkpkXFmtL8XhWy+j4jaSO2wxfJtilVCi0ZE37b8uOdMItIHz4I6z1bWWtEX4CJwcKYLcuA==", + "cpu": [ + "x64" + ], "dev": true, - "dependencies": { 
- "balanced-match": "^1.0.0", - "concat-map": "0.0.1" + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" } }, - "packages/koa-req-res-logger/node_modules/chalk": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", - "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "packages/resource-base-interface/node_modules/@esbuild/sunos-x64": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.9.tgz", + "integrity": "sha512-WjH4s6hzo00nNezhp3wFIAfmGZ8U7KtrJNlFMRKxiI9mxEK1scOMAaa9i4crUtu+tBr+0IN6JCuAcSBJZfnphw==", + "cpu": [ + "x64" + ], "dev": true, - "dependencies": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" - }, + "license": "MIT", + "optional": true, + "os": [ + "sunos" + ], "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/chalk?sponsor=1" + "node": ">=18" } }, - "packages/koa-req-res-logger/node_modules/color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "packages/resource-base-interface/node_modules/@esbuild/win32-arm64": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.9.tgz", + "integrity": "sha512-mGFrVJHmZiRqmP8xFOc6b84/7xa5y5YvR1x8djzXpJBSv/UsNK6aqec+6JDjConTgvvQefdGhFDAs2DLAds6gQ==", + "cpu": [ + "arm64" + ], "dev": true, - "dependencies": { - "color-name": "~1.1.4" - }, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], "engines": { - "node": ">=7.0.0" + "node": ">=18" } }, - "packages/koa-req-res-logger/node_modules/color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": 
"sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - "dev": true + "packages/resource-base-interface/node_modules/@esbuild/win32-ia32": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.9.tgz", + "integrity": "sha512-b33gLVU2k11nVx1OhX3C8QQP6UHQK4ZtN56oFWvVXvz2VkDoe6fbG8TOgHFxEvqeqohmRnIHe5A1+HADk4OQww==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } }, - "packages/koa-req-res-logger/node_modules/escape-string-regexp": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", - "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "packages/resource-base-interface/node_modules/@esbuild/win32-x64": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.9.tgz", + "integrity": "sha512-PPOl1mi6lpLNQxnGoyAfschAodRFYXJ+9fs6WHXz7CSWKbOqiMZsubC+BQsVKuul+3vKLuwTHsS2c2y9EoKwxQ==", + "cpu": [ + "x64" + ], "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "node": ">=18" } }, - "packages/koa-req-res-logger/node_modules/eslint": { - "version": "9.13.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.13.0.tgz", - "integrity": "sha512-EYZK6SX6zjFHST/HRytOdA/zE72Cq/bfw45LSyuwrdvcclb/gqV8RRQxywOBEWO2+WDpva6UZa4CcDeJKzUCFA==", + "packages/resource-base-interface/node_modules/@eslint/eslintrc": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.4.tgz", + "integrity": "sha512-269Z39MS6wVJtsoUl10L60WdkhJVdPG24Q4eZTH3nnF6lpvSShEK3wQjDX9JRWAUPvPh7COouPpU9IrqaZFvtQ==", "dev": true, + "license": "MIT", "dependencies": { - 
"@eslint-community/eslint-utils": "^4.2.0", - "@eslint-community/regexpp": "^4.11.0", - "@eslint/config-array": "^0.18.0", - "@eslint/core": "^0.7.0", - "@eslint/eslintrc": "^3.1.0", - "@eslint/js": "9.13.0", - "@eslint/plugin-kit": "^0.2.0", - "@humanfs/node": "^0.16.5", - "@humanwhocodes/module-importer": "^1.0.1", - "@humanwhocodes/retry": "^0.3.1", - "@types/estree": "^1.0.6", - "@types/json-schema": "^7.0.15", "ajv": "^6.12.4", - "chalk": "^4.0.0", - "cross-spawn": "^7.0.2", "debug": "^4.3.2", - "escape-string-regexp": "^4.0.0", - "eslint-scope": "^8.1.0", - "eslint-visitor-keys": "^4.1.0", - "espree": "^10.2.0", - "esquery": "^1.5.0", - "esutils": "^2.0.2", - "fast-deep-equal": "^3.1.3", - "file-entry-cache": "^8.0.0", - "find-up": "^5.0.0", - "glob-parent": "^6.0.2", + "espree": "^9.6.0", + "globals": "^13.19.0", "ignore": "^5.2.0", - "imurmurhash": "^0.1.4", - "is-glob": "^4.0.0", - "json-stable-stringify-without-jsonify": "^1.0.1", - "lodash.merge": "^4.6.2", + "import-fresh": "^3.2.1", + "js-yaml": "^4.1.0", "minimatch": "^3.1.2", - "natural-compare": "^1.4.0", - "optionator": "^0.9.3", - "text-table": "^0.2.0" + "strip-json-comments": "^3.1.1" }, - "bin": { - "eslint": "bin/eslint.js" + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "packages/resource-base-interface/node_modules/@eslint/js": { + "version": "8.57.1", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.57.1.tgz", + "integrity": "sha512-d9zaMRSTIKDLhctzH12MtXvJKSSUhaHcjV+2Z+GK+EEY7XKpP5yR4x+N3TAcHTcu963nIr+TMcCb4DBCYX1z6Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, + "packages/resource-base-interface/node_modules/@restorecommerce/rc-grpc-clients": { + "version": "5.1.55", + "resolved": "https://registry.npmjs.org/@restorecommerce/rc-grpc-clients/-/rc-grpc-clients-5.1.55.tgz", + "integrity": 
"sha512-6mMzGJahGaOQ4IJolnlMHoHUmMqWeeqfEg6xHZgxXrx0z9afcSA7H2SS3h/iSuQqHClCNlUV923U+QI+cjL3zg==", + "license": "MIT", + "dependencies": { + "@grpc/grpc-js": "^1.12.2", + "@restorecommerce/grpc-client": "^2.2.8", + "@types/google-protobuf": "^3.15.10", + "google-protobuf": "^3.21.4", + "nice-grpc": "^2.1.7", + "nice-grpc-common": "^2.0.2", + "protobufjs": "^7.2.5", + "rxjs": "^7.8.1", + "ts-proto-descriptors": "^2.0.0" }, "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + "node": ">= 12.18.0" + } + }, + "packages/resource-base-interface/node_modules/@vitest/coverage-v8": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/coverage-v8/-/coverage-v8-3.2.4.tgz", + "integrity": "sha512-EyF9SXU6kS5Ku/U82E259WSnvg6c8KTjppUncuNdm5QHpe17mwREHnjDzozC8x9MZ0xfBUFSaLkRv4TMA75ALQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@ampproject/remapping": "^2.3.0", + "@bcoe/v8-coverage": "^1.0.2", + "ast-v8-to-istanbul": "^0.3.3", + "debug": "^4.4.1", + "istanbul-lib-coverage": "^3.2.2", + "istanbul-lib-report": "^3.0.1", + "istanbul-lib-source-maps": "^5.0.6", + "istanbul-reports": "^3.1.7", + "magic-string": "^0.30.17", + "magicast": "^0.3.5", + "std-env": "^3.9.0", + "test-exclude": "^7.0.1", + "tinyrainbow": "^2.0.0" }, "funding": { - "url": "https://eslint.org/donate" + "url": "https://opencollective.com/vitest" }, "peerDependencies": { - "jiti": "*" + "@vitest/browser": "3.2.4", + "vitest": "3.2.4" }, "peerDependenciesMeta": { - "jiti": { + "@vitest/browser": { "optional": true } } }, - "packages/koa-req-res-logger/node_modules/has-flag": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "packages/resource-base-interface/node_modules/@vitest/expect": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-3.2.4.tgz", + "integrity": 
"sha512-Io0yyORnB6sikFlt8QW5K7slY4OjqNX9jmJQ02QDda8lyM6B5oNgVWoSoKPac8/kgnCUzuHQKrSLtu/uOqqrig==", "dev": true, - "engines": { - "node": ">=8" + "license": "MIT", + "dependencies": { + "@types/chai": "^5.2.2", + "@vitest/spy": "3.2.4", + "@vitest/utils": "3.2.4", + "chai": "^5.2.0", + "tinyrainbow": "^2.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" } }, - "packages/koa-req-res-logger/node_modules/json-schema-traverse": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", - "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", - "dev": true - }, - "packages/koa-req-res-logger/node_modules/minimatch": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", - "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "packages/resource-base-interface/node_modules/@vitest/mocker": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-3.2.4.tgz", + "integrity": "sha512-46ryTE9RZO/rfDd7pEqFl7etuyzekzEhUbTW3BvmeO/BcCMEgq59BKhek3dXDWgAj4oMK6OZi+vRr1wPW6qjEQ==", "dev": true, + "license": "MIT", "dependencies": { - "brace-expansion": "^1.1.7" + "@vitest/spy": "3.2.4", + "estree-walker": "^3.0.3", + "magic-string": "^0.30.17" }, - "engines": { - "node": "*" + "funding": { + "url": "https://opencollective.com/vitest" + }, + "peerDependencies": { + "msw": "^2.4.9", + "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0" + }, + "peerDependenciesMeta": { + "msw": { + "optional": true + }, + "vite": { + "optional": true + } } }, - "packages/koa-req-res-logger/node_modules/supports-color": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", - "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + 
"packages/resource-base-interface/node_modules/@vitest/pretty-format": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-3.2.4.tgz", + "integrity": "sha512-IVNZik8IVRJRTr9fxlitMKeJeXFFFN0JaB9PHPGQ8NKQbGpfjlTx9zO4RefN8gp7eqjNy8nyK3NZmBzOPeIxtA==", "dev": true, + "license": "MIT", "dependencies": { - "has-flag": "^4.0.0" + "tinyrainbow": "^2.0.0" }, - "engines": { - "node": ">=8" + "funding": { + "url": "https://opencollective.com/vitest" } }, - "packages/logger": { - "name": "@restorecommerce/logger", - "version": "1.3.4", + "packages/resource-base-interface/node_modules/@vitest/runner": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-3.2.4.tgz", + "integrity": "sha512-oukfKT9Mk41LreEW09vt45f8wx7DordoWUZMYdY/cyAk7w5TWkTRCNZYF7sX7n2wB7jyGAl74OxgwhPgKaqDMQ==", + "dev": true, "license": "MIT", "dependencies": { - "cls-hooked": "^4.2.2", - "cls-rtracer": "^2.6.3", - "lodash": "^4.17.21", - "source-map-support": "^0.5.21", - "winston": "^3.15.0", - "winston-elasticsearch": "^0.19.0" - }, - "devDependencies": { - "@restorecommerce/dev": "^0.0.13", - "@types/lodash": "^4.17.12", - "@types/node": "^22.7.9", - "@types/should": "^13.0.0", - "eslint": "^9.12.0", - "eslint-plugin-prefer-arrow-functions": "^3.4.1", - "npm-run-all": "^4.1.5", - "rimraf": "^6.0.1", - "typescript": "^5.6.3", - "vitest": "^2.1.4" + "@vitest/utils": "3.2.4", + "pathe": "^2.0.3", + "strip-literal": "^3.0.0" }, - "engines": { - "node": ">= 12.18.0" + "funding": { + "url": "https://opencollective.com/vitest" } }, - "packages/mailer": { - "name": "@restorecommerce/mailer", - "version": "1.0.12", + "packages/resource-base-interface/node_modules/@vitest/snapshot": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-3.2.4.tgz", + "integrity": "sha512-dEYtS7qQP2CjU27QBC5oUOxLE/v5eLkGqPE0ZKEIDGMs4vKWe7IjgLOeauHsR0D5YuuycGRO5oSRXnwnmA78fQ==", + "dev": true, "license": "MIT", 
"dependencies": { - "nodemailer": "^6.9.15", - "nodemailer-html-to-text": "^3.2.0", - "nodemailer-mandrill-transport": "^1.2.0", - "nodemailer-stub-transport": "^1.1.0", - "nodemailer-wellknown": "^0.2.3", - "winston": "^3.11.0" - }, - "devDependencies": { - "@restorecommerce/dev": "^0.0.13", - "@types/node": "^22.7.9", - "@types/nodemailer": "^6.4.16", - "@types/nodemailer-html-to-text": "^3.1.3", - "@types/nodemailer-stub-transport": "^1.1.8", - "assert": "^2.1.0", - "eslint": "9.13.0", - "eslint-plugin-prefer-arrow-functions": "^3.2.4", - "is-generator": "^1.0.3", - "npm-run-all": "^4.1.5", - "should": "^13.2.3", - "typescript": "^5.2.2", - "vitest": "^2.1.4" + "@vitest/pretty-format": "3.2.4", + "magic-string": "^0.30.17", + "pathe": "^2.0.3" }, - "engines": { - "node": ">= 10.0.0" + "funding": { + "url": "https://opencollective.com/vitest" } }, - "packages/mailer/node_modules/@eslint/js": { - "version": "9.13.0", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.13.0.tgz", - "integrity": "sha512-IFLyoY4d72Z5y/6o/BazFBezupzI/taV8sGumxTAVw3lXG9A6md1Dc34T9s1FoD/an9pJH8RHbAxsaEbBed9lA==", + "packages/resource-base-interface/node_modules/@vitest/spy": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-3.2.4.tgz", + "integrity": "sha512-vAfasCOe6AIK70iP5UD11Ac4siNUNJ9i/9PZ3NKx07sG6sUxeag1LWdNrMWeKKYBLlzuK+Gn65Yd5nyL6ds+nw==", "dev": true, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + "license": "MIT", + "dependencies": { + "tinyspy": "^4.0.3" + }, + "funding": { + "url": "https://opencollective.com/vitest" } }, - "packages/mailer/node_modules/@humanwhocodes/retry": { - "version": "0.3.1", - "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.3.1.tgz", - "integrity": "sha512-JBxkERygn7Bv/GbN5Rv8Ul6LVknS+5Bp6RgDC/O8gEBU/yeH5Ui5C/OlWrTb6qct7LjjfT6Re2NxB0ln0yYybA==", + "packages/resource-base-interface/node_modules/@vitest/utils": { + "version": "3.2.4", + "resolved": 
"https://registry.npmjs.org/@vitest/utils/-/utils-3.2.4.tgz", + "integrity": "sha512-fB2V0JFrQSMsCo9HiSq3Ezpdv4iYaXRG1Sx8edX3MwxfyNn83mKiGzOcH+Fkxt4MHxr3y42fQi1oeAInqgX2QA==", "dev": true, - "engines": { - "node": ">=18.18" + "license": "MIT", + "dependencies": { + "@vitest/pretty-format": "3.2.4", + "loupe": "^3.1.4", + "tinyrainbow": "^2.0.0" }, "funding": { - "type": "github", - "url": "https://github.com/sponsors/nzakas" + "url": "https://opencollective.com/vitest" } }, - "packages/mailer/node_modules/ajv": { + "packages/resource-base-interface/node_modules/ajv": { "version": "6.12.6", "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", "dev": true, + "license": "MIT", "dependencies": { "fast-deep-equal": "^3.1.1", "fast-json-stable-stringify": "^2.0.0", @@ -26776,11 +29016,12 @@ "url": "https://github.com/sponsors/epoberezkin" } }, - "packages/mailer/node_modules/ansi-styles": { + "packages/resource-base-interface/node_modules/ansi-styles": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, + "license": "MIT", "dependencies": { "color-convert": "^2.0.1" }, @@ -26791,21 +29032,40 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "packages/mailer/node_modules/brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "packages/resource-base-interface/node_modules/arangojs": { + "version": "8.8.1", + "resolved": "https://registry.npmjs.org/arangojs/-/arangojs-8.8.1.tgz", + "integrity": 
"sha512-gVc5BF91nT27lB97mt+XxcGbw7yOhPIkZ0f5Nmq/ZPt1/iP62rDpH961XUyWdzj5m4H8lx2OF/O2AVefZoolXg==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@types/node": ">=14", + "multi-part": "^4.0.0", + "path-browserify": "^1.0.1", + "x3-linkedlist": "1.2.0", + "xhr": "^2.4.1" + }, + "engines": { + "node": ">=14" + } + }, + "packages/resource-base-interface/node_modules/brace-expansion": { + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", "dev": true, + "license": "MIT", "dependencies": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" } }, - "packages/mailer/node_modules/chalk": { + "packages/resource-base-interface/node_modules/chalk": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, + "license": "MIT", "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" @@ -26817,11 +29077,12 @@ "url": "https://github.com/chalk/chalk?sponsor=1" } }, - "packages/mailer/node_modules/color-convert": { + "packages/resource-base-interface/node_modules/color-convert": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, + "license": "MIT", "dependencies": { "color-name": "~1.1.4" }, @@ -26829,17 +29090,61 @@ "node": ">=7.0.0" } }, - "packages/mailer/node_modules/color-name": { + "packages/resource-base-interface/node_modules/color-name": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - 
"dev": true + "dev": true, + "license": "MIT" }, - "packages/mailer/node_modules/escape-string-regexp": { + "packages/resource-base-interface/node_modules/esbuild": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.9.tgz", + "integrity": "sha512-CRbODhYyQx3qp7ZEwzxOk4JBqmD/seJrzPa/cGjY1VtIn5E09Oi9/dB4JwctnfZ8Q8iT7rioVv5k/FNT/uf54g==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=18" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.25.9", + "@esbuild/android-arm": "0.25.9", + "@esbuild/android-arm64": "0.25.9", + "@esbuild/android-x64": "0.25.9", + "@esbuild/darwin-arm64": "0.25.9", + "@esbuild/darwin-x64": "0.25.9", + "@esbuild/freebsd-arm64": "0.25.9", + "@esbuild/freebsd-x64": "0.25.9", + "@esbuild/linux-arm": "0.25.9", + "@esbuild/linux-arm64": "0.25.9", + "@esbuild/linux-ia32": "0.25.9", + "@esbuild/linux-loong64": "0.25.9", + "@esbuild/linux-mips64el": "0.25.9", + "@esbuild/linux-ppc64": "0.25.9", + "@esbuild/linux-riscv64": "0.25.9", + "@esbuild/linux-s390x": "0.25.9", + "@esbuild/linux-x64": "0.25.9", + "@esbuild/netbsd-arm64": "0.25.9", + "@esbuild/netbsd-x64": "0.25.9", + "@esbuild/openbsd-arm64": "0.25.9", + "@esbuild/openbsd-x64": "0.25.9", + "@esbuild/openharmony-arm64": "0.25.9", + "@esbuild/sunos-x64": "0.25.9", + "@esbuild/win32-arm64": "0.25.9", + "@esbuild/win32-ia32": "0.25.9", + "@esbuild/win32-x64": "0.25.9" + } + }, + "packages/resource-base-interface/node_modules/escape-string-regexp": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", "dev": true, + "license": "MIT", "engines": { "node": ">=10" }, @@ -26847,86 +29152,195 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "packages/mailer/node_modules/eslint": { - 
"version": "9.13.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.13.0.tgz", - "integrity": "sha512-EYZK6SX6zjFHST/HRytOdA/zE72Cq/bfw45LSyuwrdvcclb/gqV8RRQxywOBEWO2+WDpva6UZa4CcDeJKzUCFA==", + "packages/resource-base-interface/node_modules/eslint": { + "version": "8.57.1", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.57.1.tgz", + "integrity": "sha512-ypowyDxpVSYpkXr9WPv2PAZCtNip1Mv5KTW0SCurXv/9iOpcrH9PaqUElksqEB6pChqHGDRCFTyrZlGhnLNGiA==", + "deprecated": "This version is no longer supported. Please see https://eslint.org/version-support for other options.", "dev": true, - "dependencies": { - "@eslint-community/eslint-utils": "^4.2.0", - "@eslint-community/regexpp": "^4.11.0", - "@eslint/config-array": "^0.18.0", - "@eslint/core": "^0.7.0", - "@eslint/eslintrc": "^3.1.0", - "@eslint/js": "9.13.0", - "@eslint/plugin-kit": "^0.2.0", - "@humanfs/node": "^0.16.5", + "license": "MIT", + "dependencies": { + "@eslint-community/eslint-utils": "^4.2.0", + "@eslint-community/regexpp": "^4.6.1", + "@eslint/eslintrc": "^2.1.4", + "@eslint/js": "8.57.1", + "@humanwhocodes/config-array": "^0.13.0", "@humanwhocodes/module-importer": "^1.0.1", - "@humanwhocodes/retry": "^0.3.1", - "@types/estree": "^1.0.6", - "@types/json-schema": "^7.0.15", + "@nodelib/fs.walk": "^1.2.8", + "@ungap/structured-clone": "^1.2.0", "ajv": "^6.12.4", "chalk": "^4.0.0", "cross-spawn": "^7.0.2", "debug": "^4.3.2", + "doctrine": "^3.0.0", "escape-string-regexp": "^4.0.0", - "eslint-scope": "^8.1.0", - "eslint-visitor-keys": "^4.1.0", - "espree": "^10.2.0", - "esquery": "^1.5.0", + "eslint-scope": "^7.2.2", + "eslint-visitor-keys": "^3.4.3", + "espree": "^9.6.1", + "esquery": "^1.4.2", "esutils": "^2.0.2", "fast-deep-equal": "^3.1.3", - "file-entry-cache": "^8.0.0", + "file-entry-cache": "^6.0.1", "find-up": "^5.0.0", "glob-parent": "^6.0.2", + "globals": "^13.19.0", + "graphemer": "^1.4.0", "ignore": "^5.2.0", "imurmurhash": "^0.1.4", "is-glob": "^4.0.0", + "is-path-inside": 
"^3.0.3", + "js-yaml": "^4.1.0", "json-stable-stringify-without-jsonify": "^1.0.1", + "levn": "^0.4.1", "lodash.merge": "^4.6.2", "minimatch": "^3.1.2", "natural-compare": "^1.4.0", "optionator": "^0.9.3", + "strip-ansi": "^6.0.1", "text-table": "^0.2.0" }, "bin": { "eslint": "bin/eslint.js" }, "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" }, "funding": { - "url": "https://eslint.org/donate" + "url": "https://opencollective.com/eslint" + } + }, + "packages/resource-base-interface/node_modules/eslint-scope": { + "version": "7.2.2", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.2.2.tgz", + "integrity": "sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "esrecurse": "^4.3.0", + "estraverse": "^5.2.0" }, - "peerDependencies": { - "jiti": "*" + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" }, - "peerDependenciesMeta": { - "jiti": { - "optional": true - } + "funding": { + "url": "https://opencollective.com/eslint" } }, - "packages/mailer/node_modules/has-flag": { + "packages/resource-base-interface/node_modules/eslint-visitor-keys": { + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", + "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "packages/resource-base-interface/node_modules/espree": { + "version": "9.6.1", + "resolved": "https://registry.npmjs.org/espree/-/espree-9.6.1.tgz", + "integrity": "sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + 
"acorn": "^8.9.0", + "acorn-jsx": "^5.3.2", + "eslint-visitor-keys": "^3.4.1" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "packages/resource-base-interface/node_modules/file-entry-cache": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", + "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==", + "dev": true, + "license": "MIT", + "dependencies": { + "flat-cache": "^3.0.4" + }, + "engines": { + "node": "^10.12.0 || >=12.0.0" + } + }, + "packages/resource-base-interface/node_modules/flat-cache": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.2.0.tgz", + "integrity": "sha512-CYcENa+FtcUKLmhhqyctpclsq7QF38pKjZHsGNiSQF5r4FtoKDWabFDl3hzaEQMvT1LHEysw5twgLvpYYb4vbw==", + "dev": true, + "license": "MIT", + "dependencies": { + "flatted": "^3.2.9", + "keyv": "^4.5.3", + "rimraf": "^3.0.2" + }, + "engines": { + "node": "^10.12.0 || >=12.0.0" + } + }, + "packages/resource-base-interface/node_modules/flat-cache/node_modules/rimraf": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "deprecated": "Rimraf versions prior to v4 are no longer supported", + "dev": true, + "license": "ISC", + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "packages/resource-base-interface/node_modules/globals": { + "version": "13.24.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.24.0.tgz", + "integrity": "sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ==", + "dev": true, + "license": "MIT", + "dependencies": { + 
"type-fest": "^0.20.2" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "packages/resource-base-interface/node_modules/has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true, + "license": "MIT", "engines": { "node": ">=8" } }, - "packages/mailer/node_modules/json-schema-traverse": { + "packages/resource-base-interface/node_modules/json-schema-traverse": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", - "dev": true + "dev": true, + "license": "MIT" }, - "packages/mailer/node_modules/minimatch": { + "packages/resource-base-interface/node_modules/minimatch": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", "dev": true, + "license": "ISC", "dependencies": { "brace-expansion": "^1.1.7" }, @@ -26934,11 +29348,19 @@ "node": "*" } }, - "packages/mailer/node_modules/supports-color": { + "packages/resource-base-interface/node_modules/pathe": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", + "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", + "dev": true, + "license": "MIT" + }, + "packages/resource-base-interface/node_modules/supports-color": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, + "license": "MIT", 
"dependencies": { "has-flag": "^4.0.0" }, @@ -26946,39 +29368,133 @@ "node": ">=8" } }, - "packages/protos": { - "name": "@restorecommerce/protos", - "version": "6.11.9", - "license": "MIT" + "packages/resource-base-interface/node_modules/tinyrainbow": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/tinyrainbow/-/tinyrainbow-2.0.0.tgz", + "integrity": "sha512-op4nsTR47R6p0vMUUoYl/a+ljLFVtlfaXkLQmqfLR1qHma1h/ysYk4hEXZ880bf2CYgTskvTa/e196Vd5dDQXw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14.0.0" + } }, - "packages/rc-grpc-clients": { - "name": "@restorecommerce/rc-grpc-clients", - "version": "5.1.58", + "packages/resource-base-interface/node_modules/tinyspy": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/tinyspy/-/tinyspy-4.0.3.tgz", + "integrity": "sha512-t2T/WLB2WRgZ9EpE4jgPJ9w+i66UZfDc8wHh0xrwiRNN+UwH98GIJkTeZqX9rg0i0ptwzqW+uYeIF0T4F8LR7A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14.0.0" + } + }, + "packages/resource-base-interface/node_modules/type-fest": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", + "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", + "dev": true, + "license": "(MIT OR CC0-1.0)", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "packages/resource-base-interface/node_modules/vite-node": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/vite-node/-/vite-node-3.2.4.tgz", + "integrity": "sha512-EbKSKh+bh1E1IFxeO0pg1n4dvoOTt0UDiXMd/qn++r98+jPO1xtJilvXldeuQ8giIB5IkpjCgMleHMNEsGH6pg==", + "dev": true, "license": "MIT", "dependencies": { - "@grpc/grpc-js": "^1.12.2", - "@restorecommerce/grpc-client": "^2.2.9", - "@types/google-protobuf": "^3.15.10", - "google-protobuf": "^3.21.4", - "nice-grpc": "^2.1.7", - "nice-grpc-common": "^2.0.2", - "protobufjs": "^7.2.5", - 
"rxjs": "^7.8.1", - "ts-proto-descriptors": "^2.0.0" + "cac": "^6.7.14", + "debug": "^4.4.1", + "es-module-lexer": "^1.7.0", + "pathe": "^2.0.3", + "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0" }, - "devDependencies": { - "@restorecommerce/dev": "^0.0.13", - "@restorecommerce/protos": "^6.11.9", - "@types/node": "^22.7.9", - "npm-run-all": "^4.1.5", - "protoc": "^1.1.3", - "rimraf": "6.0.1", - "ts-proto": "^2.2.5", - "typescript": "^5.2.2", - "vitest": "^2.1.4" + "bin": { + "vite-node": "vite-node.mjs" }, "engines": { - "node": ">= 12.18.0" + "node": "^18.0.0 || ^20.0.0 || >=22.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "packages/resource-base-interface/node_modules/vitest": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/vitest/-/vitest-3.2.4.tgz", + "integrity": "sha512-LUCP5ev3GURDysTWiP47wRRUpLKMOfPh+yKTx3kVIEiu5KOMeqzpnYNsKyOoVrULivR8tLcks4+lga33Whn90A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/chai": "^5.2.2", + "@vitest/expect": "3.2.4", + "@vitest/mocker": "3.2.4", + "@vitest/pretty-format": "^3.2.4", + "@vitest/runner": "3.2.4", + "@vitest/snapshot": "3.2.4", + "@vitest/spy": "3.2.4", + "@vitest/utils": "3.2.4", + "chai": "^5.2.0", + "debug": "^4.4.1", + "expect-type": "^1.2.1", + "magic-string": "^0.30.17", + "pathe": "^2.0.3", + "picomatch": "^4.0.2", + "std-env": "^3.9.0", + "tinybench": "^2.9.0", + "tinyexec": "^0.3.2", + "tinyglobby": "^0.2.14", + "tinypool": "^1.1.1", + "tinyrainbow": "^2.0.0", + "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0", + "vite-node": "3.2.4", + "why-is-node-running": "^2.3.0" + }, + "bin": { + "vitest": "vitest.mjs" + }, + "engines": { + "node": "^18.0.0 || ^20.0.0 || >=22.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + }, + "peerDependencies": { + "@edge-runtime/vm": "*", + "@types/debug": "^4.1.12", + "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", + "@vitest/browser": "3.2.4", + "@vitest/ui": "3.2.4", + "happy-dom": "*", + "jsdom": 
"*" + }, + "peerDependenciesMeta": { + "@edge-runtime/vm": { + "optional": true + }, + "@types/debug": { + "optional": true + }, + "@types/node": { + "optional": true + }, + "@vitest/browser": { + "optional": true + }, + "@vitest/ui": { + "optional": true + }, + "happy-dom": { + "optional": true + }, + "jsdom": { + "optional": true + } } }, "packages/scs-jobs": { diff --git a/packages/chassis-srv/.editorconfig b/packages/chassis-srv/.editorconfig new file mode 100644 index 00000000..4a7ea303 --- /dev/null +++ b/packages/chassis-srv/.editorconfig @@ -0,0 +1,12 @@ +root = true + +[*] +indent_style = space +indent_size = 2 +end_of_line = lf +charset = utf-8 +trim_trailing_whitespace = true +insert_final_newline = true + +[*.md] +trim_trailing_whitespace = false diff --git a/packages/chassis-srv/.gitignore b/packages/chassis-srv/.gitignore new file mode 100644 index 00000000..c37047a4 --- /dev/null +++ b/packages/chassis-srv/.gitignore @@ -0,0 +1,21 @@ +*~ +*.bk +*.d.ts +*.js +*.log + +.directory +.idea +.nyc_output +.project +.settings +.vscode + +coverage/ +build/ +lib/ +node_modules/ +npm-debug.log + +!.eslintrc.js +!setupTopics.js diff --git a/packages/chassis-srv/.husky b/packages/chassis-srv/.husky new file mode 100644 index 00000000..3dbc0ecc --- /dev/null +++ b/packages/chassis-srv/.husky @@ -0,0 +1,12 @@ +#!/usr/bin/env sh +. 
"$(dirname -- "$0")/_/husky.sh" + +BRANCH=$(git rev-parse --abbrev-ref HEAD) + +if [ "$BRANCH" = "main" ] +then + echo "committing directly to main is blocked" + exit 1 +fi + +./node_modules/.bin/commitlint -e $1 \ No newline at end of file diff --git a/packages/chassis-srv/.mocharc.json b/packages/chassis-srv/.mocharc.json new file mode 100644 index 00000000..8e40263a --- /dev/null +++ b/packages/chassis-srv/.mocharc.json @@ -0,0 +1,5 @@ +{ + "extension": ["ts"], + "spec": "test/**/*.spec.ts", + "require": "ts-node/register" +} diff --git a/packages/chassis-srv/.npmignore b/packages/chassis-srv/.npmignore new file mode 100644 index 00000000..4c96c248 --- /dev/null +++ b/packages/chassis-srv/.npmignore @@ -0,0 +1,5 @@ +.* +src +coverage +test + diff --git a/packages/chassis-srv/CHANGELOG.md b/packages/chassis-srv/CHANGELOG.md new file mode 100644 index 00000000..4cfaa46d --- /dev/null +++ b/packages/chassis-srv/CHANGELOG.md @@ -0,0 +1,284 @@ +### 1.6.1 (May 14th, 2024) + +- remove response logging + +### 1.6.0 (April 20th, 2024) + +- changes for custom arguments for custom query + +### 1.5.0 (April 15th, 2024) + +- update deps + +### 1.4.9 (March 19th, 2024) + +- update deps + +### 1.4.8 (February 29th, 2024) + +- update deps + +### 1.4.7 (February 21st, 2024) + +- update deps + +### 1.4.6 (December 9th, 2023) + +- update grpc-client dependency + +### 1.4.5 (December 9th, 2023) + +- metadata upstream passing + +### 1.4.4 (November 26th, 2023) + +- removed deprecated method (collection.load) + +### 1.4.3 (November 25th, 2023) + +- updated all dependencies (added created_by field to meta and client_id to tokens) + +### 1.4.2 (November 20th, 2023) + +- updated all dependencies + +### 1.4.1 (November 15th, 2023) + +- updated token proto for expires_in, last_login and user proto for last_access +- updated all dependencies + +### 1.4.0 (October 7th, 2023) + +- updated node version to 20 and all other dependencies + +### 1.3.1 (September 18th, 2023) + +- updated test proto 
files + +### 1.3.0 (September 18th, 2023) + +- updated deps (all fields are made optional in proto file) + +### 1.2.6 (July 21st, 2023) + +- updated deps + +### 1.2.5 (July 21st, 2023) + +- updated deps + +### 1.2.4 (July 12th, 2023) + +- updated deps + +### 1.2.3 (July 11th, 2023) + +- updated deps (google.protobuf.timestamp proto usage for date fields) + +### 1.2.2 (June 16th, 2023) + +- updated libs (rename resource to master_data, up kafka-client and rc-grpc-clients) + +### 1.2.1 (May 31st, 2023) + +- renamed filter to filters for Graph filters and fixed tests + +### 1.2.0 (May 30th, 2023) + +- updated deps (For pluralized protos and libs) + +### 1.1.2 (October 14th, 2022) + +- updated dependencies + +### 1.1.1 (October 14th, 2022) + +- updated dependencies logger (for centralized field handling), protos (address changes) + +### 1.1.0 (October 4th, 2022) + +- support for full text search + +### 1.0.2 (August 25th, 2022) + +- move dev deps to main deps as needed by other modules + +### 1.0.1 (August 25th, 2022) + +- added google-protobuf as main dep as needed by reflection service + +### 1.0.0 (August 25th, 2022) + +- migrated to fully-typed grpc-client and server +- up deps + +### 0.3.12 (July 7th, 2022) + +- up deps + +### 0.3.11 (June 28th, 2022) + +- up deps and fixed logger error messages + +### 0.3.10 (May 27th, 2022) + +- up dependencies + +### 0.3.9 (March 23rd, 2022) + +- fix traversal filters for root entity + +### 0.3.8 (February 18th, 2022) + +- fixed offset store config + +### 0.3.7 (February 11th, 2022) + +- updated dependencies +- removed emitting healthcheck response to kafka +- migrate from ioredis to redis + +### 0.3.6 (January 28th, 2022) + +- restructured graph traversal api +- updated dependencies + +### 0.3.5 (December 12th, 2021) + +- updated RC dependencies + +### 0.3.4 (December 9th, 2021) + +- updated dependencies + +### 0.3.3 (December 2nd, 2021) + +- fix empty uniqueness vertices check + +### 0.3.2 (September 6th, 2021) + +- fix 
removeVertex response + +### 0.3.1 (August 9th, 2021) + +- fix options for protoLoader.loadSync to restore enums as strings + +### 0.3.0 (August 4th, 2021) + +- updated grpc-client, arangojs +- fix command interface to not to throw errors and send error response back, fixed command interface tests and updated arangojs in package.json +- migrated from grpc to grpc-js and fixed the tests. +- included error array changes + +### 0.2.3 (May 18th, 2021) + +- improved error logging + +### 0.2.2 (May 5th, 2021) + +- updated kafka-client + +### 0.2.0 (April 27th, 2021) + +#### Contains breaking changes! + +- switch to kafkajs +- changed config format for events +- updated dependencies + +### 0.1.12 (March 19th, 2021) + +- migrated from redis to ioredis module (command interface and offset store) +- modified flushCache to stream kesy and delete +- updated dependencies + +### 0.1.11 (March 9th, 2021) + +- fix for removing buffer fields in response message from logging +- added mask fields to remove logging data +- updated dependencies + +### 0.1.10 (February 15th, 2021) + +- fix to send response stream object and return write response for back pressure handling on server write +- updated request stream on server to return stream object +- updated dependencies + +### 0.1.9 (February 11th, 2021) + +- Fix oneOf fields implementation to work for both resource service and device service + +### 0.1.8 (January 22nd, 2020) + +- Remove of unnecessary oneOf fields from the create, update, upsert requests, +which cause gRPC protobuff error + +### 0.1.7 (November 18th, 2020) + +- Update dependencies + +### 0.1.6 (October 15th, 2020) + +- Update logger and service-config +- Add redis readiness check +- Add dependency readiness checks + +### 0.1.5 (October 9th, 2020) + +- Add standard GRPC health service + +### 0.1.4 (August 19th, 2020) + +- updated kafka-client and logger + +### 0.1.3 (July 30th, 2020) + +- added check for collection exists for arangoDB + +### 0.1.2 (July 27th, 2020) + +- 
added flush cache on command interface + +### 0.1.1 (July 21st, 2020) + +- fix to update keys for setApiKey and configUpdate + +### 0.1.0 (July 21st, 2020) + +- restructured config of command interface + +### 0.0.9 (July 21st, 2020) + +- added setApiKey command, updated tests and documentation + +### 0.0.8 (July 8th, 2020) + +- updated grpc-client, kafka-client and other dependencies + +### 0.0.7 (July 8th, 2020) + +- fix to send complete error object in streaming case + +### 0.0.6 (July 2nd, 2020) + +- fix to send error details + +### 0.0.5 (July 2nd, 2020) + +- map errors + +### 0.0.4 (July 2nd, 2020) + +- fix request streaming error handling and updated dependecies + +### 0.0.3 (March 4th, 2020) + +- added support to remove buffer fields + +### 0.0.2 (January 29th, 2020) + +- added config_update command on command-interface + +### 0.0.1 (January 29th, 2020) + +Initial share. diff --git a/packages/chassis-srv/README.adoc b/packages/chassis-srv/README.adoc new file mode 100644 index 00000000..7e4533b4 --- /dev/null +++ b/packages/chassis-srv/README.adoc @@ -0,0 +1,15 @@ += Chassis Service + +https://www.npmjs.com/package/@restorecommerce/chassis-srv[image:https://img.shields.io/npm/v/@restorecommerce/chassis-srv.svg?style=flat-square[Version]] +https://github.com/restorecommerce/chassis-srv/actions/workflows/build.yaml[image:https://img.shields.io/github/actions/workflow/status/restorecommerce/chassis-srv/build.yaml?style=flat-square[Build Status]] +https://depfu.com/repos/github/restorecommerce/chassis-srv?branch=master[image:https://img.shields.io/depfu/dependencies/github/restorecommerce/chassis-srv?style=flat-square[Dependencies]] +https://coveralls.io/github/restorecommerce/chassis-srv?branch=master[image:https://img.shields.io/coveralls/github/restorecommerce/chassis-srv/master.svg?style=flat-square[Coverage Status]] + +A chassis microservice. 
+ +Please consult the documentation for using it: + +- *link:https://docs.restorecommerce.io/chassis-srv/index.html[Usage]* +- *link:https://docs.restorecommerce.io/architecture/index.html[Restorecommerce Architecture]* + +Part of link:https://github.com/restorecommerce[Restorecommerce]. diff --git a/packages/chassis-srv/docs/antora.yml b/packages/chassis-srv/docs/antora.yml new file mode 100644 index 00000000..d295ac3d --- /dev/null +++ b/packages/chassis-srv/docs/antora.yml @@ -0,0 +1,5 @@ +name: chassis-srv +title: Chassis Service +version: master +nav: + - modules/ROOT/nav.adoc diff --git a/packages/chassis-srv/docs/modules/ROOT/nav.adoc b/packages/chassis-srv/docs/modules/ROOT/nav.adoc new file mode 100644 index 00000000..58459568 --- /dev/null +++ b/packages/chassis-srv/docs/modules/ROOT/nav.adoc @@ -0,0 +1,18 @@ +// INDEX +* xref:index.adoc[Chassis Service] + +// FEATURES +* xref:index.adoc#features[Features] + +// ARCHITECTURE +* xref:index.adoc#architecture[Architecture] + +// CONFIGURATION +* xref:index.adoc#configuration[Configuration] +** xref:index.adoc#configuration_logging[Logging] +** xref:index.adoc#configuration_server[Server] +** xref:index.adoc#configuration_middleware[Middleware] +** xref:index.adoc#configuration_cache[Cache] +** xref:index.adoc#configuration_database[Database] +** xref:index.adoc#configuration_command_interface[Command Interface] +** xref:index.adoc#configuration_offset_store[Offset Store] diff --git a/packages/chassis-srv/docs/modules/ROOT/pages/command-interface.adoc b/packages/chassis-srv/docs/modules/ROOT/pages/command-interface.adoc new file mode 100644 index 00000000..f991eaa4 --- /dev/null +++ b/packages/chassis-srv/docs/modules/ROOT/pages/command-interface.adoc @@ -0,0 +1,153 @@ += Command Interface + +The generic command interface allows querying information from and triggering actions on microservices. 
+Commands are usually used for administrative or operational concerns and should not be used for actions +triggered by ordinary users. There are common commands but also such that are only understood by individual services. +The command interface supports the following communication patterns: + +* Fire-and-forget +* Request-reply + +Technically, the command interface is described by the `Command` endpoint. +This endpoint is available as https://grpc.io/docs/[gRPC] interface and event-driven communication through Kafka. +Request and response message structures are defined using https://developers.google.com/protocol-buffers/[Protocol Buffers] +in the https://github.com/restorecommerce/protos/blob/master/io/restorecommerce/commandinterface.proto[commandinterface.proto] file. +Due to the high variability among all command possible parametes, the `payload` field is defined as a `google.protobuf.Any` message +(see https://github.com/restorecommerce/protos/tree/master/google/protobuf[google] protos), as well as all gRPC response messages. +The `CommandResponse` message is mainly used on Kafka events, as it contains a `services` field, which identifies all services bound to a specific microservice. + +The following common system commands are available (also see below): + +* health_check (microservice health check) +* restore (re-process https://kafka.apache.org/[Apache Kafka] event messages to restore system data) +* reset (reset system data and state) +* version (return runtime version information) +* set_api_key (sets api key on microservices) +* config_update (update configuration on microservices) + +Unimplemented: + +* reconfigure (reload configurations for one or more microservices) + +Note that the provided implementation's commands can be extended or even overriden when it is partially or totally +incompatible with a service's context. 
It is also straightforward to include new commands by extending the given +link:../../../../src/command-interface/index.ts[CommandInterface] class. + +== gRPC Interface + +=== Command + +|=== +|Field |Type |Label |Description + +|name |string |required |name of the command +|payload |`google.protobuf.Any` |optional |command-specific parameters +|=== + +== Common Commands + +=== HealthCheck + +This command allows to retrieve a healt status for a service (note that a restorecommerce microservice may have several +service names bound to it). + +Possible `payload` fields in a request: + +|=== +|Field |Type |Label |Description + +|service |string |required |name of the service to be checked +|=== + +Possible fields in a response: + +|=== +|Field |Type |Label |Description + +|status |string |required |serving status; it can be `SERVING`, `NOT_SERVING` and `UNKNOWN` +|=== + +=== Restore + +This command allows to restore the state of an implementing service, as well as all data managed by that service. +The default implementation checks the configuration files for all DB instances bound to the implementing service +and maps a set of Kafka events to a set of CRUD operations. +These Kafka events are emitted by the service every time a resource is created/ modified in the store. +The same events are processed from a Kafka consumer offset in order to restore all data since a previous a point in time. + +*Note*: this event processing can only be done in the correct order with single partitioned-topics, +as Kafka ensures offset order per-partition. 
+ +Possible `payload` fields in a request: + +|=== +|Field |Type |Label |Description + +|data |[ ]RestoreData |required |list of topics for message re-processing +|=== + +`RestoreData` + +|=== +|Field |Type |Label |Description + +|entity |string |required |The resource's entity name +|base_offset |number |optional |Base offset at which to start the restore process; default is `0` +|ignore_offset |[ ]number |optional |Topic offset values to ignore while restoring +|=== + +=== Reset + +This allows to wipe all data owned by a microservice. +The `chassis-srv` default implementation only supports the chassis ArangoDB database provider as a valid provider. +When `reset` is called, each of the specified resource's DB is truncated. There are no specific parameters either +for the request payload and for the response. + +=== Version + +This command returns the NPM package and Node.js version of the implementing service. + +Response fields: + +|=== +|Field |Type |Label |Description + +|version |string |required |NPM package version +|nodejs |string |required |Node.js version +|=== + +=== SetApiKey + +This command `set_api_key` sets the `authentication:apiKey` on the microservices, the configuration to set the API Key is provided +in the payload of the command. This API Key is used by the https://github.com/restorecommerce/acs-client[`acs-client`] +to override access control checks for https://github.com/restorecommerce/acs-client[`access-control-srv`]. + +=== ConfigUpdate + +This command `config_update` sets the provided configuration on the microservices. The configuration to be set is provided in the +payload of the command. + +=== FlushCache + +This command `flush_cache` is used to flush / invalidate the cache. 
+ +Possible `payload` fields in a request: + +|=== +|Field |Type |Label |Description + +|data |FlushCacheData |required | flush cache payload +|=== + +`FlushCacheData` + +|=== +|Field |Type |Label |Description + +|db_index |number |optional |Redis DB index to be flushed +|pattern |string |optional |Pattern to match redis keys to be flushed +|=== + +== Usage + +See link:../../../../test/command_test.ts[tests]. diff --git a/packages/chassis-srv/docs/modules/ROOT/pages/index.adoc b/packages/chassis-srv/docs/modules/ROOT/pages/index.adoc new file mode 100644 index 00000000..93f642db --- /dev/null +++ b/packages/chassis-srv/docs/modules/ROOT/pages/index.adoc @@ -0,0 +1,177 @@ += Chassis Service + +https://www.npmjs.com/package/@restorecommerce/chassis-srv[image:http://img.shields.io/npm/v/@restorecommerce/chassis-srv.svg?style=flat-square[Version]] +https://travis-ci.org/restorecommerce/chassis-srv?branch=master[image:http://img.shields.io/travis/restorecommerce/chassis-srv/master.svg?style=flat-square[Build Status]] +https://david-dm.org/restorecommerce/chassis-srv[image:https://img.shields.io/david/restorecommerce/chassis-srv.svg?style=flat-square[Dependencies]] +https://coveralls.io/github/restorecommerce/chassis-srv?branch=master[image:http://img.shields.io/coveralls/restorecommerce/chassis-srv/master.svg?style=flat-square[Coverage Status]] + +A base service part of the link:https://github.com/restorecommerce[Restorecommerce]. 
+ +[#features] +== Features + +* Business logic exposable via gRPC +* Retry and timeout logic +* Even sourcing via Kafka messaging +* Endpoint calls with custom middleware +* Primitives for logging, database access, cache handling or exposing system commands +* full text search for ArangoDB + +[#architecture] +== Architecture + +The chassis service consists of the following components: + +* a configuration loader +* a multi-transport configurable log infrastructure +* a base Restorecommerce microservice structure provided by the link:../../../../src/microservice/server.ts[Server] class, +which emits state-related events and can be bound to a number of https://grpc.io/docs/[gRPC] endpoints, +given a https://developers.google.com/protocol-buffers/docs/overview[Protocol Buffer] interface and a transport config +* custom middleware +* a cache-loader based on configuration files +* a provider-based mechanism to access different databases +* a base implementation for a link:https://github.com/restorecommerce/chassis-srv/blob/master/docs/modules/ROOT/pages/command-interface.adoc[command-interface] +* periodic storage for https://kafka.apache.org/[Apache Kafka] topic offsets + +[#configuration] +== Configuration + +- Configs are loaded using the https://github.com/indexzero/nconf[nconf]-based module +https://github.com/restorecommerce/service-config[service-config]. +- Such configuration files may contain endpoint specifications +along with their associated transports or simple access configs for backing services such as a database or even a Kafka instance. +- To remove the buffered data from being logged +link:https://github.com/restorecommerce/chassis-srv/blob/master/test/cfg/config.json#L343[bufferedFields] +configuration can be set. +- To mask the confidential data (such as password) from being logged +link `maskFields` an array of fields can be set in configuration similar to buffer fields. 
+- To avoid a gRPC protobuf error when making a read query for resources which have +oneOf fields defined in the protobuf file, the oneOf fields can be removed from +the request items by providing the oneOfFields like in the service configuration +below: +[source,json] +---- +{ + "oneOfFields": { + "resourceName": { + "oneOfName": [ + "fieldName1", + "fieldName2" + ] + } + } +} +---- + +[#configuration_logging] +=== Logging + +Logging functionality is provided through https://github.com/restorecommerce/logger[logger], +which uses https://github.com/winstonjs/winston[winston]. +Logger output transport, severity levels and other options are configurable. + +Default logging levels are: +- `silly` +- `verbose` +- `debug` +- `info` +- `warn` +- `error` + +[#configuration_server] +=== Server + +A link:https://github.com/restorecommerce/chassis-srv/blob/master/src/microservice/server.ts[Server] instance can provide multiple service endpoints +and emits events related with the microservice's state. +An endpoint is a wrapped gRPC method accessible from any gRPC clients. +It is also possible to configure the Server with number of times a request should be +link:https://github.com/restorecommerce/chassis-srv/blob/master/test/microservice_test.ts#L456[`retried and timeout configurations`]. +Service responses always include a result or an error. When a `Server` is instantiated, +it is possible to bind one or more services to it, each of them exposing its own RPC endpoints +with an associated transport configuration (port, protobuf interfaces, service name, etc). +Note that other transport types beside `gRPC` are theoretically possible, +although that would require an extension of the `Server` class with a custom transport config. + +[#configuration_middleware] +=== Middleware + +Endpoint calls may be intercepted with any number of link:https://github.com/restorecommerce/chassis-srv/blob/master/test/middleware_test.ts[custom chained middlewares]. 
+The request traverses the middleware before reaching the service function. +The middleware can call the next middleware until the last middleware calls the service function. + +[#configuration_cache] +=== Cache + +Multiple cache providers can be registered and loaded within a microservice. +Such providers are managed with https://github.com/BryanDonovan/node-cache-manager[node-cache-manager]. + +[#configuration_database] +=== Database + +The following database providers are implemented: + +* https://www.arangodb.com/documentation/[ArangoDB] +* https://github.com/louischatriot/nedb[NeDB] + +Providers include generic database handling operations (find, insert, upsert delete, truncate, etc). +Query parameter structure for all exposed operations is similar with the structure used in +https://docs.mongodb.com/manual/tutorial/getting-started/[MongoDB] queries. +The ArangoDB provider supports graph database creation and exposes a simple API to manage vertices and edges. +It also provides a flexible traversal method. For more details, see link:https://github.com/restorecommerce/chassis-srv/blob/master/test/graphs_test.ts[graph tests] +and the https://docs.arangodb.com/3.3/HTTP/Gharial/[ArangoDB graphs documentation]. +Database providers can be used as a database abstraction by any service that owns a set of resources. +Furthermore, services can later expose their database operations via gRPC. +Exposure of these operations is easily achieved using the https://github.com/restorecommerce/resource-base-interface[resource-base-interface]. + +[#configuration_full_text_search] +==== Full text search + +Full text search is supported currently for ArangoDB in `chassis-srv`. +A view should be created for each entity with the list of fields to be indexed. +The View definition such as entity to which the view links to and the set of fields to be indexed with supported list of analyzers can be configured in view definition. 
+Currently `ngram` and `pipeline` type analyzers are supported, analyzer options can be changed via configuration file. + +To enable the full text search for entity the collection name and view configuration file should be specified in https://github.com/restorecommerce/chassis-srv/blob/master/test/cfg/config.json#L22[configuration]. +Enable search with `database.arango.arangoSearch` a list containing `collectionName` and `path` to View and Analyzer configuration file + +Refer https://github.com/restorecommerce/chassis-srv/blob/master/test/views/users_view.json#[user view] configuration for test View and Analyzer configuration. + +The following Analyzer specific configuration properties are available : + +* `analyzers` [`string [ ]`]: supported analyzers list +* `analyzerOptions.type` [`string`]: type of analyzer currently `ngram` and `pipeline` are supported +* `analyzerOptions.properties.min` [`number`]: minimum n-gram size to match from search string +* `analyzerOptions.properties.max` [`number`]: maximum n-gram size to match from search string +* `analyzerOptions.properties.preserveOriginal` [`boolean`]: `true` to include the original value as well, `false` to produce the n-grams based on min and max only +* `analyzerOptions.properties.startMarker` [`string, optional`]: this value will be prepended to n-grams which include the beginning of the input. Can be used for matching prefixes. Choose a character or sequence as marker which does not occur in the input. +* `analyzerOptions.properties.endMarker` [`string, optional`]: this value will be appended to n-grams which include the end of the input. Can be used for matching suffixes. Choose a character or sequence as marker which does not occur in the input. +* `analyzerOptions.properties.streamType` [`string, optional`]: type of the input stream `binary` one byte is considered as one character (default) `utf8` one Unicode codepoint is treated as one character. 
+ +The following View specific configuration properties are available: + +* `view.CollectionName` [`string`]: collection name to which view links to. +* `view.viewName` [`string`]: View name +* `view.similarityThreshold` [`number`]: to assess the similarity of longer strings that share subsequences value betwen `0.0` and `1.0` +* `view.options`: View Options containing list of `fields` to be indexed with applicable `analyzers` for each field. + +Refer test for further details https://github.com/restorecommerce/chassis-srv/blob/master/test/database.spec.ts#L525[full text search tests]. + +[#configuration_command_interface] +=== Command Interface + +An interface for system commands (useful information retrieval, system control, etc) is also provided. +For more details about all implemented operations please refer +link:https://github.com/restorecommerce/chassis-srv/blob/master/docs/modules/ROOT/pages/command-interface.adoc[command-interface]. +This interface can be directly exposed as a gRPC endpoint and it can be extended by a microservice for custom functionality. + +[#configuration_offset_store] +=== Offset Store + +This stores the offset values for each Kafka topic within each microservice at a fixed interval +to a https://redis.io/[Redis] database. +Such intervals are configurable through the `offsetStoreInterval` configuration value. +The offset values are stored with key `{kafka:clientId}:{topicName}`. +In case of a service failure, a microservice can then read the last offset it stored before crashing and thus +consume all pending messages since that moment. +This feature can be disabled if the `latestOffset` configuration value is set to `true` - in this case, +the service subscribes to the latest topic offset value upon system restart. 
diff --git a/packages/chassis-srv/eslint.config.mjs b/packages/chassis-srv/eslint.config.mjs new file mode 100644 index 00000000..b5d8bfe7 --- /dev/null +++ b/packages/chassis-srv/eslint.config.mjs @@ -0,0 +1,22 @@ +// @ts-check + +import eslint from '@eslint/js'; +import { RuleTester } from 'eslint'; +import tseslint from 'typescript-eslint'; + +const rules = tseslint.config( + eslint.configs.recommended, + ...tseslint.configs.recommended, +); + +rules.push( + { + "rules": { + "@typescript-eslint/no-explicit-any": "off", + "@typescript-eslint/no-unused-vars": "off", + "prefer-rest-params": "off", + } + } +); + +export default rules; diff --git a/packages/chassis-srv/package.json b/packages/chassis-srv/package.json new file mode 100644 index 00000000..96dc4e8d --- /dev/null +++ b/packages/chassis-srv/package.json @@ -0,0 +1,79 @@ +{ + "name": "@restorecommerce/chassis-srv", + "version": "1.6.8", + "description": "Restore Commerce microservice chassis", + "main": "./lib/index.js", + "author": "n-fuse GmbH", + "repository": { + "type": "git", + "url": "https://github.com/restorecommerce/chassis-srv.git" + }, + "license": "MIT", + "keywords": [ + "restore", + "commerce", + "microservice", + "chassis" + ], + "type": "module", + "typings": "lib/index.d.ts", + "dependencies": { + "@restorecommerce/grpc-client": "^2.2.7", + "@restorecommerce/kafka-client": "^1.2.28", + "@restorecommerce/logger": "^1.3.3", + "@restorecommerce/rc-grpc-clients": "^5.1.50", + "@restorecommerce/service-config": "^1.1.1", + "abort-controller-x": "^0.4.3", + "arangojs": "^9.1.0", + "async": "^3.2.6", + "async-retry": "^1.3.3", + "cache-manager": "^6.1.3", + "cls-hooked": "^4.2.2", + "cls-rtracer": "^2.6.3", + "google-protobuf": "^3.21.4", + "kafkajs": "^2.2.4", + "keyv": "^5.2.1", + "lodash": "^4.17.21", + "long": "^5.2.3", + "lru-cache": "^11.0.2", + "nedb": "^1.8.0", + "nice-grpc": "^2.1.10", + "nice-grpc-server-reflection": "^2.0.12", + "protobufjs": "^7.4.0", + "redis": "^4.7.0" + }, + 
"devDependencies": { + "@restorecommerce/dev": "^0.0.13", + "@types/async": "^3.2.6", + "@types/async-retry": "^1.4.9", + "@types/cache-manager": "^4.0.6", + "@types/lodash": "^4.17.13", + "@types/nedb": "^1.8.16", + "@types/node": "^22.9.1", + "@typescript-eslint/parser": "^8.15.0", + "@vitest/coverage-v8": "^3.2.4", + "cross-env": "^7.0.3", + "eslint": "^9.15.0", + "npm-run-all": "^4.1.5", + "rimraf": "^6.0.1", + "rxjs": "^7.8.1", + "should": "^13.2.3", + "typescript": "^5.6.3", + "typescript-eslint": "^8.15.0", + "vitest": "^3.2.4" + }, + "scripts": { + "lint": "eslint src", + "test": "vitest run", + "build:tsc": "tsc -d", + "build:clean": "rimraf lib", + "build": "npm-run-all lint build:clean build:tsc" + }, + "browser": { + "fs": false, + "path": false + }, + "engines": { + "node": ">= 20.0.0" + } +} diff --git a/packages/chassis-srv/src/cache/index.ts b/packages/chassis-srv/src/cache/index.ts new file mode 100644 index 00000000..d3ec0df8 --- /dev/null +++ b/packages/chassis-srv/src/cache/index.ts @@ -0,0 +1,55 @@ +import * as _ from 'lodash'; +import { createCache } from 'cache-manager'; +import { Keyv } from 'keyv'; +import { LRUCache } from 'lru-cache'; +import { Logger } from 'winston'; +import { ObjectEncodingOptions } from 'fs'; + +/** + * A key, value map containing cache providers. + * Cache providers are registered with the register function. + */ +const providers: Record = {}; + +/** + * Register a cache provider. + * Providers need to be compatible to the node-cache-manager module. + * https://github.com/BryanDonovan/node-cache-manager + * @param {string} name cache provider identifier + * @param {constructor} provider cache store constructor + */ +export const register = (name: string, provider: any): void => { + providers[name] = provider; +}; + +// register defaults +// add memory provider by default, since it is included with the cache-manager. 
+register('memory', (config: any, logger: any) => { + const options = { + max: config?.max || 500, + dispose: config?.dispose, + allowStale: config?.allowStale, + ttl: config?.ttl || 5000, + }; + const lruCache = new LRUCache(options); + const keyv = new Keyv({ store: lruCache }); + const cache = createCache({ stores: [keyv] }); + return cache; +}); + +/** + * Get a new cache instance. + * @param {Object} config Cache configuration + * @param [Logger] logger + * @return Cache instance + */ +export const get = (config: any, logger: Logger): any => { + if (_.isNil(config)) { + throw new Error('missing argument config'); + } + + const providerName = config[0].provider; + const provider = providers[providerName]; + const cache = provider(config, logger); + return cache; +}; diff --git a/packages/chassis-srv/src/command-interface/index.ts b/packages/chassis-srv/src/command-interface/index.ts new file mode 100644 index 00000000..f6cc6674 --- /dev/null +++ b/packages/chassis-srv/src/command-interface/index.ts @@ -0,0 +1,769 @@ +import * as _ from 'lodash'; +import { Server } from './../microservice/server.js'; +import * as database from './../database/index.js'; +import { Events, Topic, registerProtoMeta, Kafka } from '@restorecommerce/kafka-client'; +import { EventEmitter } from 'events'; +import * as async from 'async'; +import { Logger } from 'winston'; +import { RedisClientType } from 'redis'; +import { + CommandRequest, + CommandInterfaceServiceImplementation, + protoMetadata +} from '@restorecommerce/rc-grpc-clients/dist/generated-server/io/restorecommerce/commandinterface.js'; +import { CallContext } from 'nice-grpc'; +import { + HealthCheckResponse_ServingStatus +} from '@restorecommerce/rc-grpc-clients/dist/generated-server/grpc/health/v1/health.js'; + +// For some reason this is required +import {randomBytes } from 'crypto'; + +registerProtoMeta(protoMetadata); + +interface RestoreData { + base_offset: number; + ignore_offset: number[]; + entity: string; // 
resource name +} + +interface FlushCacheData { + db_index?: number; + pattern?: string; +} + +/** + * Base implementation. + * Currently includes: + * * 'check' - returns UNKNOWN, SERVING or NOT_SERVING + * * 'version' - returns NPM service version and Node.js version + * * 'reset' - truncated all DB instances specified in config files + * * 'restore' - re-reads Kafka events to restore a set of ArangoDB collections' data + * Unimplemented: + * * reconfigure + * + * In case of custom data/events handling or service-specific operations regarding + * a certain method, such method should be extended or overriden. + */ +export class CommandInterface implements CommandInterfaceServiceImplementation { + public logger: Logger; + config: any; + health: any; + service: any; + kafkaEvents: Events; + commands: any; + commandTopic: Topic; + bufferedCollection: Map; + redisClient: RedisClientType; + + constructor(server: Server, config: any, logger: Logger, events: Events, redisClient: RedisClientType) { + if (_.isNil(events)) { + if (logger.error) { + logger.error('No Kafka client was provided. 
Disabling all commands.'); + return; + } + } + + this.config = config; + this.logger = logger; + this.redisClient = redisClient; + + if (!this.config.get('events:kafka:topics:command')) { + throw new Error('Commands topic configuration was not provided.'); + } + + this.kafkaEvents = events; + + // Health + this.health = { + status: HealthCheckResponse_ServingStatus.UNKNOWN, + }; + this.service = {}; + server.on('bound', (serviceName) => { + this.service[serviceName] = { + bound: true, + transport: {}, + }; + this.health.status = HealthCheckResponse_ServingStatus.NOT_SERVING; + }); + server.on('serving', (transports) => { + this.health.status = HealthCheckResponse_ServingStatus.SERVING; + _.forEach(transports, (transport, transportName) => { + _.forEach(this.service, (srv, serviceName) => { + this.service[serviceName].transport[transportName] = HealthCheckResponse_ServingStatus.SERVING; + }); + }); + }); + server.on('stopped', (transports) => { + this.health.status = HealthCheckResponse_ServingStatus.NOT_SERVING; + _.forEach(transports, (transport, transportName) => { + _.forEach(this.service, (srv, serviceName) => { + this.service[serviceName].transport[transportName] = HealthCheckResponse_ServingStatus.NOT_SERVING; + }); + }); + }); + + // list of available commands + this.commands = { + reset: this.reset, + restore: this.restore, + reconfigure: this.reconfigure, + health_check: this.check, + version: this.version, + config_update: this.configUpdate, + set_api_key: this.setApiKey, + flush_cache: this.flushCache + }; + const topicCfg = config.get('events:kafka:topics:command'); + + events.topic(topicCfg.topic).then(topic => this.commandTopic = topic).catch(err => { + this.logger.error('Error occurred while retrieving command kafka topic', { + code: err.code, + message: err.message, + stack: err.stack + }); + }); + + // check for buffer fields + const buffFields = this.config.get('fieldHandlers:bufferFields') ?? 
{}; + this.bufferedCollection = new Map( + Object.entries(buffFields) + ); + this.logger.info('Buffered collections are:', this.bufferedCollection); + } + + /** + * Generic command operation, which demultiplexes a command by its name and parameters. + */ + public async command(request: CommandRequest, context: CallContext): Promise<{ typeUrl?: string; value?: Buffer }> { + if (_.isNil(request) || _.isNil(request.name)) { + return this.encodeMsg({ + error: { + code: 400, + message: 'No command name provided', + } + }); + } + + if (_.isNil(this.commands[request.name])) { + return this.encodeMsg({ + error: { + code: 400, + message: `Command name ${request.name} does not exist` + } + }); + } + + const payload = request.payload ? this.decodeMsg(request.payload) : null; + + // calling operation bound to the command name + const result = await Promise.resolve(this.commands[request.name].apply(this, [payload])).catch(err => ({ + error: { + code: 404, + message: err.message + } + })); + + return this.encodeMsg(result); + } + + /** + * Reconfigure service + * @param call + * @param context + */ + reconfigure(): any { + this.logger.info('reconfigure is not implemented'); + return { + error: { + code: 501, + message: 'reconfigure is not implemented', + } + }; + } + + /** + * Restore the system by re-reading Kafka messages. + * This base implementation restores documents from a set of + * ArangoDB database collections, using the chassis-srv database provider. 
+ * @param topics list of Kafka topics to be restored + */ + async restore(payload: any): Promise { + if (_.isEmpty(payload) || _.isEmpty(payload.data)) { + // throw new errors.InvalidArgument('Invalid payload for restore command'); + return { + error: { + code: 400, + message: 'Invalid payload for restore command' + } + }; + } + + const restoreData: RestoreData[] = payload.data || []; + + // the Kafka config should contains a key-value pair, mapping + // a label with the topic's name + const kafkaEventsCfg = this.config.get('events:kafka'); + const kafkaCfg = this.config.get('events:kafka:topics'); + if (_.isNil(kafkaCfg) || kafkaCfg.length == 0) { + return { + error: { + code: 500, + message: 'Kafka topics config not available' + } + }; + } + + const topicLabels = _.keys(kafkaCfg).filter((elem, index) => { + return elem.includes('.resource'); + }).map((elem) => { + return elem.replace('.resource', ''); + }); + + const restoreSetup: Record = {}; + const restoreEventSetup: any = {}; + + restoreData.forEach((data) => { + const ignoreOffset = (data.ignore_offset || []).filter((offset) => { + const isNumber = Number(offset); + if (!isNumber) { + this.logger.warn(`Invalid value for "ignore_offset" parameter in restore: ${offset}`); + } + return isNumber; + }); + restoreSetup[data.entity] = { + baseOffset: Number(data.base_offset) || 0, + ignoreOffset + }; + }); + + const restoreCollections = _.keys(restoreSetup); + + try { + const dbCfgs = this.config.get('database'); + const dbCfgNames = _.keys(dbCfgs); + for (let i = 0; i < dbCfgNames.length; i += 1) { + const dbCfgName = dbCfgNames[i]; + const dbCfg = dbCfgs[dbCfgName]; + const collections = dbCfg.collections; + let graphName, edgeConfigDefs; + if (this.config.get('graph')) { + graphName = this.config.get('graph:graphName'); + edgeConfigDefs = this.config.get('graph:edgeDefinitions'); + } + const db = await database.get(dbCfg, this.logger, graphName, edgeConfigDefs); + + if (_.isNil(collections)) { + 
this.logger.warn('No collections found on DB config'); + return {}; + } + + let intersection: string[] = _.intersection(restoreCollections, collections); + if (intersection.length > 0) { + intersection = _.intersection(intersection, topicLabels); + for (const resource of intersection) { + const topicName = kafkaCfg[`${resource}.resource`].topic; + restoreEventSetup[topicName] = { + topic: await this.kafkaEvents.topic(topicName), + events: this.makeResourcesRestoreSetup(db, resource), + baseOffset: restoreSetup[resource].baseOffset, + ignoreOffset: restoreSetup[resource].ignoreOffset + }; + } + } + } + + if (_.isEmpty(restoreEventSetup)) { + this.logger.warn('No data was setup for the restore process.'); + } else { + const logger = this.logger; + const kafkaEvents = this.kafkaEvents; + const config = this.config; + const service = this.service; + const encodeMsg = this.encodeMsg.bind(this); + const commandTopic = this.commandTopic; + const startToReceiveRestoreMessages = this.startToReceiveRestoreMessages.bind(this); + // Start the restore process + this.logger.warn('restoring data'); + + for (const topicName in restoreEventSetup) { + const topicSetup: any = restoreEventSetup[topicName]; + const restoreTopic: Topic = topicSetup.topic; + const topicEvents: any = topicSetup.events; + + // saving listeners for potentially subscribed events on this topic, + // so they don't get called during the restore process + const previousEvents: string[] = _.cloneDeep(restoreTopic.subscribed); + const listenersBackup = new Map(); + for (const event of previousEvents) { + listenersBackup.set(event, (restoreTopic.emitter as EventEmitter).listeners(event)); + await restoreTopic.removeAllListeners(event); + } + + // const eventNames = _.keys(restoreTopic.events); + const baseOffset: number = topicSetup.baseOffset; + const targetOffset: bigint = (await restoreTopic.$offset(BigInt(-1))) - BigInt(1); + const ignoreOffsets: number[] = topicSetup.ignoreOffset; + const eventNames = 
_.keys(topicEvents); + + this.logger.debug(`topic ${topicName} has current offset ${targetOffset}`); + + const restoreGroupId = kafkaEventsCfg.groupId + '-restore-' + randomBytes(32).toString('hex'); + + const consumer = await (this.kafkaEvents.provider as Kafka).newConsumer(restoreGroupId); + + let messageStream: { + close(): Promise; + }; + + const drainEvent = (message: any, done: any) => { + const msg = message.value; + const eventName = message.key.toString(); + const context = _.pick(message, ['offset', 'partition', 'topic']); + const eventListener = topicEvents[message.key]; + // decode protobuf + let decodedMsg = kafkaEvents.provider.decodeObject(kafkaEventsCfg, eventName, msg); + decodedMsg = _.pick(decodedMsg, _.keys(decodedMsg)); // preventing protobuf.js special fields + eventListener(decodedMsg, context, config.get(), eventName).then(() => { + done(); + }).catch((err: any) => { + logger.error(`Exception caught invoking restore listener for event ${eventName}`, { code: err.code, message: err.message, stack: err.stack }); + done(err); + }); + + if (message.offset >= targetOffset) { + for (const event of eventNames) { + restoreTopic.removeAllListeners(event).then(() => { }).catch((err) => { + logger.error('Error removing listeners after restore', { code: err.code, message: err.message, stack: err.stack }); + }); + } + for (const event of previousEvents) { + const listeners = listenersBackup.get(event); + for (const listener of listeners) { + restoreTopic.on(event, listener).then(() => { }).catch((err) => { + logger.error('Error subscribing to listeners after restore', { code: err.code, message: err.message, stack: err.stack }); + }); + } + } + + messageStream.close().then(() => { + return consumer.close(true); + }).then(() => { + this.kafkaEvents.provider.admin.deleteGroups({ + groups: [restoreGroupId] + }).then(() => { + logger.debug('restore kafka group deleted'); + const msg = { + topic: topicName, + offset: Number(message.offset) + }; + 
commandTopic.emit('restoreResponse', { + services: _.keys(service), + payload: encodeMsg(msg) + }).then(() => { + logger.info('Restore response emitted'); + }).catch((err) => { + logger.error('Error emitting command response', { code: err.code, message: err.message, stack: err.stack }); + }); + logger.info('restore process done'); + }).catch((err: any) => { + logger.error('Error deleting restore kafka group', { code: err.code, message: err.message, stack: err.stack }); + }); + }).catch(err => { + logger.error('Error stopping consumer', { code: err.code, message: err.message, stack: err.stack }); + }); + } + }; + + const asyncQueue = startToReceiveRestoreMessages(restoreTopic, drainEvent); + + await consumer.connectToBrokers().then(() => { + logger.info(`Consumer for topic '${topicName}' connected`); + }).catch((err: any) => { + logger.error('error connecting consumer', { code: err.code, message: err.message, stack: err.stack }); + }); + + consumer.consume({ + sessionTimeout: 10000, + heartbeatInterval: 500, + topics: [topicName], + mode: 'manual', + offsets: [{ + topic: topicName, + partition: 0, + offset: BigInt(baseOffset) + }], + }).then(stream => { + logger.info(`Consumer for topic '${topicName}' subscribed`); + + messageStream = stream; + + stream.on('data', (message) => { + if (message.key.toString() in topicEvents && !_.includes(ignoreOffsets, Number(message.offset))) { + asyncQueue.push(message); + logger.debug(`received message ${message.offset}/${targetOffset}`); + } + }); + }).catch((err: any) => { + logger.error(`Consumer for topic '${topicName}' failed to run`, { code: err.code, message: err.message, stack: err.stack }); + throw err; + }); + } + + this.logger.debug('waiting until all messages are processed'); + } + } catch (err: any) { + this.logger.error('Error occurred while restoring the system', { code: err.code, message: err.message, stack: err.stack }); + await this.commandTopic.emit('restoreResponse', { + services: _.keys(this.service), + 
payload: this.encodeMsg({ + error: err.message + }) + }); + } + + return {}; + } + + private startToReceiveRestoreMessages(restoreTopic: Topic, + drainEvent: (msg: any, err: any) => any): any { + const asyncQueue = async.queue((msg, done) => { + setImmediate(() => drainEvent(msg, (err: any) => { + if (err) { + done(err); + } else { + done(); + } + })); + }, 1); + + asyncQueue.drain(() => { + // commit state first, before resuming + this.logger.verbose('Committing offsets upon async queue drain'); + restoreTopic.commitCurrentOffsets().then(() => { + this.logger.info('Offset committed successfully'); + }); + }); + + this.logger.info('Async queue draining started.'); + return asyncQueue; + } + + /** + * Reset system data related to a service. Default implementation truncates + * a set of ArangoDB instances, using the chassis-srv database provider. + */ + async reset(): Promise { + this.logger.info('reset process started'); + if (this.health.status !== HealthCheckResponse_ServingStatus.NOT_SERVING) { + this.logger.warn('reset process starting while server is serving'); + } + + let errorMsg = null; + try { + const dbCfgs = this.config.get('database'); + const dbCfgNames = _.keys(dbCfgs); + for (let i = 0; i < dbCfgNames.length; i += 1) { + const dbCfgName = dbCfgNames[i]; + const dbCfg = dbCfgs[dbCfgName]; + const db = await database.get(dbCfg, this.logger); + switch (dbCfg.provider) { + case 'arango': + await db.truncate(); + this.logger.info(`arangodb ${dbCfg.database} truncated`); + break; + default: + this.logger.error( + `unsupported database provider ${dbCfg.provider} in database config ${dbCfgName}`); + break; + } + } + } catch (err: any) { + this.logger.error('Unexpected error while resetting the system', { code: err.code, message: err.message, stack: err.stack }); + errorMsg = err.message; + } + + const eventObject = { + services: _.keys(this.service), + payload: null as any + }; + + if (errorMsg) { + eventObject.payload = this.encodeMsg({ + error: errorMsg + 
}); + } else { + eventObject.payload = this.encodeMsg({ + status: 'Reset concluded successfully' + }); + } + await this.commandTopic.emit('resetResponse', eventObject); + + this.logger.info('reset process ended'); + + if (errorMsg) { + return { + error: errorMsg + }; + } + return { + status: 'Reset concluded successfully' + }; + } + + /** + * Check the service status + */ + async check(payload: { service?: string }): Promise<{ + status: HealthCheckResponse_ServingStatus; + }> { + if (_.isNil(payload)) { + throw new Error('Invalid payload for restore command'); + } + const serviceName = payload.service; + + if (_.isNil(serviceName) || _.size(serviceName) === 0) { + return { + status: this.health.status, + }; + } + const service = this.service[serviceName]; + if (_.isNil(service)) { + const errorMsg = 'Service ' + serviceName + ' does not exist'; + this.logger.warn(errorMsg); + throw new Error(errorMsg); + } + let status = HealthCheckResponse_ServingStatus.UNKNOWN; + // If one transports serves the service, set it to SERVING + _.forEach(service.transport, (transportStatus) => { + if (transportStatus === HealthCheckResponse_ServingStatus.SERVING) { + status = transportStatus; + } + }); + return { + status, + }; + } + + /** + * Retrieve current NPM package and Node version of service + */ + async version(): Promise { + const response = { + nodejs: process.version, + version: process.env.npm_package_version, + }; + await this.commandTopic.emit('versionResponse', { + services: _.keys(this.service), + payload: this.encodeMsg(response) + }); + return response; + } + + /** + * Update config for acs-client to disable it + * @param payload JSON object containing key value pairs for configuration + */ + async configUpdate(payload: any): Promise { + if (_.isNil(payload)) { + return { + error: { + code: 400, + message: 'Invalid payload for configUpdate command' + } + }; + } + let response; + try { + const configProperties = Object.keys(payload); + for (const key of 
configProperties) { + this.config.set(key, payload[key]); + } + response = { + status: 'Configuration updated successfully' + }; + await this.commandTopic.emit('configUpdateResponse', { + services: _.keys(this.service), + payload: this.encodeMsg(response) + }); + } catch (error: any) { + this.logger.error('Error executing configUpdate Command', { code: error.code, message: error.message, stack: error.stack }); + response = error.message; + } + return response; + } + + /** + * Sets provided authentication apiKey on configuration + * @param payload JSON object containing key value pairs for authentication apiKey + */ + async setApiKey(payload: any): Promise { + if (_.isNil(payload)) { + return { + error: { + code: 400, + message: 'Invalid payload for setApiKey command' + } + }; + } + let response; + try { + const configProperties = Object.keys(payload); + for (const key of configProperties) { + this.config.set(key, payload[key]); + } + response = { + status: 'ApiKey set successfully' + }; + await this.commandTopic.emit('setApiKeyResponse', { + services: _.keys(this.service), + payload: this.encodeMsg(response) + }); + } catch (err: any) { + this.logger.error('Error executing setApiKey Command', { code: err.code, message: err.message, stack: err.stack }); + response = err.message; + } + + return response; + } + + /** + * Flush the cache based on DB index and prefix passed, if no dbIndex is passed + * then the complete Cache is flushed. 
+ * + * @param prefix An optional prefix to flush instead of entire cache + */ + async flushCache(payload: any): Promise { + let flushCachePayload: FlushCacheData; + if (payload && payload.data) { + flushCachePayload = payload.data; + } + let dbIndex, pattern, response; + if (flushCachePayload) { + dbIndex = flushCachePayload.db_index; + pattern = flushCachePayload.pattern; + } + if (dbIndex === undefined || !dbIndex) { + dbIndex = 0; + } + + // select the particular dbIndex + await this.redisClient.select(dbIndex); + try { + if (pattern != undefined) { + const flushPattern = '*' + pattern + '*'; + this.logger.debug('Flushing cache wiht pattern', { dbIndex, flushPattern }); + let scanIterator; + try { + scanIterator = this.redisClient.scanIterator({ MATCH: flushPattern, COUNT: 100 }); + for await (const key of scanIterator) { + await this.redisClient.del(key); + } + this.logger.debug(`Successfully flushed cache pattern ${flushPattern}`); + response = { + status: 'Successfully flushed cache pattern' + }; + } catch (err: any) { + this.logger.error('Error creating stream / pipeline in Redis', { code: err.code, message: err.message, stack: err.stack }); + response = err.message; + } + } else { + this.logger.debug('Flushing cache', { dbIndex }); + if (dbIndex || dbIndex === 0) { + // Flush all keys in the given dbIndex (flushDB) + await this.redisClient.flushDb(); + response = { + status: `Successfully flushed cache with DB index ${dbIndex}` + }; + this.logger.debug('Successfully flushed cache with DB index', { dbIndex }); + } else { + // Flush Complete Redis Cache (flushAll) + await this.redisClient.flushAll(); + response = { + status: 'Successfully flushed complete cache' + }; + this.logger.debug('Successfully flushed complete cache'); + } + } + } catch (err: any) { + this.logger.error('Error flushing Redis Cache', { code: err.code, message: err.message, stack: err.stack }); + response = err.message; + } + await this.commandTopic.emit('flushCacheResponse', { + 
services: _.keys(this.service), + payload: this.encodeMsg(response) + }); + return response; + } + + // Helper functions + + /** + * Generic resource restore setup. + * @param db + * @param resource + */ + makeResourcesRestoreSetup(db: any, resource: string): any { + const decodeBufferField = this.decodeBufferField.bind(this); + return { + [`${resource}Created`]: async function restoreCreated(message: any, + ctx: any, config: any, eventName: string): Promise { + decodeBufferField(message, resource); + await db.insert(`${resource}s`, message); + return {}; + }, + [`${resource}Modified`]: async function restoreModified(message: any, + ctx: any, config: any, eventName: string): Promise { + decodeBufferField(message, resource); + await db.update(`${resource}s`, { id: message.id }, _.omitBy(message, _.isNil)); + + return {}; + }, + [`${resource}Deleted`]: async function restoreDeleted(message: any, + ctx: any, config: any, eventName: string): Promise { + await db.delete(`${resource}s`, { id: message.id }); + return {}; + } + }; + } + + /** + * Check if the message contains buffered field, if so decode it. 
+ * @param message + * @param collectionName + */ + decodeBufferField(message: any, resource: string): any { + if (this.bufferedCollection.has(resource)) { + const bufferField = this.bufferedCollection.get(resource); + // check if received message contains buffered data, if so + // decode the bufferField and store in DB + if (message[bufferField] && message[bufferField].value) { + message[bufferField] = JSON.parse(message[bufferField].value.toString()); + } + } + } + + /** + * + * @param msg google.protobuf.Any + * @returns Arbitrary JSON + */ + decodeMsg(msg: any): any { + return JSON.parse(Buffer.from(msg.value).toString()); + } + + /** + * + * @param msg Arbitrary JSON + * @returns google.protobuf.Any formatted message + */ + encodeMsg(msg: any): any { + if (msg) { + return { + type_url: 'payload', + value: Buffer.from(JSON.stringify(msg)) + }; + } + } + +} diff --git a/packages/chassis-srv/src/config/index.ts b/packages/chassis-srv/src/config/index.ts new file mode 100644 index 00000000..9531b150 --- /dev/null +++ b/packages/chassis-srv/src/config/index.ts @@ -0,0 +1,30 @@ +import {createServiceConfig, ServiceConfig} from '@restorecommerce/service-config'; + +// singleton +let config: ServiceConfig; + +/** + * Loads the configuration and stores it in the config singleton. + * @param {string} baseDir Directory which contains the folder cfg with the config files. + * @param [Logger] logger + */ +export const load = async(baseDir: string, logger?: any): Promise => { + return new Promise((resolve, reject) => { + config = createServiceConfig(baseDir, {logger}); + resolve(config); + }); +}; + +/** + * Get config from singleton. + * If singleton is undefined load id from current working directory. 
+ * @param [Logger] logger + * @return {Object} nconf configuration object + */ +export const get = async(logger?: any): Promise => { + if (config) { + return config; + } + config = await load(process.cwd(), logger); + return config; +}; diff --git a/packages/chassis-srv/src/database/index.ts b/packages/chassis-srv/src/database/index.ts new file mode 100644 index 00000000..53074d66 --- /dev/null +++ b/packages/chassis-srv/src/database/index.ts @@ -0,0 +1,87 @@ +import { Logger } from 'winston'; +import { TraversalResponse } from './provider/arango/interface.js'; +import { + Vertices, + Collection, + Options as TraversalOptions, + Filters as GraphFilters, +} from '@restorecommerce/rc-grpc-clients/dist/generated-server/io/restorecommerce/graph.js'; + +/** + * A key, value map containing database providers. + * Database providers are registered with the register function. + */ +const databases: any = {}; + +/** + * Register a database provider. + * + * @param {string} name database provider identifier + * @param {constructor} provider database provider constructor function + */ +export const register = (name: string, provider: any): any => { + databases[name] = provider; +}; + +import { create as arangodb } from './provider/arango/index.js'; +import { create as nedb } from './provider/nedb/index.js'; + +// Add default providers +register('arango', arangodb); +register('nedb', nedb); + +/** + * Get a new database connection. + * @param {Object} config Database configuration. + * @param [Logger] logger + * @return {Promise} New, active and ready database connection. 
+ */ +export const get = async (config: any, logger?: Logger, graphName?: string, edgeConfig?: any): Promise => { + const db = databases[config.provider]; + if (!db) { + throw new Error(`database provider ${config.provider} does not exist`); + } + return db(config, logger, graphName, edgeConfig); +}; + +export interface DatabaseProvider { + insert(collectionName: string, documents: any): Promise>; + find(collectionName: string, filter?: any, options?: any): Promise>; + findByID(collectionName: string, ids: string | string[], options?: any): Promise>; + update(collectionName: string, documents: any): Promise>; + upsert(collectionName: string, documents: any): Promise>; + delete(collectionName: string, ids: string[]): Promise>; + count(collectionName: string, filter?: any): Promise; + truncate(collectionName?: string): Promise; + registerCustomQuery?: (name: string, script: string, type?: string) => any; + unregisterCustomQuery?: (name: string) => any; + listCustomQueries?: () => Array; + deleteAnalyzer(analyzerName: string[]): Promise; + dropView(viewName: string[]): Promise; +} + + +export interface GraphDatabaseProvider extends DatabaseProvider { + createGraphDB(graphName: string, edgeDefinitions: any, options: object): any; + createVertex(collectionName: string, data: any): any; + getVertex(collectionName: string, documentHandle: string): any; + removeVertex(collectionName: string, documentHandles: string | string[]): any; + getVertexCollection(collectionName: string): any; + listVertexCollections(excludeOrphans?: boolean): any; + getAllVertexCollections(excludeOrphans?: boolean): any; + addVertexCollection(collectionName: string): any; + removeVertexCollection(collectionName: string, dropCollection?: boolean): any; + getGraphDB(): any; + createEdge(collectionName: string, data: object, fromId?: string, toId?: string): any; + getEdge(collectionName: string, documentHandle: string): any; + getAllEdgesForVertice(collectionName: string, documentHandle: string): any; 
+ getInEdges(collectionName: string, documentHandle: string): any; + getOutEdges(collectionName: string, documentHandle: string): any; + traversal(startVertex: Vertices, collectionName: Collection, opts: TraversalOptions, + filters?: GraphFilters[]): Promise; + addEdgeDefinition(collectionName: string, fromVertice: object | [object], + toVertice: object | [object]): any; + removeEdgeDefinition(definitionName: string, dropCollection?: boolean): any; + listGraphs(): any; + removeEdge(collectionName: string, documentHandle: string): any; +} diff --git a/packages/chassis-srv/src/database/provider/arango/base.ts b/packages/chassis-srv/src/database/provider/arango/base.ts new file mode 100644 index 00000000..7c3c1621 --- /dev/null +++ b/packages/chassis-srv/src/database/provider/arango/base.ts @@ -0,0 +1,648 @@ +import { Database, aql } from 'arangojs'; +import * as _ from 'lodash'; +import { + buildFilter, buildSorter, buildLimiter, buildReturn, + sanitizeInputFields, query, sanitizeOutputFields +} from './common.js'; +import { DatabaseProvider } from '../../index.js'; +import { ViewAnalyzerOptions, ViewMap } from './interface.js'; +import { type Logger } from '@restorecommerce/logger'; + +export interface CustomQuery { + code: string; // AQL code + // filter - combinable with the generic `find` query + // query - standalone + type: 'filter' | 'query'; +} +/** + * ArangoDB database provider. + */ +export class Arango implements DatabaseProvider { + public readonly customQueries = new Map(); + public readonly collectionNameAnalyzerMap = new Map(); + /** + * + * @param {Object} conn Arangojs database connection. + */ + constructor( + public readonly db: Database, + public readonly logger?: Logger, + ) {} + + /** + * Find documents based on filter. + * + * @param {String} collectionName Collection name + * @param {Object} filter Key, value Object + * @param {Object} options options.limit, options.offset + * @return {Promise} Promise for list of found documents. 
+ */ + async find(collectionName: string, filter?: any, options?: any): Promise { + if (_.isNil(collectionName) || !_.isString(collectionName) || + _.isEmpty(collectionName)) { + this.logger?.error('invalid or missing collection argument for find operation'); + throw new Error('invalid or missing collection argument for find operation'); + } + + let filterQuery: any = filter ?? {}; + const opts = options ?? {}; + let filterResult: any; + let bindVars: any; + + let customFilter = ''; + // checking if a custom query should be used + if (!_.isEmpty(opts.customQueries)) { + for (const queryName of opts.customQueries) { + if (!this.customQueries.has(queryName)) { + this.logger?.error(`custom query ${query} not found`); + throw new Error(`custom query ${query} not found`); + } + const customQuery = this.customQueries.get(queryName); + if (customQuery.type == 'query') { + // standalone query + const result = await query(this.db, collectionName, customQuery.code, opts.customArguments || {}); // Cursor object + return result.all(); // TODO: paginate + } else { + // filter + customFilter += ` ${customQuery.code} `; + } + } + } + + if (!_.isArray(filterQuery)) { + filterQuery = [filterQuery]; + } + if (_.isEmpty(filterQuery[0])) { + filterQuery = true; + } + else { + filterResult = buildFilter(filterQuery); + filterQuery = filterResult.q; + } + + let sortQuery = buildSorter(opts); + const limitQuery = buildLimiter(opts); + const returnResult = buildReturn(opts); + let returnQuery = returnResult.q; + // return complete node in case no specific fields are specified + if (_.isEmpty(returnQuery)) { + returnQuery = 'RETURN node'; + } + // if search options are set build search query + let searchQuery; + if (opts?.search?.search) { + const searchString = JSON.stringify(options.search.search); + if (this.collectionNameAnalyzerMap && this.collectionNameAnalyzerMap.get(collectionName)) { + const searchFields = (options?.search?.fields?.length > 0) ? 
options.search.fields : this.collectionNameAnalyzerMap.get(collectionName).fields; + const similarityThreshold = this.collectionNameAnalyzerMap.get(collectionName).similarityThreshold; + const viewName = this.collectionNameAnalyzerMap.get(collectionName).viewName; + + const caseSensitive = options?.search?.case_sensitive; + const analyzerOptions: any = this.collectionNameAnalyzerMap.get(collectionName).analyzerOptions; + let analyzerName; + if (caseSensitive) { + // for casesensitive search use "ngram" analayzer type + analyzerOptions.forEach((optionObj: any) => { + const keyName = Object.keys(optionObj)[0]; + if (optionObj[keyName].type === 'ngram') { + analyzerName = JSON.stringify(keyName); + } + }); + } else { + // for case-insensitive search use "pipleline" type (ngram + norm) + analyzerOptions.forEach((optionObj: any) => { + const keyName = Object.keys(optionObj)[0]; + if (optionObj[keyName].type === 'pipeline') { + analyzerName = JSON.stringify(keyName); + } + }); + } + for (const field of searchFields) { + if (!searchQuery) { + searchQuery = `NGRAM_MATCH(node.${field}, ${searchString}, ${similarityThreshold}, ${analyzerName}) `; + } else { + searchQuery = searchQuery + `OR NGRAM_MATCH(node.${field}, ${searchString}, ${similarityThreshold}, ${analyzerName}) `; + } + } + // override collection name with view name + collectionName = viewName; + // override sortQuery (to rank based on score for frequency search match - term frequency–inverse document frequency algorithm TF-IDF) + sortQuery = `SORT TFIDF(node) DESC`; + } else { + this.logger?.info(`View and analyzer configuration data not set for ${collectionName} and hence ignoring search string`); + } + } + + let queryString = `FOR node in @@collection FILTER ${filterQuery}`; + if (searchQuery) { + queryString = `FOR node in @@collection SEARCH ${searchQuery} FILTER ${filterQuery}`; + } + if (!_.isEmpty(customFilter)) { + queryString += customFilter; + } + + queryString += ` ${sortQuery} + ${limitQuery} 
${returnQuery}`; + + let varArgs = {}; + if (filterResult && filterResult.bindVarsMap) { + varArgs = filterResult.bindVarsMap; + } + let returnArgs = {}; + if (returnResult && returnResult.bindVarsMap) { + returnArgs = returnResult.bindVarsMap; + } + let limitArgs; + if (_.isEmpty(limitQuery)) { + limitArgs = {}; + } else { + if (!_.isNil(opts.limit)) { + limitArgs = { limit: opts.limit }; + if (!_.isNil(opts.offset)) { + limitArgs = { offset: opts.offset, limit: opts.limit }; + } + } + } + varArgs = _.assign(varArgs, limitArgs); + varArgs = _.assign(varArgs, returnArgs); + bindVars = _.assign({ + '@collection': collectionName + }, varArgs); + if (!_.isEmpty(customFilter) && opts.customArguments) { + bindVars = _.assign(bindVars, { customArguments: opts.customArguments }); + } + let res; + if (!searchQuery) { + res = await query(this.db, collectionName, queryString, bindVars); + } else { + res = await this.db.query(queryString, bindVars); + } + const docs = await res.all(); // TODO: paginate + + return _.map(docs, sanitizeOutputFields); + } + + /** + * Find documents by id (_key). + * + * @param {String} collection Collection name + * @param {String|array.String} ids A single ID or multiple IDs. + * @return {Promise} A list of found documents. 
+ */ + async findByID(collectionName: string, ids: string | string[]): Promise { + if (_.isNil(collectionName) || !_.isString(collectionName) || + _.isEmpty(collectionName)) { + this.logger?.error('invalid or missing collection argument for findByID operation'); + throw new Error('invalid or missing collection argument for findByID operation'); + } + + if (_.isNil(ids)) { + this.logger?.error('invalid or missing ids argument for findByID operation'); + throw new Error('invalid or missing ids argument for findByID operation'); + } + if (!Array.isArray(ids)) { + ids = [ids as string]; + } + const filter = (ids as string[]).map((id) => { + return { id }; + }); + + const filterResult = buildFilter(filter); + const filterQuery = filterResult.q; + const varArgs = filterResult.bindVarsMap ?? {}; + const queryString = `FOR node in @@collection FILTER ${filterQuery} RETURN node`; + const bindVars = Object.assign({ + '@collection': collectionName + }, varArgs); + const res = await query(this.db, collectionName, queryString, bindVars); + const docs = await res.all(); + return _.map(docs, sanitizeOutputFields); + } + + /** + * retreive the documents including the document handlers (_key, _id and _rev). 
+ * + * @param {String} collectionName Collection name + * @param {any} collection Collection Object + * @param {any} documents list of documents + * @param {string[]} idsArray list of document ids + * @returns {Promise} A list of documents including the document handlers + */ + async getDocumentHandlers(collectionName: string, collection: any, documents: any, + idsArray?: string[]): Promise { + let ids = []; + if (documents && !_.isArray(documents)) { + documents = [documents]; + } + if (documents && documents.length > 0) { + for (const doc of documents) { + ids.push(doc.id); + } + } + if (!_.isEmpty(idsArray) && _.isArray(idsArray)) { + ids = idsArray; + } + const queryString = aql`FOR node in ${collection} + FILTER node.id IN ${ids} return node`; + const res = await query(this.db, collectionName, queryString); + const docsWithSelector = await res.all(); + return docsWithSelector; + } + + /** + * Find documents by filter and updates them with document. + * + * @param {String} collection Collection name + * @param {Object} updateDocuments List of documents to update + */ + async update(collectionName: string, updateDocuments: any): Promise { + const documents = _.cloneDeep(updateDocuments); + const updateDocsResponse = []; + if (_.isNil(collectionName) || + !_.isString(collectionName) || _.isEmpty(collectionName)) { + this.logger?.error('invalid or missing collection argument for update operation'); + throw new Error('invalid or missing collection argument for update operation'); + } + if (_.isNil(documents)) { + this.logger?.error('invalid or missing document argument for update operation'); + throw new Error('invalid or missing document argument for update operation'); + } + const collection = this.db.collection(collectionName); + const collectionExists = await collection.exists(); + if (!collectionExists) { + this.logger?.error(`Collection ${collectionName} does not exist for update operation`); + throw new Error(`Collection ${collectionName} does not exist for 
update operation`); + } + if (!_.isArray(documents)) { + this.logger?.error(`Documents should be list for update operation`); + throw new Error(`Documents should be list for update operation`); + } + const docsWithHandlers = await this.getDocumentHandlers(collectionName, collection, documents); + + // update _key for the input documents + for (const document of documents) { + let foundInDB = false; + for (const docWithHandler of docsWithHandlers) { + if (docWithHandler.id === document.id) { + foundInDB = true; + document._key = docWithHandler._key; + break; + } + } + if (!foundInDB) { + // if document is not found in DB use the id itself as _key + // this key will return an array in response since it does not exist + document._key = document.id; + } + } + + const updatedDocs = await collection.updateAll(documents, { returnNew: true }); + for (const doc of updatedDocs) { + if ('new' in doc) { + updateDocsResponse.push(sanitizeOutputFields(doc?.new)); + } else { + updateDocsResponse.push(doc); + } + } + return updateDocsResponse; + } + + /** + * Find each document based on it's key and update it. + * If the document does not exist it will be created. 
+ * + * @param {String} collectionName Collection name + * @param {Object|Array.Object} documents + */ + async upsert(collectionName: string, documents: any): Promise { + if (_.isNil(collectionName) || + !_.isString(collectionName) || _.isEmpty(collectionName)) { + this.logger?.error('invalid or missing collection argument for upsert operation'); + throw new Error('invalid or missing collection argument for upsert operation'); + } + if (_.isNil(documents)) { + this.logger?.error('invalid or missing documents argument for upsert operation'); + throw new Error('invalid or missing documents argument for upsert operation'); + } + let docs = _.cloneDeep(documents); + if (!_.isArray(documents)) { + docs = [documents]; + } + _.forEach(docs, (document, i) => { + docs[i] = sanitizeInputFields(document); + }); + const upsertResponse = []; + const collection = this.db.collection(collectionName); + const collectionExists = await collection.exists(); + if (!collectionExists) { + this.logger?.error(`Collection ${collectionName} does not exist for upsert operation`); + throw new Error(`Collection ${collectionName} does not exist for upsert operation`); + } + let upsertedDocs = await collection.saveAll(docs, { returnNew: true, overwriteMode: 'update' }); + if (!_.isArray(upsertedDocs)) { + upsertedDocs = [upsertedDocs]; + } + for (const doc of upsertedDocs) { + if ('new' in doc) { + upsertResponse.push(sanitizeOutputFields(doc.new)); + } else { + upsertResponse.push(doc); + } + } + return upsertResponse; + } + + /** + * Delete all documents with provided identifiers ids. 
+ * + * @param {String} collection Collection name + * @param {Object} ids list of document identifiers + * @return {Promise} delete response + */ + async delete(collectionName: string, ids: string[]): Promise { + if (_.isNil(collectionName) || + !_.isString(collectionName) || _.isEmpty(collectionName)) { + this.logger?.error('invalid or missing collection argument'); + throw new Error('invalid or missing collection argument'); + } + if (_.isNil(ids) || _.isEmpty(ids)) { + this.logger?.error('invalid or missing document IDs argument for delete operation'); + throw new Error('invalid or missing document IDs argument for delete operation'); + } + const collection = this.db.collection(collectionName); + const collectionExists = await collection.exists(); + if (!collectionExists) { + this.logger?.error(`Collection ${collectionName} does not exist for delete operation`); + throw new Error(`Collection ${collectionName} does not exist for delete operation`); + } + + // retreive _key for the give ids + const docsWithHandlers = await this.getDocumentHandlers(collectionName, collection, null, ids); + for (const id of ids) { + // check if given id is present in docsWithHandlers + let foundDocInDB = false; + for (const doc of docsWithHandlers) { + if (doc.id === id) { + foundDocInDB = true; + break; + } + } + // if document is not found in DB use the id itself as _key + // this key will return an array in response since it does not exist + if (!foundDocInDB) { + docsWithHandlers.push({ _key: id }); + } + } + const deleteHandlerIds = []; + for (const doc of docsWithHandlers) { + deleteHandlerIds.push(doc._key); + } + + return collection.removeAll(deleteHandlerIds); + } + + /** + * Count all documents selected by filter. 
+ * + * @param {String} collection Collection name + * @param {Object} filter + */ + async count(collectionName: string, filter: any): Promise { + if (_.isNil(collectionName) || + !_.isString(collectionName) || _.isEmpty(collectionName)) { + this.logger?.error('invalid or missing collection argument for count operation'); + throw new Error('invalid or missing collection argument for count operation'); + } + let filterQuery: any = filter || {}; + let filterResult: any; + if (!_.isArray(filterQuery)) { + filterQuery = [filterQuery]; + } + + if (_.isEmpty(filterQuery[0])) { + filterQuery = true; + } + else { + filterResult = buildFilter(filterQuery); + filterQuery = filterResult.q; + } + + let varArgs = {}; + if (filterResult && filterResult.bindVarsMap) { + varArgs = filterResult.bindVarsMap; + } + const queryString = `FOR node in @@collection FILTER ${filterQuery} COLLECT WITH COUNT + INTO length RETURN length`; + const bindVars = Object.assign({ + '@collection': collectionName + }, varArgs); + + const res = await query(this.db, collectionName, queryString, bindVars); + const nn = await res.all(); + return nn[0]; + } + + /** + * When calling without a collection name, + * delete all documents in all collections in the database. + * When providing a collection name, + * delete all documents in specified collection in the database. + * @param [string] collection Collection name. + */ + async truncate(collection: string): Promise { + if (_.isNil(collection)) { + const collections = await this.db.collections(); + for (let i = 0; i < collections.length; i += 1) { + const c = this.db.collection(collections[i].name); + await c.truncate(); + } + } else { + const c = this.db.collection(collection); + await c.truncate(); + } + } + + /** + * Drop view + * @param string[] list of view names. 
+ */ + async dropView(viewName: string[]): Promise { + const dropViewResponse = []; + if (viewName.length > 0) { + for (const view of viewName) { + try { + const response = await this.db.view(view).drop(); + this.logger?.info(`View ${view} dropped successfully`, response); + if (response === true) { + dropViewResponse.push({ id: view, code: 200, message: `View ${view} dropped successfully` }); + } + } catch (err: any) { + this.logger?.error(`Error dropping View ${view}`, { code: err.code, message: err.message, stack: err.stack }); + dropViewResponse.push({ id: view, code: err.code, message: err.message }); + } + } + } + return dropViewResponse; + } + + /** + * Delete Analyzer + * @param string[] list of analyzer names. + */ + async deleteAnalyzer(analyzerName: string[]): Promise { + const deleteResponse = []; + if (analyzerName.length > 0) { + for (const analyzer of analyzerName) { + try { + const response = await this.db.analyzer(analyzer).drop(); + this.logger?.info(`Analyzer ${analyzer} deleted successfully`, response); + if (response.code === 200 && response.error === false) { + deleteResponse.push({ id: analyzer, code: response.code, message: `Analyzer ${analyzer} deleted successfully` }); + } + } catch (err: any) { + this.logger?.error(`Error deleting analyzer ${analyzer}`, { code: err.code, message: err.message, stack: err.stack }); + deleteResponse.push({ id: analyzer, code: err.code, message: err.message }); + } + } + } + return deleteResponse; + } + + /** + * Insert documents into database. + * + * @param {String} collection Collection name + * @param {Object|array.Object} documents A single or multiple documents. 
+ */ + async insert(collectionName: string, documents: any): Promise { + if (_.isNil(collectionName) || !_.isString(collectionName) || _.isEmpty(collectionName)) { + this.logger?.error('invalid or missing collection argument for insert operation'); + throw new Error('invalid or missing collection argument for insert operation'); + } + if (_.isNil(documents)) { + this.logger?.error('invalid or missing documents argument for insert operation'); + throw new Error('invalid or missing documents argument for insert operation'); + } + let docs = _.cloneDeep(documents); + if (!_.isArray(documents)) { + docs = [documents]; + } + _.forEach(docs, (document, i) => { + docs[i] = sanitizeInputFields(document); + }); + const collection = this.db.collection(collectionName); + const collectionExists = await collection.exists(); + if (!collectionExists) { + await collection.create(); + } + const insertResponse = []; + let createdDocs = await collection.saveAll(docs, { returnNew: true }); + if (!_.isArray(createdDocs)) { + createdDocs = [createdDocs]; + } + for (const doc of createdDocs) { + if ('new' in doc) { + insertResponse.push(sanitizeOutputFields(doc.new)); + } else { + insertResponse.push(doc); + } + } + return insertResponse; + } + + /** + * Registers a custom AQL query. + * + * @param script + * @param name + */ + // @ts-expect-error TS2416 + registerCustomQuery(name: string, script: string, type: 'filter' | 'query'): void { + this.customQueries.set(name, { + code: script, + type + }); + } + + /** + * Unregisters a custom query. 
+ * @param name + */ + unregisterCustomQuery(name: string): void { + if (!this.customQueries.has(name)) { + this.logger?.error('custom function not found'); + throw new Error('custom function not found'); + } + this.customQueries.delete(name); + } + + listCustomQueries(): Array { + return [...this.customQueries]; + } + + async createAnalyzerAndView(viewConfig: ViewAnalyzerOptions, collectionName: string): Promise { + if (!viewConfig.view.viewName || !viewConfig?.view?.options) { + this.logger?.error(`View name or view configuration missing for ${collectionName}`); + throw new Error(`View name or view configuration missing for ${collectionName}`); + } + if ((!viewConfig?.analyzers) || (viewConfig.analyzers.length === 0) || !(viewConfig.analyzerOptions)) { + this.logger?.error(`Analyzer options or configuration missing for ${collectionName}`); + throw new Error(`Analyzer options or configuration missing for ${collectionName}`); + } + // create analyzer if it does not exist + for (const analyzerName of viewConfig.analyzers) { + const analyzer = this.db.analyzer(analyzerName); + if (!(await analyzer.exists())) { + try { + const analyzerCfg = viewConfig.analyzerOptions.filter((optionsCfg) => Object.keys(optionsCfg)[0] === analyzerName); + if (analyzerCfg?.length === 1) { + await analyzer.create(analyzerCfg[0][analyzerName] as any); + this.logger?.info(`Analyzer ${analyzerName} created successfully`); + } + } catch (err: any) { + this.logger?.error(`Error creating analyzer ${analyzerName}`, { code: err.code, message: err.message, stack: err.stack }); + } + } else { + this.logger?.info(`Analyzer ${analyzerName} already exists`); + } + } + + // check if collection exits (before creating view) + const collection = this.db.collection(collectionName); + const collectionExists = await collection.exists(); + try { + if (!collectionExists) { + await collection.create(); + this.logger?.info(`Collection ${collectionName} created successfully`); + } + } catch (err: any) { + if 
(err.message && err.message.indexOf('duplicate name') == -1) { + this.logger?.error(`Error creating collection ${collectionName}`, { code: err.code, message: err.message, stack: err.stack }); + throw err; + } + } + + // create view if it does not exist + const view = this.db.view(viewConfig.view.viewName); + const viewExists = await view.exists(); + if (!viewExists) { + try { + await this.db.createView(viewConfig?.view?.viewName, viewConfig?.view?.options); + this.logger?.info(`View ${viewConfig?.view?.viewName} created successfully`); + } catch (err: any) { + this.logger?.error(`Error creating View ${viewConfig?.view?.viewName}`, { code: err.code, message: err.message, stack: err.stack }); + } + } else { + this.logger?.info(`View ${viewConfig?.view?.viewName} already exists`); + } + // map the collectionName with list of indexed fields, view name, analyzerslist, similarity threshold + // to be used in find() + const indexedFields = Object.keys(viewConfig.view.options.links[collectionName].fields); + this.collectionNameAnalyzerMap.set(collectionName, { + viewName: viewConfig.view.viewName, + fields: indexedFields, + analyzerOptions: viewConfig.analyzerOptions, + similarityThreshold: viewConfig.view.similarityThreshold + }); + } +} diff --git a/packages/chassis-srv/src/database/provider/arango/common.ts b/packages/chassis-srv/src/database/provider/arango/common.ts new file mode 100644 index 00000000..0d666b42 --- /dev/null +++ b/packages/chassis-srv/src/database/provider/arango/common.ts @@ -0,0 +1,414 @@ +import * as _ from 'lodash'; +import Long from 'long'; + +/** + * Ensure that the collection exists and process the query + * @param {Object} db arangodb connection + * @param {string} collectionName collection name + * @param {string} query query string + * @param {Object} args list of arguments, optional + * @return {Promise} arangojs query result + */ +export const query = async (db: any, collectionName: string, query: string | any, + args?: object): Promise => 
{ + const collection = db.collection(collectionName); + const collectionExists = await collection.exists(); + try { + if (!collectionExists) { + await collection.create(); + } + } catch(err: any) { + if (err.message && err.message.indexOf('duplicate name') == -1) { + throw err; + } + } + return await db.query(query, args); +}; + +/** + * Convert id to arangodb friendly key. + * @param {string} id document identification + * @return {any} arangodb friendly key + */ +export const idToKey = (id: string): any => { + return id.replace(/\//g, '_'); +}; + +/** + * Ensure that the _key exists. + * @param {Object} document Document template. + * @return {any} Clone of the document with the _key field set. + */ +const ensureKey = (document: any): any => { + const doc = _.clone(document); + if (_.has(doc, '_key')) { + return doc; + } + const id = (doc as any).id; + if (id) { + _.set(doc, '_key', idToKey(id)); + } + return doc; +}; + +const ensureDatatypes = (document: any): any => { + const doc = _.clone(document); + const keys = _.keys(doc); + for (const key of keys) { + if (Long.isLong(doc[key])) { + doc[key] = (doc[key] as Long).toNumber(); + } + } + return doc; +}; + +/** + * Remove arangodb specific fields. + * @param {Object} document A document returned from arangodb. + * @return {Object} A clone of the document without arangodb specific fields. 
+ */ +export const sanitizeOutputFields = (document: object): object => { + const doc = _.clone(document); + _.unset(doc, '_id'); + _.unset(doc, '_key'); + _.unset(doc, '_rev'); + return doc; +}; + +export const sanitizeInputFields = (document: any): any => { + const doc = ensureDatatypes(document); + return ensureKey(doc); +}; + +/** + * Auto-casting reference value by using native function of arangoDB + * + * @param {string} key + * @param {object} value - raw value optional + * @return {object} interpreted value + */ +export const autoCastKey = (key: any, value?: any): any => { + if (_.isDate(value)) { // Date + return `DATE_TIMESTAMP(node.${key})`; + } + return 'node.' + key; +}; + +/** + * Auto-casting raw data + * + * @param {object} value - raw value + * @returns {any} interpreted value + */ +export const autoCastValue = (value: any): any => { + if (_.isArray(value)) { + return value.map(value => value.toString()); + } + if (_.isString(value)) { // String + return value; + } + if (_.isBoolean(value)) { // Boolean + return Boolean(value); + } + if (_.isNumber(value)) { + return _.toNumber(value); + } + if (Long.isLong(value)) { + return (value as Long).toNumber(); + } + if (_.isDate(value)) { // Date + return new Date(value); + } + return value; +}; + +/** + * Links children of filter together via a comparision operator. 
+ * @param {any} filter + * @param {string} op comparision operator + * @param {number} index to keep track of bind variables + * @param {any} bindVarsMap mapping of keys to values for bind variables + * @return {any} query template string and bind variables + */ + +export const buildComparison = (filter: any, op: string, index: number, + bindVarsMap: any): any => { + const ele = _.map(filter, (e) => { + if (!_.isArray(e)) { + e = [e]; + } + e = buildFilter(e, index, bindVarsMap); + index += 1; + return e.q; + }); + + let q = '( '; + for (let i = 0; i < ele.length; i += 1) { + if (i == ele.length - 1) { + q = `${q} ${ele[i]} )`; + } else { + q = `${q} ${ele[i]} ${op} `; + } + } + return { q, bindVarsMap }; +}; + +/** + * Creates a filter key, value. + * When the value is a string, boolean, number or date a equal comparision is created. + * Otherwise if the key corresponds to a known operator, the operator is constructed. + * @param {string} key + * @param {string|boolean|number|date|object} value + * @param {number} index to keep track of bind variables + * @param {any} bindVarsMap mapping of keys to values for bind variables + * @return {String} query template string + */ +export const buildField = (key: any, value: any, index: number, bindVarsMap: any): string => { + const bindValueVar = `@value${index}`; + const bindValueVarWithOutPrefix = `value${index}`; + if (_.isString(value) || _.isBoolean(value) || _.isNumber(value || _.isDate(value))) { + bindVarsMap[bindValueVarWithOutPrefix] = autoCastValue(value); + return autoCastKey(key, value) + ' == ' + bindValueVar; + } + if (!_.isNil(value.$eq)) { + bindVarsMap[bindValueVarWithOutPrefix] = autoCastValue(value.$eq); + return autoCastKey(key, value) + ' == ' + bindValueVar; + } + if (value.$gt) { + bindVarsMap[bindValueVarWithOutPrefix] = autoCastValue(value.$gt); + return autoCastKey(key, value) + ' > ' + bindValueVar; + } + if (value.$gte) { + bindVarsMap[bindValueVarWithOutPrefix] = autoCastValue(value.$gte); + 
return autoCastKey(key, value) + ' >= ' + bindValueVar; + } + if (value.$lt) { + bindVarsMap[bindValueVarWithOutPrefix] = autoCastValue(value.$lt); + return autoCastKey(key, value) + ' < ' + bindValueVar; + } + if (value.$lte) { + bindVarsMap[bindValueVarWithOutPrefix] = autoCastValue(value.$lte); + return autoCastKey(key, value) + ' <= ' + bindValueVar; + } + if (!_.isNil(value.$ne)) { + bindVarsMap[bindValueVarWithOutPrefix] = autoCastValue(value.$ne); + return autoCastKey(key, value) + ' != ' + bindValueVar; + } + if (value.$inVal) { + bindVarsMap[bindValueVarWithOutPrefix] = autoCastValue(value.$inVal); + return bindValueVar + ' IN ' + autoCastKey(key, value); + } + if (value.$in) { + bindVarsMap[bindValueVarWithOutPrefix] = autoCastValue(value.$in); + if (_.isString(value.$in)) { + // if it is a field which should be an array + // (useful for querying within a document list-like attributen + return bindValueVar + ' IN ' + autoCastKey(key); + } + // assuming it is a list of provided values + return autoCastKey(key, value) + ' IN ' + bindValueVar; + } + if (value.$nin) { + bindVarsMap[bindValueVarWithOutPrefix] = autoCastValue(value.$nin); + return autoCastKey(key, value) + ' NOT IN ' + bindValueVar; + } + if (value.$iLike) { + bindVarsMap[bindValueVarWithOutPrefix] = autoCastValue(value.$iLike); + // @param 'true' is for case insensitive + return ' LIKE (' + autoCastKey(key, value) + ',' + bindValueVar + ', true)'; + } + if (!_.isNil(value.$not)) { + const temp = buildField(key, value.$not, index, bindVarsMap); + return `!(${temp})`; + } + if (_.has(value, '$isEmpty')) { + bindVarsMap[bindValueVarWithOutPrefix] = autoCastValue(''); + // will always search for an empty string + return autoCastKey(key, '') + ' == ' + bindValueVar; + } + if (!_.isNil((value as any).$startswith)) { + const bindValueVar1 = `@value${index + 1}`; + const bindValueVarWithOutPrefix1 = `value${index + 1}`; + const k = autoCastKey(key); + const v = autoCastValue((value as 
any).$startswith); + bindVarsMap[bindValueVarWithOutPrefix] = v; + bindVarsMap[bindValueVarWithOutPrefix1] = v; + return `LEFT(${k}, LENGTH(${bindValueVar})) == ${bindValueVar1}`; + } + if (!_.isNil((value as any).$endswith)) { + const bindValueVar1 = `@value${index + 1}`; + const bindValueVarWithOutPrefix1 = `value${index + 1}`; + const k = autoCastKey(key); + const v = autoCastValue((value as any).$endswith); + bindVarsMap[bindValueVarWithOutPrefix] = v; + bindVarsMap[bindValueVarWithOutPrefix1] = v; + return `RIGHT(${k}, LENGTH(${bindValueVar})) == ${bindValueVar1}`; + } + throw new Error(`unsupported operator ${_.keys(value)} in ${key}`); +}; + +/** + * Build ArangoDB query based on filter. + * @param {Object} filter key, value tree object. + * @param {number} index to keep track of bind variables + * @param {any} bindVarsMap mapping of keys to values for bind variables + * @return {any} query template string and bind variables + */ +export const buildFilter = (filter: any, index?: number, bindVarsMap?: any): any => { + if (!index) { + index = 0; + } + if (!bindVarsMap) { + bindVarsMap = {}; + } + if (filter.length > 0) { + let q: any = ''; + let multipleFilters = false; + for (const eachFilter of filter) { + _.forEach(eachFilter, (value, key) => { + switch (key) { + case '$or': + if (!multipleFilters) { + if (_.isEmpty(value)) { + q = true; + } else { + q = buildComparison(value, '||', index, bindVarsMap).q; + } + + multipleFilters = true; + // since there is a possiblility for recursive call from buildComparision to buildFilter again. 
+ index += 1; + } else { + q = q + '&& ' + buildComparison(value, '||', index, bindVarsMap).q; + index += 1; + } + break; + case '$and': + if (!multipleFilters) { + if (_.isEmpty(value)) { + q = false; + } else { + q = buildComparison(value, '&&', index, bindVarsMap).q; + } + multipleFilters = true; + index += 1; + } else { + q = q + '&& ' + buildComparison(value, '&&', index, bindVarsMap).q; + index += 1; + } + break; + default: + if (_.startsWith(key, '$')) { + throw new Error(`unsupported query operator ${key}`); + } + if (!multipleFilters) { + q = buildField(key, value, index, bindVarsMap); + multipleFilters = true; + index += 1; + } else { + q = q + ' && ' + buildField(key, value, index, bindVarsMap); + index += 1; + } + break; + } + }); + } + return { q, bindVarsMap }; + } +}; + +/** + * Build count and offset filters. + * @param {Object} options query options + * @return {String} template query string + */ +export const buildLimiter = (options: any): string => { + // LIMIT count + // LIMIT offset, count + if (!_.isNil(options.limit)) { + if (!_.isNil(options.offset)) { + return `LIMIT @offset, @limit`; + } + return `LIMIT @limit`; + } + return ''; +}; + +/** + * Build sort filter. 
+ * @param {Object} options query options + * @param {number} index to keep track of bind variables + * @param {any} bindVarsMap Object containing bind key to values + * @return {any} template query string and bind variables Object + */ +export const buildSorter = (options: any, index?: number, bindVarsMap?: any): any => { + if (_.isNil(options.sort) || _.isEmpty(options.sort)) { + return ''; + } + + if (!index) { + index = 0; + } + if (!bindVarsMap) { + bindVarsMap = {}; + } + + const sort = _.mapKeys(options.sort, (value, key) => { + return autoCastKey(key); + }); + let sortKeysOrder = ''; + let i = 1; + const objLength = Object.keys(sort).length; + for (const key in sort) { + if (objLength == i) { + // Do not append ',' for the last element + sortKeysOrder = `${sortKeysOrder} ${key} ${sort[key]} `; + } else { + sortKeysOrder = `${sortKeysOrder} ${key} ${sort[key]},`; + } + i += 1; + } + return 'SORT ' + sortKeysOrder; +}; + +export const buildReturn = (options: any): any => { + let excludeIndex = 0; + let includeIndex = 0; + const bindVarsMap: Record = {}; + let q = ''; + if (_.isNil(options.fields) || _.isEmpty(options.fields)) { + return { q, bindVarsMap }; + } + const keep: any[] = []; + const exclude: any[] = []; + _.forEach(options.fields, (value, key) => { + switch (value) { + case 0: + bindVarsMap[`exclude${excludeIndex}`] = key; + exclude.push(`@exclude${excludeIndex}`); + excludeIndex += 1; + break; + case 1: + default: + bindVarsMap[`include${includeIndex}`] = key; + keep.push(`@include${includeIndex}`); + includeIndex += 1; + } + }); + if (keep.length > 0) { + const include = _.join(_.map(keep, (e) => { return e; })); + q = `RETURN KEEP( node, ${include} )`; + return { q, bindVarsMap }; + } + if (exclude.length > 0) { + const unset = _.join(_.map(exclude, (e) => { return e; })); + q = `RETURN UNSET( node, ${unset} )`; + return { q, bindVarsMap }; + } + q = 'RETURN result'; + return { q, bindVarsMap }; +}; + +export const encodeMessage = (object: 
object) => { + return Buffer.from(JSON.stringify(object)); +}; diff --git a/packages/chassis-srv/src/database/provider/arango/graph.ts b/packages/chassis-srv/src/database/provider/arango/graph.ts new file mode 100644 index 00000000..978df851 --- /dev/null +++ b/packages/chassis-srv/src/database/provider/arango/graph.ts @@ -0,0 +1,588 @@ +import * as _ from 'lodash'; +import { Database } from 'arangojs'; +import { Arango } from './base.js'; +import { sanitizeInputFields, sanitizeOutputFields } from './common.js'; +import { GraphDatabaseProvider } from '../../index.js'; +import { Graph } from 'arangojs/graph'; +import { ArangoCollection } from 'arangojs/collection'; +import { buildGraphFilter, buildGraphLimiter, buildGraphSorter, createGraphsAssociationFilter } from './utils.js'; +import { + Vertices, + Collection, + Options as TraversalOptions, + Filters as GraphFilters, + Options_Direction as Direction, + DeepPartial +} from '@restorecommerce/rc-grpc-clients/dist/generated-server/io/restorecommerce/graph.js'; +import { TraversalResponse } from './interface.js'; +import { type Logger } from '@restorecommerce/logger'; + +// @ts-expect-error TS2420 +export class ArangoGraph extends Arango implements GraphDatabaseProvider { + constructor( + db: Database, + public readonly graph: Graph, + public readonly edgeDefConfig: any, + public readonly logger?: Logger, + ) { + super(db); + } + + /** + * create a Graph instance. + * + * @param {String} graphName graph name + * @param edgeDefinitions — Definitions for the relations of the graph. + * @param options — Options for creating the graph. 
+ * @return {Object} A Graph instance + */ + async createGraphDB(graphName: string, edgeDefinitions: any, options: object): Promise { + if (!this.graph) { + if (_.isNil(graphName)) { + throw new Error('missing graph name'); + } + const graph = this.db.graph(graphName); + try { + await graph.create(edgeDefinitions, options); + } catch (err: any) { + if (err.message === 'graph already exists') { + return this.graph; + } + throw { code: err.code, message: err.message }; + } + return graph; + } else { + return this.graph; + } + } + + + /** + * create a new Vertex with given data. + * + * @param {string} collectionName vertex collection name + * @param {Object} data data for vertex + * @return {Object} created vertex + */ + async createVertex(collectionName: string, data: any): Promise { + if (_.isNil(collectionName)) { + throw new Error('missing vertex collection name'); + } + if (_.isNil(data)) { + throw new Error('missing data for vertex'); + } + const collection = this.graph.vertexCollection(collectionName); + let docs = _.cloneDeep(data); + if (!_.isArray(docs)) { + docs = [docs]; + } + _.forEach(docs, (document, i) => { + docs[i] = sanitizeInputFields(document); + }); + const responseDocs = []; + for (const eachDoc of docs) { + let result: any; + try { + result = await collection.save(eachDoc); + if (!result.error) { + responseDocs.push(eachDoc); + } + } catch (e: any) { + responseDocs.push({ + error: true, + errorNum: e.code, + errorMessage: e.message + }); + } + } + return _.map(responseDocs, sanitizeOutputFields); + } + + /** + * Retreives the vertex with the given documentHandle from the collection. + * + * @param {string} collectionName vertex collection name + * @param {string} documentHandle The handle of the vertex to retrieve. + * This can be either the _id or the _key of a vertex in the collection, + * or a vertex (i.e. an object with an _id or _key property). 
+ * @return {Object} created vertex + */ + async getVertex(collectionName: string, documentHandle: string): Promise { + if (_.isNil(collectionName)) { + throw new Error('missing vertex collection name'); + } + if (_.isNil(documentHandle)) { + throw new Error('missing document handle'); + } + const collection = this.graph.vertexCollection(collectionName); + const doc = await collection.vertex(documentHandle); + return doc; + } + + /** + * Deletes the vertex with the given documentHandle from the collection. + * + * @param {string} collectionName vertex collection name + * @param {string[]} documentHandles An array of the documentHandles to be removed. + * This can be either the _id or the _key of a vertex in the collection, + * or a vertex (i.e. an object with an _id or _key property). + * @return {Object} removed vertex + */ + async removeVertex(collectionName: string, documentHandles: string | string[]): Promise { + if (_.isNil(collectionName)) { + throw new Error('missing vertex collection name'); + } + if (_.isNil(documentHandles)) { + throw new Error('missing document handle property'); + } + if (!_.isArray(documentHandles)) { + documentHandles = [documentHandles as string]; + } + const collection = this.graph.vertexCollection(collectionName); + const removedVertexList = []; + for (const documentHandle of documentHandles) { + const id = documentHandle.split('/')[1]; + const removed: any = await collection.remove(documentHandle); + if (!removed.error) { + removedVertexList.push({ _id: documentHandle, _key: id, _rev: id }); + } + } + return removedVertexList; + } + + /** + * gets a new GraphVertexCollection instance with the given name for this graph. + * + * @param {string} collectionName The handle of the vertex to retrieve. + * This can be either the _id or the _key of a vertex in the collection, + * or a vertex (i.e. an object with an _id or _key property). 
+ * @return {Object} created vertex + */ + async getVertexCollection(collectionName: string): Promise { + if (_.isNil(collectionName)) { + throw new Error('missing vertex collection name'); + } + + const collection = await this.graph.vertexCollection(collectionName); + return collection; + } + + /** + * Fetches all vertex collections from the graph and returns + * an array of collection descriptions. + * + * @return {Array} vertex list + */ + async listVertexCollections(): Promise { + const collections = await this.graph.listVertexCollections(); + return collections; + } + + /** + * Fetches all vertex collections from the database and returns an array + * of GraphVertexCollection instances for the collections. + * + * @return {Array} vertex list + */ + async getAllVertexCollections(): Promise { + const collections = await this.graph.vertexCollections(); + return collections; + } + + /** + * Adds the collection with the given collectionName to the graph's + * vertex collections. + * + * @param {string} collectionName Name of the vertex collection to add to the graph. + * @param {boolean} excludeOrphans Whether orphan collections should be excluded. + * @return {Array} vertex list + */ + async addVertexCollection(collectionName: string): Promise { + if (_.isNil(collectionName)) { + throw new Error('missing vertex collection name'); + } + let collection; + try { + collection = await this.graph.addVertexCollection(collectionName); + } catch (err: any) { + if (err.message.indexOf('collection already used in edge def') > -1 || err.message.indexOf('collection used in orphans') > -1) { + return collection; + } + throw new Error(err.message); + } + return collection; + } + + /** + * Removes the vertex collection with the given collectionName from the graph. + * + * @param {string} collectionName Name of the vertex collection to remove from the graph. + * @param {boolean} dropCollection If set to true, the collection will + * also be deleted from the database. 
+ * @return {Object } removed vertex + */ + async removeVertexCollection(collectionName: string, dropCollection?: boolean): Promise { + if (_.isNil(collectionName)) { + throw new Error('missing vertex collection name'); + } + dropCollection ??= false; + const collection = await this.graph.removeVertexCollection( + collectionName, + dropCollection + ); + return collection; + } + + /** + * @return {Graph} A Graph instance + */ + getGraphDB(): Graph { + return this.graph; + } + + /** + * Creates a new edge between the vertices fromId and toId with the given data. + * + * @param {string} collectionName name of the edge collection + * @param {Object} data The data of the new edge. If fromId and toId are not + * specified, the data needs to contain the properties _from and _to. + * @param {string} fromId The handle of the start vertex of this edge. + * This can be either the _id of a document in the database, the _key of an + * edge in the collection, or a document (i.e. an object with an _id or _key property). + * @param {string} toId The handle of the end vertex of this edge. + * This can be either the _id of a document in the database, the _key of an + * edge in the collection, or a document (i.e. an object with an _id or _key property). + * @return {Object} edge object + */ + async createEdge(collectionName: string, data: object, fromId?: string, + toId?: string): Promise { + if (_.isNil(collectionName)) { + throw new Error('missing edge collection name'); + } + if (_.isNil(data)) { + data = {}; + } + + const collection = this.graph.edgeCollection(collectionName); + if (fromId) { + Object.assign(data, { _from: fromId }); + } + if (toId) { + Object.assign(data, { _to: toId }); + } + return collection.save(data); + } + + /** + * Retrieves the edge with the given documentHandle from the collection. 
+ *
+ * @param {String} collectionName collection name
+ * @param {String} documentHandle edge key
+ * @return {Object} edge object
+ */
+ async getEdge(collectionName: string, documentHandle: string): Promise {
+ if (_.isNil(collectionName)) {
+ throw new Error('missing edge collection name');
+ }
+ if (_.isNil(documentHandle)) {
+ throw new Error('missing document handle');
+ }
+ const collection = this.graph.edgeCollection(collectionName);
+ return collection.edge(documentHandle);
+ }
+
+ /**
+ * Retrieves a list of all edges of the document with the given documentHandle.
+ *
+ * @param {String} collectionName edge collection name
+ * @param {String} documentHandle The handle of the document to retrieve
+ * the edges of. This can be either the _id of a document in the database,
+ * the _key of an edge in the collection, or a document
+ * (i.e. an object with an _id or _key property).
+ * @return {Object} edge object
+ */
+ async getAllEdgesForVertice(collectionName: string, documentHandle: string): Promise {
+ if (_.isNil(collectionName)) {
+ throw new Error('missing edge collection name');
+ }
+ if (_.isNil(documentHandle)) {
+ throw new Error('missing document handle');
+ }
+ const collection = this.graph.edgeCollection(collectionName).collection;
+ return await collection.edges(documentHandle, {});
+ }
+
+ /**
+ * get all incoming edges.
+ *
+ * @param {String} collectionName edge collection name
+ * @param {String} documentHandle The handle of the document
+ * @return {[Object]} list of edges
+ */
+ async getInEdges(collectionName: string, documentHandle: string): Promise {
+ if (_.isNil(collectionName)) {
+ throw new Error('missing edge collection name');
+ }
+ if (_.isNil(documentHandle)) {
+ throw new Error('missing document handle');
+ }
+ const collection = this.graph.edgeCollection(collectionName).collection;
+ return await collection.inEdges(documentHandle, {});
+ }
+
+ /**
+ * get all outgoing edges.
+ * + * @param {String} collectionName edge collection name + * @param {String} documentHandle The handle of the document + * @return {[Object]} list of edges + */ + async getOutEdges(collectionName: string, documentHandle: string): Promise { + if (_.isNil(collectionName)) { + throw new Error('missing edge collection name'); + } + if (_.isNil(documentHandle)) { + throw new Error('missing document handle'); + } + const collection = this.graph.edgeCollection(collectionName).collection; + return collection.outEdges(documentHandle, {}); + } + + /** + * collection traversal - Performs a traversal starting from the given + * startVertex and following edges contained in this edge collection. + * + * @param {String} collectionName collection name + * @param {String | String[]} startVertex Start vertex or vertices. + * This can be either the _id of a document in the database, + * the _key of an edge in the collection, or a document + * (i.e. an object with an _id or _key property). + * @param {any} opts opts.direction opts.filter, opts.visitor, + * opts.init, opts.expander, opts.sort + * @return {[Object]} edge traversal path + */ + async traversal(vertices: Vertices, collection: Collection, opts: DeepPartial, + filters?: GraphFilters[]): Promise { + if (vertices) { + if (_.isEmpty(vertices.collection_name) && !_.isEmpty(vertices.start_vertex_ids)) { + throw new Error(`missing collection name for vertex id ${vertices.start_vertex_ids}`); + } else if (!_.isEmpty(vertices.collection_name) && _.isEmpty(vertices.start_vertex_ids)) { + throw new Error(`missing vertex id for collection_name ${vertices.collection_name}`); + } + } + + // vertices data + let vertexCollectionName, startVertexIds; + if (vertices) { + vertexCollectionName = vertices.collection_name; + startVertexIds = vertices.start_vertex_ids; + } + + // collection data + let collectionName, limit, offset, sort; + if (collection) { + collectionName = collection.collection_name; + limit = collection.limit; + offset = 
collection.offset; + sort = collection.sorts; + } + + if ((_.isUndefined(startVertexIds) || _.isNil(startVertexIds) || _.isEmpty(startVertexIds)) && + (_.isUndefined(collectionName) || _.isNil(collectionName) || _.isEmpty(collectionName))) { + throw new Error('One of the Vertices or Collection should be defined'); + } + + // from either vertices or collections + const traversalCollectionName = collectionName && !_.isEmpty(collectionName) ? collectionName : vertexCollectionName; + + if (!opts) { + opts = {}; + } + // make outbound traversal by default if not provided + if (opts.direction === undefined) { + opts.direction = Direction.OUTBOUND; + } + + // default options + let defaultOptions: any = { uniqueVertices: 'global', bfs: true, uniqueEdges: 'path' }; + let filter = ''; + let rootFilter = ''; + let limitFilter = ''; + let sortFilter = ''; + // include vertices in options if specified + if (opts.include_vertexs) { + defaultOptions.vertexCollections = opts.include_vertexs; + } + + // include edges in options if specified + if (opts.include_edges) { + defaultOptions.edgeCollections = opts.include_edges; + } + + // exclude vertices + if (opts.exclude_vertexs) { + for (const excludeVertex of opts.exclude_vertexs) { + filter = filter + ` FILTER v._id NOT LIKE "${excludeVertex}%" `; + } + } + + // exclude edges + if (opts.exclude_edges) { + for (const excludeEdge of opts.exclude_edges) { + filter = filter + ` FILTER e._id NOT LIKE "${excludeEdge}%" `; + } + } + + const rootAndAssociationFilter = createGraphsAssociationFilter(filters, + opts.direction, traversalCollectionName, this.edgeDefConfig, filter); + // association fitler + filter = rootAndAssociationFilter.associationFilter; + // root filter + const rootEntityFilter = rootAndAssociationFilter.rootEntityFilter; + if (rootEntityFilter) { + rootFilter = buildGraphFilter([rootEntityFilter], true).q; + } + + if (startVertexIds && startVertexIds.length > 0) { + if (rootFilter && !_.isEmpty(rootFilter)) { + 
rootFilter = ` obj.id IN ${JSON.stringify(startVertexIds)} && ${rootFilter}`; + } else { + rootFilter = ` obj.id IN ${JSON.stringify(startVertexIds)} `; + } + } + + // combined root filter + if (rootFilter && !_.isEmpty(rootFilter)) { + rootFilter = `FILTER ${rootFilter}`; + } + + limitFilter = buildGraphLimiter(limit, offset); + + if (sort) { + sortFilter = buildGraphSorter(sort); + } + + let rootCursor, associationCursor; + try { + defaultOptions = JSON.stringify(defaultOptions); + // traversal data + const traversalQuery = `For obj IN ${traversalCollectionName} ${rootFilter} ${limitFilter} ${sortFilter} + FOR v, e, p IN 1..100 ${Direction[opts.direction]} obj GRAPH "${this.graph.name}" + OPTIONS ${defaultOptions} + ${filter} + RETURN { v, e, p }`; + associationCursor = await this.db.query(traversalQuery); + const rootEntityQuery = `For obj IN ${traversalCollectionName} ${rootFilter} ${limitFilter} ${sortFilter} return obj`; + rootCursor = await this.db.query(rootEntityQuery); + } catch (err: any) { + throw { code: err.code, message: err.message }; + } + + return { rootCursor, associationCursor }; + } + + async getAllChildrenNodes(startVertex: string, + edgeName: string): Promise { + const queryTpl = `FOR v IN 1..1 OUTBOUND @start_vertex @@edge_name RETURN v`; + const result = await this.db.query(queryTpl, { + start_vertex: startVertex, + '@edge_name': edgeName + }); + return result; + } + + arrUnique(arr: T[]) { + return [... new Set(arr)]; + } + + /** + * Adds the given edge definition to the graph. 
+ *
+ * @param {string} edgeName edge name
+ * @param {Object} fromVertice from vertice
+ * @param {Object} toVertice to vertice
+ * @return {Object} The added edge definition
+ */
+ async addEdgeDefinition(edgeName: string, fromVertice: (string | ArangoCollection)[],
+ toVertice: (string | ArangoCollection)[]): Promise {
+ if (_.isNil(edgeName)) {
+ throw new Error('missing edge name');
+ }
+ if (_.isNil(fromVertice)) {
+ throw new Error('missing from vertice');
+ }
+ if (_.isNil(toVertice)) {
+ throw new Error('missing to vertice');
+ }
+
+ if (!_.isArray(fromVertice)) {
+ fromVertice = [fromVertice];
+ }
+
+ if (!_.isArray(toVertice)) {
+ toVertice = [toVertice];
+ }
+
+ let edgeDef;
+ try {
+ return await this.graph.addEdgeDefinition(
+ {
+ collection: edgeName,
+ from: fromVertice,
+ to: toVertice
+ }
+ );
+ } catch (err: any) {
+ // edge definition already exists: treat as idempotent and return undefined
+ if (err.message === `${edgeName} multi use of edge collection in edge def`) {
+ return edgeDef;
+ }
+ throw { code: err.code, message: err.message };
+ }
+ }
+
+ /**
+ * Removes the edge definition with the given definitionName from the graph.
+ *
+ * @param {string} definitionName Name of the edge definition
+ * to remove from the graph.
+ * @param {boolean} dropCollection If set to true, the edge collection
+ * associated with the definition will also be deleted from the database.
+ * @return {Object} replaced edge definition
+ */
+ async removeEdgeDefinition(definitionName: string, dropCollection?: boolean): Promise {
+ if (_.isNil(definitionName)) {
+ throw new Error('missing definition name');
+ }
+ return this.graph.removeEdgeDefinition(definitionName, dropCollection);
+ }
+
+ /**
+ * list graphs.
+ *
+ * @return {Promise} list all the graphs
+ */
+ async listGraphs(): Promise {
+ return this.db.listGraphs();
+ }
+
+ /**
+ * Deletes the edge with the given documentHandle from the collection.
+ * + * @param {string} collectionName edge collection name + * @param {string} documentHandle The handle of the edge to retrieve. + * This can be either the _id or the _key of an edge in the collection, + * or an edge (i.e. an object with an _id or _key property). + * @return {Object} removed Edge + */ + async removeEdge(collectionName: string, documentHandle: string): Promise { + if (_.isNil(collectionName)) { + throw new Error('missing edge collection name'); + } + if (_.isNil(documentHandle)) { + throw new Error('missing document handle'); + } + const collection = this.graph.edgeCollection(collectionName); + return collection.remove(documentHandle); + } +} diff --git a/packages/chassis-srv/src/database/provider/arango/index.ts b/packages/chassis-srv/src/database/provider/arango/index.ts new file mode 100644 index 00000000..3e8a2955 --- /dev/null +++ b/packages/chassis-srv/src/database/provider/arango/index.ts @@ -0,0 +1,131 @@ +import { Arango } from './base.js'; +import { ArangoGraph } from './graph.js'; + +import retry from 'async-retry'; +import * as fs from 'fs'; +import { Database } from 'arangojs'; +import { type Logger } from '@restorecommerce/logger'; + +const DB_SYSTEM = '_system'; + +/** + * Connect to a ArangoDB. + * @param {Object} conf Connection options. 
+ * @param {Logger} logger + * @return active ArangoDB connection + */ +const connect = async (conf: any, logger: Logger): Promise => { + const dbHost = conf.host || '127.0.0.1'; + const dbPort = conf.port || 8529; + const dbName = conf.database || 'arango'; + const autoCreate = conf.autoCreate || false; + const attempts = conf.retries || 3; + const delay = conf.delay || 1000; + const arangoVersion = conf.version || 30000; + + let url = 'http://'; + + const username = conf.username; + const password = conf.password; + + if (username && password) { + url = url + `${username}:${password}@`; + } + + url = url + `${dbHost}:${dbPort}`; + + let mainError; + let i = 1; + try { + return await retry(async () => { + logger?.info('Attempt to connect database', { + dbHost, dbPort, dbName, + attempt: i + }); + i += 1; + const db = new Database({ + url, + arangoVersion, + }); + try { + if (username && password) { + db.useBasicAuth(username, password); + } + await db.database(dbName).get(); + } catch (err: any) { + if (err.name === 'ArangoError' && err.errorNum === 1228) { + if (autoCreate) { + logger?.verbose(`auto creating arango database ${dbName}`); + // Database does not exist, create a new one + db.database(DB_SYSTEM); + await db.createDatabase(dbName); + db.database(dbName); + return db.database(dbName); + } + } + throw err; + } + return db.database(dbName); + }, { retries: attempts, minTimeout: delay }); + } + catch (err: any) { + const safeError = Object.getOwnPropertyNames(Object.getPrototypeOf(err)) + .reduce((acc: any, curr: any) => { return acc[curr] = err[curr], acc; }, {}); + logger?.error('Database connection error', { err: safeError, dbHost, dbPort, dbName, attempt: i }); + mainError = err; + } + throw mainError; +}; + +/** + * Create a new connected ArangoDB provider. 
+ * + * @param {Object} conf ArangoDB configuration + * @param {Object} [logger] Logger + * @return {Arango} ArangoDB provider + */ +export const create = async (conf: any, logger: any, graphName?: string, edgeDefConfig?: any): Promise => { + let graph; + const conn = await connect(conf, logger); + let db: Arango; + // conn is nothing but this.db + if (graphName) { + try { + graph = conn.graph(graphName); + await graph.create(edgeDefConfig); + } catch (err: any) { + if (err.message !== 'graph already exists') { + throw err; + } + } + + db = new ArangoGraph(conn, graph, edgeDefConfig, logger); + } else { + db = new Arango(conn, logger); + } + + // iterate db conf and create list of views / analayzers + if (conf?.arangoSearch?.length > 0) { + for (const obj of conf.arangoSearch) { + try { + const { collectionName, path } = obj; + const viewCfg = JSON.parse(fs.readFileSync(path, 'utf8')); + await db.createAnalyzerAndView(viewCfg, collectionName); + } catch (error: any) { + logger?.error('Error creating analyzer or view', { + code: error.code, message: error.message, stack: error.stack + }); + } + } + } + + if (conf.customQueries) { + conf.customQueries.forEach((obj: any) => { + const { path, name, type } = obj; + const script = fs.readFileSync(path, 'utf8'); + db.registerCustomQuery(name, script, type); + }); + } + + return db; +}; \ No newline at end of file diff --git a/packages/chassis-srv/src/database/provider/arango/interface.ts b/packages/chassis-srv/src/database/provider/arango/interface.ts new file mode 100644 index 00000000..2f33ea08 --- /dev/null +++ b/packages/chassis-srv/src/database/provider/arango/interface.ts @@ -0,0 +1,30 @@ +import { ArrayCursor } from 'arangojs/cursor'; +import { CreateArangoSearchViewOptions } from 'arangojs/view'; +import { CreateAnalyzerOptions } from 'arangojs/analyzer'; + +export interface TraversalResponse { + rootCursor?: ArrayCursor; + associationCursor?: ArrayCursor; +} + +export interface AnalyzerOptions { + [key: string]: 
CreateAnalyzerOptions; +} + +export interface ViewAnalyzerOptions { + view: { + collectionName: string; + viewName: string; + similarityThreshold: number; + options: CreateArangoSearchViewOptions; + }; + analyzers: string[]; + analyzerOptions: AnalyzerOptions[]; +} + +export interface ViewMap { + fields: string[]; // list of indexed fields of entity + viewName: string; + similarityThreshold: number; + analyzerOptions: AnalyzerOptions[]; +} diff --git a/packages/chassis-srv/src/database/provider/arango/utils.ts b/packages/chassis-srv/src/database/provider/arango/utils.ts new file mode 100644 index 00000000..cbf5b002 --- /dev/null +++ b/packages/chassis-srv/src/database/provider/arango/utils.ts @@ -0,0 +1,574 @@ +import * as _ from 'lodash'; +import Long from 'long'; +import { + Filters as GraphFilters, + Options_Direction, + Options_Direction as Direction, +} from '@restorecommerce/rc-grpc-clients/dist/generated-server/io/restorecommerce/graph.js'; + +const filterOperationMap = new Map([ + [0, 'eq'], + [1, 'lt'], + [2, 'lte'], + [3, 'gt'], + [4, 'gte'], + [5, 'isEmpty'], + [6, 'iLike'], + [7, 'in'], + [8, 'neq'] +]); + +const filterOperatorMap = new Map([ + [0, 'and'], + [1, 'or'] +]); + +/** + * Takes filter object containing field, operation and value and inserts it + * to the obj using the operatorList for finding the last operator position and updates obj + * @param {filter} filter object containing field, operation, value and type + * @param {obj} obj converted filter object + * @param {operatorList} operatorList list of operators from original filter object + */ +const convertFilterToObject = (filter: any, obj: any, operatorList: any) => { + let temp = _.clone(obj); + let value; + if (!filter.type || filter.type === 'STRING' || filter.type === 0) { + value = filter.value; + } else if ((filter.type === 'NUMBER' || filter.type === 1) && !isNaN(filter.value)) { + value = Number(filter.value); + } else if (filter.type === 'BOOLEAN' || filter.type === 2) { + if 
(filter.value === 'true') { + value = true; + } else if (filter.value === 'false') { + value = false; + } + } else if (filter.type === 'ARRAY' || filter.type === 4) { + try { + value = JSON.parse(filter.value); + } catch (err: any) { + // to handle JSON string parse error + if (err.message.indexOf('Unexpected token') > -1) { + value = JSON.parse(JSON.stringify(filter.value)); + } else { + throw err; + } + } + } else if (filter.type === 'DATE' || filter.type === 3) { + value = (new Date(filter.value)).getTime(); + } + + for (let i = 0; i < operatorList.length; i++) { + if (_.isArray(temp)) { + temp = _.find(temp, operatorList[i]); + } else { + temp = temp[operatorList[i]]; + } + if (i === (operatorList.length - 1)) { + // push for final element in the operatorList array + if (filter.operation === 'eq' || filter.operation === 0) { + if (_.isArray(temp)) { + temp.push({ [filter.field]: value }); + } else { + temp[operatorList[i]].push({ [filter.field]: value }); + } + } else if (filter.operation === 'neq' || filter.operation === 8) { + if (_.isArray(temp)) { + temp.push({ [filter.field]: { $not: { $eq: value } } }); + } else { + temp[operatorList[i]].push({ [filter.field]: { $not: { $eq: value } } }); + } + } else { + let opValue; + if (typeof filter.operation === 'string' || filter.operation instanceof String) { + opValue = filter.operation; + } else if (Number.isInteger(filter.operation)) { + opValue = filterOperationMap.get(filter.operation); + } + const op = `$${opValue}`; + if (_.isArray(temp)) { + temp.push({ [filter.field]: { [op]: value } }); + } else { + temp[operatorList[i]].push({ [filter.field]: { [op]: value } }); + } + } + } + } + return obj; +}; + +/** + * Inserts the new operator into obj iterating throught the operator list and updates obj + * @param {obj} obj Converted filter object + * @param {operatorList} operatorList operator list + * @param {operatorNew} operatorNew new operator + */ +const insertNewOpAndUpdateObj = (obj: any, operatorList: any, 
operatorNew: any) => { + let pos = _.clone(obj); + for (let i = 0; i < operatorList.length; i++) { + if (_.isArray(pos)) { + pos = _.find(pos, operatorList[i]); + } else { + pos = pos[operatorList[i]]; + } + // push new operator after iterating to the last element in operatorList + if (i === (operatorList.length - 1)) { + pos.push({ [operatorNew]: [] }); + } + } + return obj; +}; + +/** + * toTraversalFilterObject takes input contained in the proto structure defined in resource_base proto + * and converts it into Object understandable by the underlying DB implementation in chassis-srv + * @param {*} input Original filter input object + * @param {*} obj converted filter objected passed recursively + * @param {*} operatorList operatorlist updated and passed recursively + */ +export const toTraversalFilterObject = (input: any, obj?: any, operatorList?: string[]) => { + // since toObject method is called recursively we are not adding the typing to input parameter + let filters; + if (input && !_.isEmpty(input.filters)) { + filters = input.filters; + } else { + filters = input; + } + // use operator provided in input + if (input.operator) { + filters.operator = input.operator; + } + // by default use 'and' operator if no operator is specified + if (filters && _.isArray(filters.filters) && !filters.operator) { + filters.operator = 'and'; + } + if (!obj) { + obj = {}; + } + if (_.isArray(filters.filters) && filters.filters.length > 0) { + let operatorValue; + if (typeof filters.operator === 'string' || filters.operator instanceof String) { + operatorValue = filters.operator; + } else if (Number.isInteger(filters.operator)) { + operatorValue = filterOperatorMap.get(filters.operator); + } + const newOperator = `$${operatorValue}`; + if (operatorList && newOperator) { + // insert obj with new operator + obj = insertNewOpAndUpdateObj(obj, operatorList, newOperator); + operatorList.push(newOperator); + } else { + operatorList = [newOperator]; + obj[newOperator] = []; + } + // 
pass operatorList and obj recursively + obj = toTraversalFilterObject(filters.filters, obj, operatorList); + } else if (_.isArray(filters)) { + if (!operatorList) { + const operator = input.operator ? `$${input.operator}` : '$and'; + operatorList = [operator]; + } + if (_.isEmpty(obj)) { + const operator = input.operator ? `$${input.operator}` : '$and'; + obj = { [operator]: [] }; + } + for (const filterObj of filters) { + obj = toTraversalFilterObject(filterObj, obj, operatorList); + } + } else if (filters.field && (filters.operation || filters.operation === 0) && filters.value != undefined) { + // object contains field, operation and value, update it on obj using convertFilterToObject() + if (!operatorList) { + const operator = input.operator ? `$${input.operator}` : '$and'; + operatorList = [operator]; + } + if (_.isEmpty(obj)) { + const operator = filters.operator ? `$${filters.operator}` : '$and'; + obj = { [operator]: [] }; + } + obj = convertFilterToObject(filters, obj, operatorList); + } + return obj; +}; + +/** + * Auto-casting reference value by using native function of arangoDB + * + * @param {string} key + * @param {object} value - raw value optional + * @return {object} interpreted value + */ +export const autoCastKey = (key: any, value?: any): any => { + if (_.isDate(value)) { // Date + return `DATE_TIMESTAMP(v.${key})`; + } + return 'v.' 
+ key; +}; + +/** + * Auto-casting raw data + * + * @param {object} value - raw value + * @returns {any} interpreted value + */ +export const autoCastValue = (value: any): any => { + if (_.isArray(value)) { + return value.map(value => value.toString()); + } + if (_.isString(value)) { // String + return JSON.stringify(value); + } + if (_.isBoolean(value)) { // Boolean + return Boolean(value); + } + if (_.isNumber(value)) { + return _.toNumber(value); + } + if (Long.isLong(value)) { + return (value as Long).toNumber(); + } + if (_.isDate(value)) { // Date + return new Date(value); + } + return value; +}; + + +/** + * Links children of filter together via a comparision operator. + * @param {any} filter + * @param {string} op comparision operator + * @return {any} query template string and bind variables + */ +export const buildComparison = (filter: any, op: string, root?: boolean): any => { + const ele = _.map(filter, (e) => { + if (!_.isArray(e)) { + e = [e]; + } + e = buildGraphFilter(e, root); + return e.q; + }); + + let q = '( '; + for (let i = 0; i < ele.length; i += 1) { + if (i == ele.length - 1) { + q = `${q} ${ele[i]} )`; + } else { + q = `${q} ${ele[i]} ${op} `; + } + } + return { q }; +}; + +/** + * Auto-casting reference value by using native function of arangoDB + * + * @param {string} key + * @param {object} value - raw value optional + * @return {object} interpreted value + */ +export const autoCastRootKey = (key: any, value?: any): any => { + if (_.isDate(value)) { // Date + return `DATE_TIMESTAMP(obj.${key})`; + } + return 'obj.' + key; +}; + +/** + * Creates a filter key, value. + * When the value is a string, boolean, number or date a equal comparision is created. + * Otherwise if the key corresponds to a known operator, the operator is constructed. 
+ * @param {string} key
+ * @param {string|boolean|number|date|object} value
+ * @return {String} query template string
+ */
+export const buildGraphField = (key: any, value: any, root?: boolean): string => {
+ let autoCastKeyFunction;
+ if (!root) {
+ autoCastKeyFunction = autoCastKey;
+ } else {
+ autoCastKeyFunction = autoCastRootKey;
+ }
+ if (_.isString(value) || _.isBoolean(value) || _.isNumber(value) || _.isDate(value)) {
+ return autoCastKeyFunction(key, value) + ' == ' + autoCastValue(value);
+ }
+ if (!_.isNil(value.$eq)) {
+ return autoCastKeyFunction(key, value) + ' == ' + autoCastValue(value.$eq);
+ }
+ if (!_.isNil(value.$gt)) {
+ return autoCastKeyFunction(key, value) + ' > ' + autoCastValue(value.$gt);
+ }
+ if (!_.isNil(value.$gte)) {
+ return autoCastKeyFunction(key, value) + ' >= ' + autoCastValue(value.$gte);
+ }
+ if (!_.isNil(value.$lt)) {
+ return autoCastKeyFunction(key, value) + ' < ' + autoCastValue(value.$lt);
+ }
+ if (!_.isNil(value.$lte)) {
+ return autoCastKeyFunction(key, value) + ' <= ' + autoCastValue(value.$lte);
+ }
+ if (!_.isNil(value.$ne)) {
+ return autoCastKeyFunction(key, value) + ' != ' + autoCastValue(value.$ne);
+ }
+ if (value.$inVal) {
+ return autoCastValue(value.$inVal) + ' IN ' + autoCastKeyFunction(key, value);
+ }
+ if (value.$in) {
+ if (_.isString(value.$in)) {
+ // if it is a field which should be an array
+ // (useful for querying within a document list-like attribute)
+ return autoCastValue(value.$in) + ' IN ' + autoCastKeyFunction(key);
+ }
+ // assuming it is a list of provided values
+ return autoCastKeyFunction(key, value) + ' IN ' + autoCastValue(value.$in);
+ }
+ if (value.$nin) {
+ return autoCastKeyFunction(key, value) + ' NOT IN ' + autoCastValue(value.$nin);
+ }
+ if (value.$iLike) {
+ // @param 'true' is for case insensitive
+ return 'LOWER(' + autoCastKeyFunction(key, value) + ') LIKE ' + autoCastValue(value.$iLike.toLowerCase());
+ }
+ if (!_.isNil(value.$not)) {
+ const temp = buildGraphField(key, value.$not, root);
+ return
`!(${temp})`; + } + if (_.has(value, '$isEmpty')) { + // will always search for an empty string + return autoCastKeyFunction(key, '') + ' == ' + autoCastValue(''); + } + throw new Error(`unsupported operator ${_.keys(value)} in ${key}`); +}; + +/** + * Build ArangoDB query based on filter. + * @param {Object} filter key, value tree object + * @return {any} query template string and bind variables + */ +export const buildGraphFilter = (filter: any, root?: boolean): any => { + if (!root) { + root = false; + } + if (filter.length > 0) { + let q: any = ''; + let multipleFilters = false; + for (const eachFilter of filter) { + _.forEach(eachFilter, (value, key) => { + switch (key) { + case '$or': + if (!multipleFilters) { + if (_.isEmpty(value)) { + q = true; + } else { + q = buildComparison(value, '||', root).q; + } + + multipleFilters = true; + } else { + q = q + '&& ' + buildComparison(value, '||', root).q; + } + break; + case '$and': + if (!multipleFilters) { + if (_.isEmpty(value)) { + q = false; + } else { + q = buildComparison(value, '&&', root).q; + } + multipleFilters = true; + } else { + q = q + '&& ' + buildComparison(value, '&&', root).q; + } + break; + default: + if (_.startsWith(key, '$')) { + throw new Error(`unsupported query operator ${key}`); + } + if (!multipleFilters) { + q = buildGraphField(key, value, root); + multipleFilters = true; + } else { + q = q + ' && ' + buildGraphField(key, value, root); + } + break; + } + }); + } + return { q }; + } +}; + +/** + * Find's the list of entities from edge definition config depending on the direction + * recursively + * @param collection - root collection / start vertex + * @param edgeDefConfig - edge definition cofnig + * @param direction - direction OUTBOUND / INBOUND + * @param entitiesList - result of entities in the graph of edge definition config + */ +export const recursiveFindEntities = (collection: any, edgeDefConfig: any, direction: any, entitiesList: any) => { + if 
(entitiesList.includes(collection)) { + return; + } + entitiesList.push(collection); + let items = []; + if (direction === Direction.OUTBOUND) { + items = edgeDefConfig.filter((col: any) => col.from === collection); + } else if (direction === Direction.INBOUND) { + items = edgeDefConfig.filter((col: any) => col.to === collection); + } + for (const item of items) { + if (direction === Direction.OUTBOUND) { + recursiveFindEntities(item.to, edgeDefConfig, direction, entitiesList); + } else if (direction === Direction.INBOUND) { + recursiveFindEntities(item.from, edgeDefConfig, direction, entitiesList); + } + } + return entitiesList; +}; + +/** + * Build limit and offset filters. + * @param {limit} limit + * @param {offset} offset + * @return {String} string limit filter + */ +export const buildGraphLimiter = (limit?: number, offset?: number): string => { + // LIMIT count + // LIMIT offset, count + if (!limit) { + limit = 1000; + } + if (!_.isNil(limit)) { + if (!_.isNil(offset)) { + return `LIMIT ${offset}, ${limit}`; + } + return `LIMIT ${limit}`; + } + return ''; +}; + +/** + * Build sort filter. 
 * @param {Object} sort sort options, a map of field name -> 'ASC'|'DESC'
 * @return {any} AQL SORT clause string, or '' when no sort was requested
 */
export const buildGraphSorter = (sortList: any): any => {
  if (_.isNil(sortList) || _.isEmpty(sortList)) {
    return '';
  }

  // normalize the sort keys so root-level fields address the traversal vertex
  const sort = _.mapKeys(sortList, (value, key) => {
    return autoCastRootKey(key);
  });
  let sortKeysOrder = '';
  let i = 1;
  const objLength = Object.keys(sort).length;
  for (const key in sort) {
    if (objLength == i) {
      // Do not append ',' for the last element
      sortKeysOrder = `${sortKeysOrder} ${key} ${sort[key]} `;
    } else {
      sortKeysOrder = `${sortKeysOrder} ${key} ${sort[key]},`;
    }
    i += 1;
  }
  return 'SORT ' + sortKeysOrder;
};

/**
 * Translates proto graph filters into an AQL association FILTER clause for a
 * graph traversal. Filters addressing the root (traversal) collection are
 * returned separately as `rootEntityFilter`; filters addressing other
 * entities or edges are OR-combined into `associationFilter`, and entities
 * reachable in the edge configuration but not mentioned by any filter are
 * re-admitted with an `_id LIKE` clause so they are not filtered out.
 * @return {any} { rootEntityFilter, associationFilter }
 */
export const createGraphsAssociationFilter = (filters: GraphFilters[],
  direction: Direction, traversalCollectionName: string, edgeDefConfig: any, filter: string): any => {
  let filterObj = [];
  let filteredEntities = []; // used to find difference from graph edgeDefConfig and add missing entities to custom filter
  let completeEntities = [];
  let rootEntityFilter;
  // convert the filter from proto structure (field, operation, value and operand) to {field: value } mapping
  if (filters && !_.isEmpty(filters)) {
    if (!_.isArray(filters)) {
      filters = [filters];
    }
    for (const eachFilter of filters) {
      // filters on the root collection are handled separately by the caller
      if (eachFilter.entity && eachFilter.entity === traversalCollectionName) {
        rootEntityFilter = toTraversalFilterObject(eachFilter);
        continue;
      }
      const traversalFilterObj = toTraversalFilterObject(eachFilter);
      if (eachFilter.entity && eachFilter.entity != traversalCollectionName) {
        filteredEntities.push(eachFilter.entity);
        traversalFilterObj.entity = eachFilter.entity;
      } else if (eachFilter.edge) {
        // depending on direction, an edge filter implicitly targets the
        // entity at the far end of that edge
        const entityConnectedToEdge = edgeDefConfig.filter((e: any) => e.collection === eachFilter.edge);
        if (entityConnectedToEdge?.length === 1) {
          if (direction === Options_Direction.OUTBOUND) {
            filteredEntities.push(entityConnectedToEdge[0].to);
          } else if (direction === Options_Direction.INBOUND) {
            filteredEntities.push(entityConnectedToEdge[0].from);
          }
        }
        traversalFilterObj.edge = eachFilter.edge;
      }
      filterObj.push(traversalFilterObj);
    }
  }

  if (!_.isArray(filterObj)) {
    filterObj = [filterObj];
  }

  // all entities reachable from the root via the edge config — used below to
  // re-admit entities the user did not filter on
  if (filterObj?.length > 0) {
    completeEntities = recursiveFindEntities(traversalCollectionName, edgeDefConfig, direction, []);
  }

  // construct AQL custom filter based on filterObj using buildFilter api
  let customFilter = '';
  let rootCollectionFilter = '';
  if (filterObj && filterObj.length > 0) {
    for (let i = 0; i < filterObj.length; i++) {
      let entity = '';
      let edge = '';
      // pull the routing keys off the filter object before rendering it,
      // so they do not leak into the generated AQL condition
      if (filterObj[i].entity) {
        entity = filterObj[i].entity;
        delete filterObj[i].entity;
      } else if (filterObj[i].edge) {
        edge = filterObj[i].edge;
        delete filterObj[i].edge;
      }
      let filterString = buildGraphFilter([filterObj[i]]).q;
      if (typeof filterString === 'string' &&
        filterString.startsWith('(') && filterString.endsWith(')')) {
        // scope the rendered condition to the vertex (v) or edge (e)
        // documents of the targeted collection via an _id prefix match
        if (entity) {
          filterString = filterString.substring(0, 1) + ` v._id LIKE "${entity}%" && ` + filterString.substring(1);
          if (traversalCollectionName && entity === traversalCollectionName) {
            rootCollectionFilter = filterString;
          }
        } else if (edge) {
          filterString = filterString.substring(0, 1) + ` e._id LIKE "${edge}%" && ` + filterString.substring(1);
        }
      }
      if (i === filterObj.length - 1) {
        customFilter = customFilter + filterString;
      } else {
        customFilter = customFilter + filterString + ' || ';
      }
    }
  }

  if (customFilter) {
    filter = filter + ` FILTER ${customFilter}`;
    // add missing entities to FILTER query
    filteredEntities = filteredEntities.sort();
    completeEntities = completeEntities.sort();
    // drop entities that were explicitly filtered; whatever remains in
    // completeEntities had no user filter and must be re-admitted
    if (!_.isEqual(filteredEntities, completeEntities)) {
      for (const removeEntity of _.intersection(filteredEntities, completeEntities)) {
        completeEntities.splice(completeEntities.indexOf(removeEntity), 1);
      }
    }
    // AQL query for missing
entities + if (completeEntities && completeEntities.length > 0) { + for (const missingEntity of completeEntities) { + filter = filter + ` || ( v._id LIKE "${missingEntity}%" )`; + } + } + } + return { rootEntityFilter, associationFilter: filter }; +}; diff --git a/packages/chassis-srv/src/database/provider/nedb/index.ts b/packages/chassis-srv/src/database/provider/nedb/index.ts new file mode 100644 index 00000000..40782544 --- /dev/null +++ b/packages/chassis-srv/src/database/provider/nedb/index.ts @@ -0,0 +1,385 @@ +import Datastore from 'nedb'; +import * as _ from 'lodash'; +import { Logger } from '@restorecommerce/logger'; + +/** + * Converts unsupported functions to regexp. + * @param {object} filter query filter + * @return {object} the filter querys which are not supported by nedb converted to regexp. + */ +const convertToRegexp = (filter: any): any => { + const f = filter; + _.forEach(f, (value, key) => { + if (value.$startswith) { + f[key] = { + $regex: new RegExp(`^${value.$startswith}.*$`, 'i'), + }; + } else if (value.$endswith) { + f[key] = { + $regex: new RegExp(`^.*${value.$endswith}$`, 'i'), + }; + } else if (_.has(value, '$isEmpty')) { + f[key] = { + $regex: new RegExp(`^$`), + }; + } else if (_.has(value, '$iLike')) { + // neDB does not have ILIKE (LIKE with ignore case sensitive) + // e.g.: convert %sOrT% => to /sort/ and find all fields + // whose name contain the substring 'sort' using the regular expression + const iLikeVal = f[key].$iLike.slice(1, -1).toLowerCase(); + // convert sort => to regexp /sort/ + f[key] = new RegExp(iLikeVal); + } else if (_.isObject(value)) { + f[key] = convertToRegexp(value); + } + }); + return f; +}; + +/** + * Construct or operator. + * @param {Object} options the or statement + * example: { $or: [{ planet: 'Earth' }, { planet: 'Mars' }] } + * @param {string} name the field name the comparison is based on. + * @return {Object} NeDB or operator query filter. 
/**
 * Construct an NeDB `$or` query from one value or a list of values.
 * example result: { $or: [{ planet: 'Earth' }, { planet: 'Mars' }] }
 * @param {Object} options single value or array of values to match
 * @param {string} name the field name the comparison is based on
 * @return {Object} NeDB `$or` operator query filter
 */
const buildOrQuery = (options: any, name: string): object => {
  const values = Array.isArray(options) ? options : [options];
  return {
    $or: values.map((value: any) => ({ [name]: value })),
  };
};

/**
 * NeDB database provider.
 */
class NedbProvider {
  collections: any;
  /**
   * @param {Object} collections a map, collection name mapped to store
   */
  constructor(collections: any) {
    this.collections = collections;
  }

  /**
   * Insert documents into database.
   *
   * @param {String} collection Collection name
   * @param {Object|array.Object} documents A single or multiple documents.
   */
  async insert(collection: string, documents: any): Promise<any> {
    const collections = this.collections;
    if (!_.isArray(documents)) {
      documents = [documents];
    }
    // clone so the caller's documents are not mutated by the _id handling
    const docs = _.cloneDeep(documents);
    for (const doc of docs) {
      _.set(doc, '_id', doc.id);
    }
    return new Promise((resolve, reject) => {
      collections[collection].insert(docs, (err: any, newdocs: any) => {
        if (err) {
          // errors are reported in-band as an error-document array
          resolve([{
            error: true,
            errorMessage: err.message
          }]);
        } else {
          for (const newdoc of newdocs) {
            _.unset(newdoc, '_id');
          }
          resolve(newdocs);
        }
      });
    });
  }

  /**
   * Find documents based on filter.
   *
   * @param {String} collection Collection name
   * @param {Object} filter Key, value Object
   * @param {Object} options options.limit, options.offset
   * @return {array.Object} A list of found documents.
+ */ + async find(collection: string, filter: object = {}, options: any = {}): Promise { + const fil = convertToRegexp(filter || {}); + let q = this.collections[collection].find(fil, options.fields); + if (options.offset) { + q = q.skip(options.offset); + } + if (options.limit) { + q = q.limit(options.limit); + } + if (options.sort) { + q = q.sort(options.sort); + } + + const result = new Promise((resolve, reject) => { + q.exec((err: any, docs: any) => { + // docs + if (err) { + reject(err); + } else { + _.map(docs, (doc) => { + if (_.isNil(doc.id)) { + _.set(doc, '_id', doc._id); + } + _.unset(doc, '_id'); + }); + resolve(docs); + } + }); + }); + return result; + } + + /** + * Find documents by id (_key). + * + * @param {String} collection Collection name + * @param {String|array.String} identifications A single ID or multiple IDs. + * @return {array.Object} A list of found documents. + */ + async findByID(collection: string, identifications: any): Promise { + let ids = identifications; + if (!_.isArray(identifications)) { + ids = [identifications]; + } + const q = buildOrQuery(ids, 'id'); + const collections = this.collections; + const result = new Promise((resolve, reject) => { + collections[collection].find(q).exec((err: any, docs: any) => { + if (docs) { + const l = docs.length; + for (let i = 0; i < l; i += 1) { + _.unset(docs[i], '_id'); + } + resolve(docs); + } else if (err) { + reject(err); + } + }); + }); + return result; + } + + /** + * Find documents by filter and updates them with document. + * + * @param {String} collection Collection name + * @param {Object} filter Key, value Object + * @param {Object} document A document patch. 
+ */ + async update(collection: string, document: any): Promise { + const collections = this.collections; + if (_.isArray(document)) { + document = document[0]; + } + const obj: any = { + $set: {}, + }; + Object.keys(document).forEach((key) => { + obj.$set[key] = document[key]; + }); + const filter = { id: document.id }; // construct filter using document ids + const fil = convertToRegexp(filter || {}); + const updatedDocs = new Promise((resolve, reject) => { + collections[collection].update(fil, obj, { multi: true, returnUpdatedDocs: true }, (err: any, numReplaced: any, updatedDocs: any) => { + if (err) { + resolve(err); + } else { + resolve(updatedDocs); + } + }); + }); + if (_.isEmpty(updatedDocs)) { + // document not found for update + return [{ + error: true, + errorMessage: 'document not found' + }]; + } + return _.map(updatedDocs, (doc) => { + _.unset(doc, '_id'); + return doc; + }); + } + + /** + * Find each document based on it's key and update it. + * If the document does not exist it will be created. + * + * @param {String} collection Collection name + * @param {Object|Array.Object} documents + */ + async upsert(collection: string, documents: any): Promise { + const collections = this.collections; + let docs = _.cloneDeep(documents); + if (!_.isArray(docs)) { + docs = [docs]; + } + const results = []; + for (const doc of docs) { + _.set(doc, '_id', doc.id); + const result = new Promise((resolve, reject) => { + collections[collection].update({ _id: doc._id }, + doc, + { upsert: true, returnUpdatedDocs: true }, + (err: any, numReplaced: any, upserted: any) => { + if (err) { + reject(err); + } + resolve(upserted); + }); + }); + results.push(await result); + } + return _.map(results, (doc) => { + _.unset(doc, '_id'); + return doc; + }); + } + + /** + * Delete all documents selected by filter. 
+ * + * @param {String} collection Collection name + * @param {Object} filter + */ + async delete(collection: string, ids: string[]): Promise { + const collections = this.collections; + let fil = {}; + const deleteResponse: any[] = []; + for (const id of ids) { + collections[collection].find({ id }, (err: any, docs: any) => { + if (_.isEmpty(docs)) { + deleteResponse.push({ + error: true, + errorMessage: 'Document not found' + }); + } else { + deleteResponse.push(docs[0]); + } + }); + } + if (_.isEmpty(ids)) { + fil = {}; // if no ids are provided delete all documents and filter for this is {} + } else { + fil = { id: {$in: ids} }; + } + const numRemoved = await new Promise((resolve, reject) => { + collections[collection].remove(fil, { multi: true }, (err: any, numRemoved: any) => { + if (err) { + throw new Error(err); + } else { + resolve(numRemoved); + } + }); + }); + return deleteResponse; + } + + /** + * Count all documents selected by filter. + * + * @param {String} collection Collection name + * @param {Object} filter + */ + async count(collection: string, filter: object = {}): Promise { + const collections = this.collections; + const fil = convertToRegexp(filter || {}); + const result = new Promise((resolve, reject) => { + collections[collection].count(fil, (err: any, count: any) => { + if (err) + reject(err); + resolve(count); + }); + }); + return result; + } + + /** + * When calling without a collection name, + * delete all documents in all collections in the database. + * When providing a collection name, + * delete all documents in specified collection in the database. + * @param [string] collection Collection name. + */ + async truncate(collection: string): Promise { + if (_.isNil(collection)) { + const collections = _.keys(this.collections); + for (let i = 0; i < collections.length; i += 1) { + await this.delete(collections[i], []); + } + } else { + await this.delete(collection, []); + } + } +} + +/** + * Open all configured NeDB datastores. 
+ * @param {Object} config + * example: + * { + * "provider": "nedb", + * "collections": { + * "notifications": {} + * } + * } + * @param {Logger} logger + * @return {Object} key, value map containing collection names + * as keys and the corresponding NeDB datastores as values. + */ +const loadDatastores = async (config: any, logger: Logger): Promise => { + if (_.isNil(config.collections)) { + throw new Error('missing collection config value'); + } + const collections: any = {}; + const colNames = _.keys(config.collections); + for (let i = 0; i < colNames.length; i += 1) { + const name = colNames[i]; + const conf = config.collections[name]; + if (conf.filename) { + logger.verbose(`collection ${name} has filename ${conf.filename}`); + conf.autoload = true; + const load = () => { + return (cb: any) => { + conf.onload = cb; + collections[name] = new Datastore(conf); + }; + }; + await load(); + } else { + collections[name] = new Datastore(conf); + } + } + return collections; +}; + +/** + * Create a new NeDB provider. 
+ * + * @param {Object} conf NeDB configuration + * @param {Object} [logger] Logger + * @return {NedbProvider} NeDB provider + */ +export const create = async (conf: object, logger: any): Promise => { + let log = logger; + if (_.isNil(logger)) { + log = { + verbose: () => { }, + }; + } + const collections = await loadDatastores(conf, log); + return new NedbProvider(collections); +}; \ No newline at end of file diff --git a/packages/chassis-srv/src/health/index.ts b/packages/chassis-srv/src/health/index.ts new file mode 100644 index 00000000..a93b4213 --- /dev/null +++ b/packages/chassis-srv/src/health/index.ts @@ -0,0 +1,93 @@ +import { CommandInterface } from '../command-interface/index.js'; +import { ServiceConfig } from '@restorecommerce/service-config'; +import { createClient } from '@restorecommerce/grpc-client'; +import { Logger } from '@restorecommerce/logger'; +import { + HealthClient, + HealthDefinition, +} from '@restorecommerce/rc-grpc-clients/dist/generated/grpc/health/v1/health.js'; +import { + DeepPartial, + HealthCheckRequest, + HealthCheckResponse, + HealthCheckResponse_ServingStatus, + HealthServiceImplementation +} from '@restorecommerce/rc-grpc-clients/dist/generated-server/grpc/health/v1/health.js'; +import { createChannel } from 'nice-grpc'; + +export interface HealthOptions { + cfg?: ServiceConfig; + dependencies?: string[]; + logger?: Logger; + readiness?: () => Promise; +} + +export class Health implements HealthServiceImplementation { + + readonly ci: CommandInterface; + readonly opts?: HealthOptions; + readonly endpoints?: { [key: string]: HealthClient }; + + constructor(ci: CommandInterface, opts?: HealthOptions) { + this.ci = ci; + this.opts = opts; + + if (this.opts) { + if (this.opts.dependencies && this.opts.cfg && this.opts.logger) { + this.endpoints = {}; + const clientCfg = this.opts.cfg.get('client'); + for (const dependency of this.opts.dependencies) { + const dep = clientCfg?.[dependency] ?? 
this.opts.cfg.get(dependency); + if (!dep) { + throw new Error(`Dependency '${ dependency }' not provided in config!`); + } + + const channel = createChannel(dep.address); + this.endpoints[dep.endpoint ?? dependency] = createClient({ + ...dep, + logger: this.opts.logger + }, HealthDefinition, channel); + } + } + } + } + + async check(request: HealthCheckRequest, context: any): Promise> { + const service = (request && request.service) || 'liveness'; + + if (service === 'readiness') { + if (this.ci.redisClient && !this.ci.redisClient.ping()) { + return {status: HealthCheckResponse_ServingStatus.NOT_SERVING}; + } + + if (this.opts) { + try { + if (this.endpoints) { + for (const service of Object.keys(this.endpoints)) { + const response = await this.endpoints[service].check({}); + if ('error' in response && (response as any).error) { + this.opts.logger.warn('Readiness error from ' + service + ':', response); + return {status: HealthCheckResponse_ServingStatus.NOT_SERVING}; + } + } + } + + if (this.opts.readiness && !await this.opts.readiness()) { + return {status: HealthCheckResponse_ServingStatus.NOT_SERVING}; + } + } catch (e) { + return {status: HealthCheckResponse_ServingStatus.NOT_SERVING}; + } + } + } + + const response = await this.ci.check({}); + + if (!('status' in response)) { + return {status: HealthCheckResponse_ServingStatus.UNKNOWN}; + } + + return response; + } + +} diff --git a/packages/chassis-srv/src/index.ts b/packages/chassis-srv/src/index.ts new file mode 100644 index 00000000..ccdec773 --- /dev/null +++ b/packages/chassis-srv/src/index.ts @@ -0,0 +1,38 @@ +import * as cache from './cache/index.js'; +export { cache }; +import * as config from './config/index.js'; +export { config }; +import * as database from './database/index.js'; +import { DatabaseProvider, GraphDatabaseProvider } from './database/index.js'; +export { database, DatabaseProvider, GraphDatabaseProvider }; +import { create as arango } from './database/provider/arango/index.js'; 
+export { arango }; +import { create as nedb } from './database/provider/nedb/index.js'; +export { nedb }; + +import { Server as Server } from './microservice/server.js'; +export { Server }; + +import * as errors from './microservice/errors.js'; +export { errors }; +import * as grpc from './microservice/transport/provider/grpc/index.js'; +export { grpc }; + +export { Server as grpcServer } from './microservice/transport/provider/grpc/index.js'; +import { buildReflectionService } from './microservice/transport/provider/grpc/reflection.js'; +export { buildReflectionService }; + +import { CommandInterface as CommandInterface } from './command-interface/index.js'; +export { CommandInterface }; + +import { OffsetStore } from './offsets/index.js'; +export { OffsetStore }; + +import { Health } from './health/index.js'; +export { Health }; + +import { toTraversalFilterObject } from './database/provider/arango/utils.js'; +export { toTraversalFilterObject }; + +import { TraversalResponse } from './database/provider/arango/interface.js'; +export { TraversalResponse }; diff --git a/packages/chassis-srv/src/microservice/errors.ts b/packages/chassis-srv/src/microservice/errors.ts new file mode 100644 index 00000000..45d7ff82 --- /dev/null +++ b/packages/chassis-srv/src/microservice/errors.ts @@ -0,0 +1,234 @@ +/** + * Canceled indicates the operation was cancelled (typically by the caller). + */ +export class Cancelled extends Error { + details: any; + name: string; + message: string; + constructor(details: any) { + super(); + this.name = this.constructor.name; + this.message = 'operation was cancelled'; + this.details = details; + } +} + +/** + * InvalidArgument indicates client specified an invalid argument. 
+ */ +export class InvalidArgument extends Error { + details: any; + name: string; + message: string; + constructor(details: any) { + super(); + this.name = this.constructor.name; + this.message = 'invalid argument'; + this.details = details; + } +} + +/** + * NotFound means some requested entity was not found. + */ +export class NotFound extends Error { + details: any; + name: string; + message: string; + constructor(details: any) { + super(); + this.name = this.constructor.name; + this.message = 'not found'; + this.details = details; + } +} + +/** + * AlreadyExists means an entity the operation attempted to create + * already exists. + */ +export class AlreadyExists extends Error { + details: any; + name: string; + message: string; + constructor(details: any) { + super(); + this.name = this.constructor.name; + this.message = 'already exists'; + this.details = details; + } +} + +/** + * PermissionDenied indicates the caller does not have permission to + * execute the specified operation. + */ +export class PermissionDenied extends Error { + details: any; + name: string; + message: string; + constructor(details: any) { + super(); + this.name = this.constructor.name; + this.message = 'permission denied'; + this.details = details; + } +} + +/** + * Unauthenticated means the caller could not be authenticated. + */ +export class Unauthenticated extends Error { + details: any; + name: string; + message: string; + constructor(details: any) { + super(); + this.name = this.constructor.name; + this.message = 'unauthenticated'; + this.details = details; + } +} + +/** + * FailedPrecondition means the system is not in a state in which + * the operation can be executed. A precondition, for example a call + * to a different endpoint before this call is required. 
+ */ +export class FailedPrecondition extends Error { + details: any; + name: string; + message: string; + constructor(details: any) { + super(); + this.name = this.constructor.name; + this.message = 'failed precondition'; + this.details = details; + } +} + +/** + * Aborted indicates the operation was aborted because + * of transaction aborts or sequencer check failures. + */ +export class Aborted extends Error { + details: any; + name: string; + message: string; + constructor(details: any) { + super(); + this.name = this.constructor.name; + this.message = 'aborted'; + this.details = details; + } +} + +/** + * OutOfRange means one of the provided arguments is + * outside the range of the iterated data. + */ +export class OutOfRange extends Error { + details: any; + name: string; + message: string; + constructor(details: any) { + super(); + this.name = this.constructor.name; + this.message = 'out of range'; + this.details = details; + } +} + +/** + * Unimplemented means the endpoint is not implemented, + * not specified or not configured. + */ +export class Unimplemented extends Error { + details: any; + name: string; + message: string; + constructor(details: any) { + super(); + this.name = this.constructor.name; + this.message = 'unimplemented'; + this.details = details; + } +} + +/** + * ResourceExhausted indicates that a quota or storage is used up. + */ +export class ResourceExhausted extends Error { + details: any; + name: string; + message: string; + constructor(details: any) { + super(); + this.name = this.constructor.name; + this.message = 'resource exhausted'; + this.details = details; + } +} + +/** + * DeadlineExceeded means the operation expired before completion. + * It does not mean the operation did not complete. 
+ */ +export class DeadlineExceeded extends Error { + details: any; + name: string; + message: string; + constructor(details: any) { + super(); + this.name = this.constructor.name; + this.message = 'deadline exceeded'; + this.details = details; + } +} + +/** + * Internal indicates an uncaught or unhandled server error. + */ +export class Internal extends Error { + details: any; + name: string; + message: string; + constructor(details: any) { + super(); + this.name = this.constructor.name; + this.message = 'internal'; + this.details = details; + } +} + +/** + * Unavailable indicates that the service currently is not + * processing requests. + * This is mostlikly only a short condition. + */ +export class Unavailable extends Error { + details: any; + name: string; + message: string; + constructor(details: any) { + super(); + this.name = this.constructor.name; + this.message = 'unavailable'; + this.details = details; + } +} + +/** + * DataLoss indicates unrecoverable data loss or corruption. 
+ */ +export class DataLoss extends Error { + details: any; + name: string; + message: string; + constructor(details: any) { + super(); + this.name = this.constructor.name; + this.message = 'data loss'; + this.details = details; + } +} diff --git a/packages/chassis-srv/src/microservice/server.ts b/packages/chassis-srv/src/microservice/server.ts new file mode 100644 index 00000000..9eb6b6e2 --- /dev/null +++ b/packages/chassis-srv/src/microservice/server.ts @@ -0,0 +1,159 @@ +import { type Logger, createLogger } from '@restorecommerce/logger'; +import * as _ from 'lodash'; +import { EventEmitter } from 'events'; +import { BindConfig, grpcServer } from './transport/provider/grpc/index.js'; + +const transports: Record = {}; + +/** + * register transport provider + * + * @param {string} name transport provider identifier + * @param {constructor} provider transport provider constructor function + */ +export const registerTransport = (name: string, provider: any): void => { + transports[name] = provider; +}; + +// register included providers +registerTransport('grpc', grpcServer); + +/** + * Initializes all configured transports. 
+ * @param {object} config Configuration + * @param {object} logger + * @return {object} Transport + */ +const setupTransport = (config: any, logger: Logger): any => { + const transport: any = {}; + logger.debug('available transport providers', + Object.keys(transports).join(',')); + for (let i = 0; i < config.length; i += 1) { + const transportCfg = config[i]; + const providerName = transportCfg.provider; + if (_.isNil(providerName)) { + throw new Error('transport configuration without a provider'); + } + const transportName = transportCfg.name; + if (_.isNil(providerName)) { + throw new Error('transport configuration without a name'); + } + const TransportProvider = transports[providerName]; + if (_.isNil(TransportProvider)) { + throw new Error(`transport provider ${providerName} does not exist`); + } + const provider = new TransportProvider(transportCfg, logger); + transport[transportName] = provider; + } + logger.debug('using transports', Object.keys(transport).join(',')); + return transport; +}; + +/** + * Server is a microservice server chassis. + * It enables business logic to be accessed over transports and listen to events. + * Default event providers: 'kafka' + * Default transports: 'grpc' + * @class + */ +export class Server extends EventEmitter { + + config: any; + logger: Logger; + middleware: any; + transport: any; + + /** + * @constructor + * @param {object} config Server config. 
+ * @param {Logger} logger + */ + constructor(config?: any, logger?: Logger) { + super(); + if (_.isNil(config)) { + throw new Error('mising argument config'); + } + this.config = config; + + // logger + if (_.isNil(logger)) { + if (_.isNil(this.config.logger)) { + this.logger = createLogger(); + } else { + const loggerCfg = this.config.logger; + this.logger = createLogger(loggerCfg); + } + } else { + this.logger = logger; + } + + // services + this.logger.debug('setting up service endpoints'); + + // transports + this.logger.debug('setting up transports'); + try { + this.transport = setupTransport(this.config.transports, this.logger); + } catch (error: any) { + this.logger.error('setupTransports', { code: error.code, message: error.message, stack: error.stack }); + throw error; + } + } + + /** + * bind connects the service to configured transports. + * + * @param {string} name Service name. + * @param {BindConfig} bindConfig A business logic service. + */ + async bind(name: string, bindConfig: BindConfig): Promise { + if (_.isNil(name)) { + throw new Error('missing argument name'); + } + if (!_.isString(name)) { + throw new Error('argument name is not of type string'); + } + if (_.isNil(bindConfig)) { + throw new Error('missing argument bindConfig'); + } + + this.logger.debug('binding endpoints to transports'); + + const transportNames = Object.keys(this.transport); + const transport = this.transport; + for (let i = 0; i < transportNames.length; i += 1) { + const transportName = transportNames[i]; + const provider = transport[transportName]; + await provider.bind(bindConfig); + this.emit('bound', name, bindConfig, provider); + } + } + + /** + * start launches the server by starting transports and listening to events. 
+ */ + async start(): Promise { + const transportNames = Object.keys(this.transport); + for (let i = 0; i < transportNames.length; i += 1) { + const name = transportNames[i]; + const provider = this.transport[name]; + await provider.start(); + this.logger.info(`transport ${name} started`); + } + this.emit('serving', this.transport); + } + + /** + * Shuts down all transport provider servers. + */ + async stop(): Promise { + const transportNames = _.keys(this.transport); + for (let i = 0; i < transportNames.length; i += 1) { + const name = transportNames[i]; + if (this.transport[name].end) { + await this.transport[name].end(); + } + } + this.emit('stopped', this.transport); + } +} diff --git a/packages/chassis-srv/src/microservice/transport/provider/grpc/index.ts b/packages/chassis-srv/src/microservice/transport/provider/grpc/index.ts new file mode 100644 index 00000000..9a3d539c --- /dev/null +++ b/packages/chassis-srv/src/microservice/transport/provider/grpc/index.ts @@ -0,0 +1,93 @@ +import * as _ from 'lodash'; +import { type Logger } from '@restorecommerce/logger'; +import type { Server as GRPCServer, ServiceImplementation } from 'nice-grpc'; +import type { CompatServiceDefinition } from 'nice-grpc/lib/service-definitions'; +import { createServer } from 'nice-grpc'; +import { loggingMiddleware, metaMiddleware, tracingMiddleware, WithRequestID } from './middlewares.js'; + +/** + * Name of the transport + */ +export const NAME = 'grpc'; + +export interface BindConfig { + service: Service; + implementation: ServiceImplementation; +} + +/** + * Server transport provider. + * @class + */ +export class Server { + + config: any; + logger: Logger; + server: GRPCServer; + name: string; + isBound: boolean; + + /** + * Server is a gRPC transport provider for serving. + * + * @param {Object} config Configuration object. + * Requires properties: addr + * Optional properties: credentials.ssl.certs + * @param {Logger} logger Logger. 
   */
  constructor(config: any, logger: Logger) {
    // a logger is mandatory — middlewares and error paths all log through it
    if (_.isNil(logger)) {
      throw new Error('gRPC server transport provider requires a logger');
    }
    if (!_.has(config, 'addr')) {
      throw new Error('server is missing addr config field');
    }
    this.config = config;
    this.logger = logger;

    // middleware order matters: request-id tracing first, then metadata
    // pass-through, then request/response logging
    this.server = createServer(config?.channelOptions)
      .use(tracingMiddleware)
      .use(metaMiddleware)
      .use(loggingMiddleware(this.logger));

    this.name = NAME;
  }

  /**
   * bind maps the service to gRPC methods and binds the address.
   *
   * @param {BindConfig} config Service bind config.
   */
  bind(config: BindConfig): void {
    this.server.add(config.service, config.implementation);
  }

  /**
   * start launches the gRPC server and provides the service endpoints.
   * Idempotent: does nothing once the address has been bound.
   */
  async start(): Promise<any> {
    if (!this.isBound) {
      if (_.has(this.config, 'credentials.ssl')) {
        // TODO Re-enable
        // credentials = grpc.credentials.createSsl(
        //   this.config.credentials.ssl.certs);
      }
      await this.server.listen(
        this.config.addr
      ).catch(err => {
        this.logger.error('Error starting server', { message: err.message, code: err.code, stack: err.stack });
        throw err;
      });
      this.isBound = true;
    }
  }

  /**
   * end stops the gRPC server and no longer provides the service endpoints.
+ */ + async end(): Promise { + this.server.forceShutdown(); + } +} + +export { Server as grpcServer }; diff --git a/packages/chassis-srv/src/microservice/transport/provider/grpc/middlewares.ts b/packages/chassis-srv/src/microservice/transport/provider/grpc/middlewares.ts new file mode 100644 index 00000000..68279d13 --- /dev/null +++ b/packages/chassis-srv/src/microservice/transport/provider/grpc/middlewares.ts @@ -0,0 +1,99 @@ +import { CallContext, ServerError, ServerMiddlewareCall } from 'nice-grpc'; +import { isAbortError } from 'abort-controller-x'; +import { type Logger } from '@restorecommerce/logger'; +import { metadataPassThrough } from '@restorecommerce/grpc-client/dist/middleware.js'; +import { AsyncLocalStorage } from 'async_hooks'; +import { randomUUID } from 'crypto'; + +const tracingHeader = 'x-request-id'; + +export interface WithRequestID { + rid: string; +} + +export async function* tracingMiddleware( + call: ServerMiddlewareCall, + context: CallContext, +) { + const nextID = context.metadata.get(tracingHeader) || randomUUID(); + context.metadata?.set(tracingHeader, nextID); + return yield* call.next(call.request, { + ...context, + rid: nextID + }); +} + +export const loggingMiddleware = (logger: Logger) => { + return async function* ( + call: ServerMiddlewareCall, + context: CallContext & WithRequestID, + ) { + const {path} = call.method; + + logger.verbose(`[rid: ${context.rid}] received request to method ${path}`, call.request); + + try { + const response = yield* call.next(call.request, context); + logger.verbose(`[rid: ${context.rid}] request to method ${path} response sent`, {request: call.request}); + return response; + } catch (error: any) { + if (error instanceof ServerError) { + logger.error(`${context.rid} request to method ${path} server error`, { + message: error.details, + code: error.code + }); + } else if (isAbortError(error)) { + logger.error(`${context.rid} request to method ${path} cancel`, { + message: error.message, + stack: 
error.stack + }); + } else { + logger.error(`${context.rid} request to method ${path} error`, { + message: error.message, + stack: error.stack + }); + } + + return { + status: { + code: 500, + message: error.message + } + } as Awaited; + } + }; +}; + + +function bindAsyncGenerator( + store: AsyncLocalStorage, + generator: AsyncGenerator, +): AsyncGenerator { + const ctx = store.getStore(); + return { + next: () => store.run(ctx, generator.next.bind(generator)), + return: (args) => store.run(ctx, generator.return.bind(generator), args), + throw: (args) => store.run(ctx, generator.throw.bind(generator), args), + + [Symbol.asyncIterator]() { + return this; + }, + }; +} + +export async function* metaMiddleware( + call: ServerMiddlewareCall, + context: CallContext, +) { + const mapped: any = {}; + for (const [a, b] of context.metadata) { + mapped[a] = b; + } + + const val = JSON.stringify(mapped); + metadataPassThrough.enterWith(val); + + return yield* bindAsyncGenerator(metadataPassThrough, call.next(call.request, { + ...context, + })); +} diff --git a/packages/chassis-srv/src/microservice/transport/provider/grpc/reflection.ts b/packages/chassis-srv/src/microservice/transport/provider/grpc/reflection.ts new file mode 100644 index 00000000..98ce7ca4 --- /dev/null +++ b/packages/chassis-srv/src/microservice/transport/provider/grpc/reflection.ts @@ -0,0 +1,24 @@ +import { ServiceImplementation } from 'nice-grpc'; +import { + FileDescriptorSet, + FileDescriptorProto as FileDescriptorProtoGoogle +} from 'google-protobuf/google/protobuf/descriptor_pb.js'; +import { ServerReflection, } from 'nice-grpc-server-reflection'; +import { FileDescriptorProto } from '@restorecommerce/rc-grpc-clients/dist/generated/google/protobuf/descriptor.js'; +import { + IServerReflectionService +} from 'nice-grpc-server-reflection/lib/proto/grpc/reflection/v1alpha/reflection_grpc_pb.js'; + +export const buildReflectionService = (services: { + descriptor: any; + name?: string; +}[]): 
ServiceImplementation => { + const set = new FileDescriptorSet(); + const names: any[] = []; + services.forEach((service, i) => { + const serialized = FileDescriptorProto.encode(service.descriptor).finish(); + set.addFile(FileDescriptorProtoGoogle.deserializeBinary(serialized), i); + names.push(service.name || service.descriptor.name + '.Service'); + }); + return ServerReflection(set.serializeBinary(), names); +}; diff --git a/packages/chassis-srv/src/offsets/index.ts b/packages/chassis-srv/src/offsets/index.ts new file mode 100644 index 00000000..1e90879b --- /dev/null +++ b/packages/chassis-srv/src/offsets/index.ts @@ -0,0 +1,119 @@ +import { Events, Topic } from '@restorecommerce/kafka-client'; +import { createClient, RedisClientType } from 'redis'; +import { + type ServiceConfig +} from '@restorecommerce/service-config'; +import { + type Logger +} from '@restorecommerce/logger'; + +/** + * Stores the offsets of the provided topics to redis periodically + */ +export class OffsetStore { + // protected readonly topics: Record = {}; + protected readonly timerID: NodeJS.Timeout[] = []; + protected readonly prefix: string; + + constructor( + protected readonly kafkaEvents: Events, + protected readonly config: ServiceConfig, + protected readonly logger?: Logger, + protected readonly redisClient?: RedisClientType, + ) { + if (!kafkaEvents) { + logger?.error('No Kafka client was provided, offsets will not be stored to redis'); + return; + } + + if (!this.config.get('events:kafka:topics')) { + throw new Error('Kafka events configuration was not provided.'); + } + + this.prefix = this.config.get('events:kafka:kafka:clientId'); + const redisConfig = this.config.get('redis'); + if (!redisClient && redisConfig) { + redisConfig.database = this.config.get('redis:db-indexes:db-offsetStore') ?? 
0; + this.redisClient = createClient(redisConfig); + this.redisClient.on( + 'error', + (err: Error) => logger?.error('Redis Client Error in offsetstore', err) + ); + this.redisClient.connect().then( + () => logger?.info('Redis client connection successful for offsetstore') + ); + } + setTimeout(this.updateTopicOffsets.bind(this), 5000); + } + + /** + * updates the topic offset in redis periodically + * + */ + async updateTopicOffsets(): Promise { + // Iterate through the topics and updateOffsets periodically for each topic + // events.topic(TopicName) - gives the topic object + const kafkaCfg = this.config.get('events:kafka'); + const topicTypes = Object.keys(kafkaCfg.topics ?? {}); + for (let i = 0; i < topicTypes.length; i += 1) { + const topicType = topicTypes[i]; + const topicName = kafkaCfg.topics[topicType].topic; + + this.kafkaEvents.topic(topicName).then(topic => { + // this.topics[topicType] = topic; + this.timerID[i] = setInterval( + this.storeOffset.bind(this), + this.config.get('redis:offsetStoreInterval') ?? 1000, + topic, + topicName + ) as any; + }); + } + } + + /** + * stores the offset to redis + * @param {object} topic Topic object + * @param {object} redisClient + * @return {object} + */ + async storeOffset(topic: Topic, topicName: string): Promise { + // get the latest offset here each time and store it. 
+ const offsetValue = await topic.$offset(BigInt(-1)); + const redisKey = `${this.prefix}:${topicName}`; + this.redisClient.set(redisKey, offsetValue.toString(10)); + } + + /** + * get the offset value for the topic from redis + * @param {string} topic Topic name + * @return {object} + */ + async getOffset(topicName: string): Promise { + const redisKey = `${this.prefix}:${topicName}`; + const offsetValue = await this.redisClient.get(redisKey); + this.logger?.info( + 'The offset value retreived from redis for topic is:', + { topicName, offsetValue } + ); + if (!offsetValue) { + return BigInt(0); + } + return BigInt(offsetValue); + } + + /** + * stops the redis client + * @param {object} topic Topic object + * @param {object} redisClient + * @return {object} + */ + async stop(): Promise { + for (let i = 0; i < this.timerID.length; i += 1) { + clearInterval(this.timerID[i]); + } + if (this.redisClient) { + await this.redisClient.quit(); + } + } +} diff --git a/packages/chassis-srv/test/cache.spec.ts b/packages/chassis-srv/test/cache.spec.ts new file mode 100644 index 00000000..1d8757db --- /dev/null +++ b/packages/chassis-srv/test/cache.spec.ts @@ -0,0 +1,40 @@ +import * as should from 'should'; +import * as config from '../src/config/index.js'; +import { createLogger } from '@restorecommerce/logger'; +import * as cache from '../src/cache/index.js'; +import { it, describe, beforeEach } from 'vitest'; + +/* global describe it beforeEach */ + +describe('cache', () => { + let logger: any; + beforeEach(async () => { + await config.load(process.cwd() + '/test'); + const cfg = await config.get(); + logger = createLogger(cfg.get('logger')); + }); + describe('get', () => { + it('should return one store with the config for one store', + async () => { + const cfg = await config.get(); + const c = await cache.get(cfg.get('cache:one'), logger); + should.exist(c); + should.exist(c.get); + + await c.set('test', 'testvalue'); + const res = await c.get('test'); + 
res.should.equal('testvalue'); + }); + // it('should return one multiCaching store with the config for many stores', + // async () => { + // const cfg = await config.get(); + // const c = await cache.get(cfg.get('cache:many'), logger); + // should.exist(c); + // should.exist(c.get); + + // await c.set('test', 'testvalue'); + // const res = await c.get('test'); + // res.should.equal('testvalue'); + // }); + }); +}); diff --git a/packages/chassis-srv/test/cfg/config.json b/packages/chassis-srv/test/cfg/config.json new file mode 100644 index 00000000..13deaa16 --- /dev/null +++ b/packages/chassis-srv/test/cfg/config.json @@ -0,0 +1,187 @@ +{ + "logger": { + "console": { + "handleExceptions": false, + "level": "silly", + "colorize": true, + "prettyPrint": true + } + }, + "database": { + "arango": { + "provider": "arango", + "host": "127.0.0.1", + "port": 8529, + "database": "chassis-test", + "autoCreate": true, + "collections": [ + "test", + "user", + "address" + ], + "arangoSearch": [ + { + "collectionName": "users", + "path": "test/views/users_view.json" + }, + { + "collectionName": "addresss", + "path": "test/views/address_view.json" + } + ] + }, + "nedb": { + "provider": "nedb", + "collections": { + "test": {} + } + } + }, + "redis": { + "host": "localhost", + "port": 6379, + "offsetStoreInterval": 1000, + "db-indexes": { + "db-offsetStore": 0, + "db-subject": 4 + } + }, + "cache": { + "one": [ + { + "provider": "memory" + } + ] + }, + "graph": { + "graphName": "chassis-graph-test", + "edgeDefinitions": [ + { + "collection": "has", + "from": "person", + "to": "car" + }, + { + "collection": "belongs", + "from": "car", + "to": "place" + }, + { + "collection": "resides", + "from": "place", + "to": "state" + }, + { + "collection": "lives", + "from": "person", + "to": "state" + } + ] + }, + "client": { + "commandinterface": { + "address": "localhost:50051" + }, + "test": { + "address": "localhost:50051" + }, + "stream": { + "address": "localhost:50051" + }, + 
"reflection": { + "address": "localhost:50051" + } + }, + "events": { + "kafka": { + "logger": { + "console": { + "handleExceptions": false, + "level": "debug", + "colorize": true, + "prettyPrint": true + } + }, + "provider": "kafka", + "groupId": "restore-chassis-test-server", + "kafka": { + "clientId": "restore-chassis-test-server", + "brokers": [ + "localhost:29092" + ] + }, + "test-event": { + "messageObject": "test.TestEvent" + }, + "testCreated": { + "messageObject": "test.TestEvent" + }, + "restoreCommand": { + "messageObject": "io.restorecommerce.commandinterface.CommandRequest" + }, + "restoreResponse": { + "messageObject": "io.restorecommerce.commandinterface.CommandResponse" + }, + "healthCheckResponse": { + "messageObject": "io.restorecommerce.commandinterface.CommandResponse" + }, + "resetResponse": { + "messageObject": "io.restorecommerce.commandinterface.CommandResponse" + }, + "versionResponse": { + "messageObject": "io.restorecommerce.commandinterface.CommandResponse" + }, + "configUpdateCommand": { + "messageObject": "io.restorecommerce.commandinterface.CommandRequest" + }, + "configUpdateResponse": { + "messageObject": "io.restorecommerce.commandinterface.CommandResponse" + }, + "setApiKeyCommand": { + "messageObject": "io.restorecommerce.commandinterface.CommandRequest" + }, + "setApiKeyResponse": { + "messageObject": "io.restorecommerce.commandinterface.CommandResponse" + }, + "flushCacheCommand": { + "messageObject": "io.restorecommerce.commandinterface.CommandRequest" + }, + "flushCacheResponse": { + "messageObject": "io.restorecommerce.commandinterface.CommandResponse" + }, + "topics": { + "command": { + "topic": "io.restorecommerce.command", + "events": [ + "resetResponse", + "restoreResponse", + "versionResponse", + "configUpdateResponse", + "setApiKeyResponse", + "flushCacheResponse" + ] + }, + "test.resource": { + "topic": "test" + } + } + } + }, + "server": { + "logger": { + "console": { + "handleExceptions": false, + "level": "silly", 
+ "colorize": true, + "prettyPrint": true + } + }, + "transports": [ + { + "name": "grpcTest", + "provider": "grpc", + "addr": "localhost:50051" + } + ] + } +} \ No newline at end of file diff --git a/packages/chassis-srv/test/command.spec.ts b/packages/chassis-srv/test/command.spec.ts new file mode 100644 index 00000000..68751a35 --- /dev/null +++ b/packages/chassis-srv/test/command.spec.ts @@ -0,0 +1,456 @@ +// microservice chassis +import { CommandInterface, database, Server } from '../src/index.js'; +import * as should from 'should'; +import { createClient as createGrpcClient } from '@restorecommerce/grpc-client'; +import { Events, registerProtoMeta } from '@restorecommerce/kafka-client'; +import { createServiceConfig } from '@restorecommerce/service-config'; +import { createLogger } from '@restorecommerce/logger'; +import { createClient } from 'redis'; +import { + CommandInterfaceServiceDefinition, + CommandInterfaceServiceClient +} from '@restorecommerce/rc-grpc-clients/dist/generated-server/io/restorecommerce/commandinterface.js'; +import { + protoMetadata +} from '@restorecommerce/rc-grpc-clients/dist/generated-server/test/test.js' +import { BindConfig } from '../src/microservice/transport/provider/grpc/index.js'; +import { Channel, createChannel } from 'nice-grpc'; +import { it, describe, beforeAll, afterAll, beforeEach } from 'vitest'; + +/** + * + * @param msg google.protobuf.Any + * @returns Arbitrary JSON + */ +const decodeMsg = (msg: any): any => { + const decoded = Buffer.from(msg.value).toString(); + return JSON.parse(decoded); +}; + +/** + * + * @param msg Arbitrary JSON + * @returns google.protobuf.Any formatted message + */ +const encodeMsg = (msg: any): any => { + + const stringified = JSON.stringify(msg); + const encoded = Buffer.from(stringified); + const ret = { + type_url: 'payload', + value: encoded + }; + return ret; +}; + +// register Test proto for emitting TestEvent +registerProtoMeta(protoMetadata); + +const cfg= 
createServiceConfig(process.cwd() + '/test'); +const logger = createLogger(cfg.get('logger')); + +/* + * Note: Running Kafka and ArangoDB instances are required. + */ +describe('CommandInterfaceService', () => { + let db: any; + let server: Server; + let events: Events; + const testEvent = { + value: 'a test event', + count: 0, + }; + let testTopic; + let commandTopic; + let validate; + let redisClient; + let channel: Channel; + let grpcClient: CommandInterfaceServiceClient; + const eventListener = async (msg: any, + context: any, config: any, eventName: string): Promise => { + logger.debug(`[TEST] Received event: ${eventName}`, msg); + await validate(msg, eventName); + }; + beforeAll(async function setup() { + + events = new Events(cfg.get('events:kafka'), logger); + await events.start(); + + testTopic = await events.topic(cfg.get('events:kafka:topics:test.resource:topic')); + commandTopic = await events.topic(cfg.get('events:kafka:topics:command:topic')); + // subscribe all response events + for (let eventName of cfg.get('events:kafka:topics:command:events')) { + await commandTopic.on(eventName, eventListener); + } + + server = new Server(cfg.get('server'), logger); + db = await database.get(cfg.get('database:arango'), logger); + await db.truncate(); + + const config = cfg.get(); + delete config.database.nedb; // not supported in default implementation + + // init redis client for subject index + const redisConfig = cfg.get('redis'); + redisConfig.db = cfg.get('redis:db-indexes:db-subject'); + redisClient = createClient(redisConfig); + await redisClient.connect(); + + const cis = new CommandInterface(server, cfg, logger, events, redisClient); + await server.bind('commandinterface', { + service: CommandInterfaceServiceDefinition, + implementation: cis + } as BindConfig); + await server.start(); + + channel = createChannel(cfg.get('client:commandinterface:address')); + grpcClient = createGrpcClient({ + ...cfg.get('client:commandinterface'), + logger + }, 
CommandInterfaceServiceDefinition, channel); + }, 30000); + afterAll(async function teardown() { + await server.stop(); + await events.stop(); + }, 30000); + describe('check', () => { + it('should return the status', async () => { + let cmdPayload = encodeMsg({ + service: 'commandinterface' + }); + + const msg = { + name: 'health_check', + payload: cmdPayload + }; + + // check commandinterface service, should serve + let resp = await grpcClient.command(msg); + should.exist(resp); + should.not.exist((resp as any).error); + let data = decodeMsg(resp); + should.exist(data.status); + data.status.should.equal('SERVING'); + + // should not serve if service does not exist + cmdPayload = encodeMsg({ + service: 'does_not_exist' + }); + // check none existing service, should throw error + resp = await grpcClient.command({ + name: 'health_check', + payload: cmdPayload + }); + should.exist(resp); + data = decodeMsg(resp); + should.not.exist((resp as any).error); // no exception thrown + should.exist(data.error); // tolerant error handling + data.error.code.should.equal(404); + data.error.message.should.equal('Service does_not_exist does not exist'); + // should check all binded services if no service is specified + cmdPayload = encodeMsg({ + service: '' + }); + // check server, should serve + resp = await grpcClient.command({ + name: 'health_check', + payload: cmdPayload + }); + should.not.exist((resp as any).error); + should.exist(resp); + data = decodeMsg(resp); + should.exist(data.status); + data.status.should.equal('SERVING'); + }); + }); + describe('reconfigure', () => { + it('should return an error since it is not implemented', async () => { + const resp = await grpcClient.command({ + name: 'reconfigure' + }); + const decodedResp = decodeMsg(resp); + should.exist(decodedResp.error); + decodedResp.error.code.should.equal(501); + decodedResp.error.message.should.equal('reconfigure is not implemented'); + }); + }); + describe('reset', () => { + const docID = 'test/value'; + 
beforeAll(async () => { + await db.insert('tests', { + id: docID, + value: 101, + }); + }); + it('should clean the database', async () => { + const validatePromise = new Promise((resolve, reject) => { + validate = async (msg: any, eventName: string): void => { + try { + eventName.should.equal('resetResponse'); + should.exist(msg.services); + msg.services.should.containEql('commandinterface'); + should.exist(msg.payload); + const payload = decodeMsg(msg.payload); + should.not.exist(payload.error); + resolve(); + } catch (err) { + reject(err); + } + }; + }); + + const offset = await commandTopic.$offset(BigInt(-1)); + const resp = await grpcClient.command({ + name: 'reset' + }); + await commandTopic.$wait(offset); + + const decodedResp = decodeMsg(resp); + should.not.exist(decodedResp.error); + should.exist(decodedResp.status); + decodedResp.status.should.equal('Reset concluded successfully'); + + const result = await db.findByID('tests', docID); + result.should.be.length(0); + + await validatePromise; + }); + + }); + + describe('restore', () => { + beforeAll(async function prepareKafka() { + for (let i = 0; i < 100; i += 1) { + testEvent.count = i; + await testTopic.emit('testCreated', testEvent); + } + }, 30000); + beforeEach(async () => { + await db.truncate('tests'); + }); + it('should re-read all data from specified offset', async function restore() { + const validatePromise = new Promise((resolve, reject) => { + validate = async (msg: any, eventName: string): void => { + try { + logger.debug(`[RESTORE] Received event: ${eventName}`); + eventName.should.equal('restoreResponse'); + should.exist(msg.services); + msg.services.should.containEql('commandinterface'); + should.exist(msg.payload); + const payload = decodeMsg(msg.payload); + should.not.exist(payload.error); + // restore conclusion is checked asynchronously, since it can take a variable + // and potentially large amount of time + const result = await db.find('tests', {}, { + sort: { + count: 1 + } + }); + 
for (let i = 0; i < 100; i++) { + result[i].count.should.equal(i); + } + resolve(); + } catch (err) { + reject(err); + } + }; + }); + + // waiting for restore conclusion + const offset: bigint = await commandTopic.$offset(BigInt(-1)); + const resourceOffset: bigint = await testTopic.$offset(BigInt(-1)); + + const cmdPayload = encodeMsg({ + data: [ + { + entity: 'test', + base_offset: Number(resourceOffset - BigInt(100)), + ignore_offset: [] + } + ] + }); + + const resp = await grpcClient.command({ + name: 'restore', + payload: cmdPayload + }); + should.not.exist((resp as any).error); + + await commandTopic.$wait(offset); + await validatePromise; + }, 30000); + }); + describe('version', () => { + it('should return the version of the package and nodejs', async () => { + const validatePromise = new Promise((resolve, reject) => { + validate = async (msg: any, eventName: string): void => { + try { + eventName.should.equal('versionResponse'); + should.exist(msg.services); + msg.services.should.containEql('commandinterface'); + should.exist(msg.payload); + const payload = decodeMsg(msg.payload); + should.exist(payload.version); + payload.version.should.equal(process.env.npm_package_version); + should.exist(payload.nodejs); + payload.nodejs.should.equal(process.version); + resolve(); + } catch (err) { + reject(err); + } + }; + }); + const offset = await commandTopic.$offset(BigInt(-1)); + const resp = await grpcClient.command({ + name: 'version', + }); + await commandTopic.$wait(offset); + const data = decodeMsg(resp); + should.exist(data.version); + data.version.should.equal(process.env.npm_package_version); + should.exist(data.nodejs); + data.nodejs.should.equal(process.version); + await validatePromise; + }); + }); + describe('setApiKey', () => { + it('should set the provided authentication api key on configuration', async () => { + const validatePromise = new Promise((resolve, reject) => { + validate = async (msg: any, eventName: string): void => { + try { + 
eventName.should.equal('setApiKeyResponse'); + should.exist(msg.services); + msg.services.should.containEql('commandinterface'); + should.exist(msg.payload); + const payload = decodeMsg(msg.payload); + should.exist(payload.status); + payload.status.should.equal('ApiKey set successfully'); + resolve(); + } catch (err) { + reject(err); + } + }; + }); + const offset = await commandTopic.$offset(BigInt(-1)); + const apiKeyPayload = encodeMsg({ + authentication: { + apiKey: 'test-api-key-value' + } + }); + const resp = await grpcClient.command({ + name: 'set_api_key', + payload: apiKeyPayload + }); + await commandTopic.$wait(offset); + const data = decodeMsg(resp); + should.exist(data.status); + data.status.should.equal('ApiKey set successfully'); + await validatePromise; + }); + }); + describe('configUpdate', () => { + it('should update the provide configuration', async () => { + const validatePromise = new Promise((resolve, reject) => { + validate = async (msg: any, eventName: string): void => { + try { + eventName.should.equal('configUpdateResponse'); + should.exist(msg.services); + msg.services.should.containEql('commandinterface'); + should.exist(msg.payload); + const payload = decodeMsg(msg.payload); + should.exist(payload.status); + payload.status.should.equal('Configuration updated successfully'); + resolve(); + } catch (err) { + reject(err); + } + }; + }); + const offset = await commandTopic.$offset(BigInt(-1)); + const configPayload = encodeMsg({ + authentication: { + } + }); + const resp = await grpcClient.command({ + name: 'config_update', + payload: configPayload + }); + await commandTopic.$wait(offset); + const data = decodeMsg(resp); + should.exist(data.status); + data.status.should.equal('Configuration updated successfully'); + await validatePromise; + }); + }); + describe('flushCache', () => { + it('should flush with given db_index and pattern', async () => { + const validatePromise = new Promise((resolve, reject) => { + validate = async (msg: any, 
eventName: string): void => { + try { + eventName.should.equal('flushCacheResponse'); + should.exist(msg.payload); + const payload = decodeMsg(msg.payload); + should.exist(payload.status); + payload.status.should.startWith('Successfully flushed cache'); + resolve(); + } catch (err) { + reject(err); + } + }; + }); + // store 120 keys to redis db index 3 + const redis = createClient({ database: 3 }); + await redis.connect(); + for (let i=0; i< 120; i++) { + let key = 'user' + i; + // set key and value as same + redis.set(key, key); + } + redis.set('testKey', 'testValue'); + let allKeys = await redis.keys('*'); + const offset = await commandTopic.$offset(BigInt(-1)); + const flushCachePayload = encodeMsg({ + data: + { + db_index: 3, + pattern: 'user' + } + }); + const resp = await grpcClient.command({ + name: 'flush_cache', + payload: flushCachePayload + }); + allKeys = await redis.keys('*'); + allKeys.length.should.equal(1); + allKeys[0].should.equal('testKey'); + await commandTopic.$wait(offset); + const data = decodeMsg(resp); + should.exist(data.status); + data.status.should.startWith('Successfully flushed cache'); + await validatePromise; + }); + it('flushdb should flush all keys in specific db_index when no pattern is specified', async () => { + // store 3 keys to redis db index 3 + const redis = createClient({ database: 3 }); + await redis.connect(); + await redis.set('user1', 'user1'); + await redis.set('user2', 'user2'); + await redis.set('testKey2', 'testValue2'); + const flushCachePayload = encodeMsg({ + data: + { + db_index: 3 // No pattern is specified + } + }); + const resp = await grpcClient.command({ + name: 'flush_cache', + payload: flushCachePayload + }); + const keys = redis.keys('*'); + keys.should.be.empty(); + const data = decodeMsg(resp); + should.exist(data.status); + data.status.should.equal('Successfully flushed cache with DB index 3'); + }); + }); +}); diff --git a/packages/chassis-srv/test/database.spec.ts 
b/packages/chassis-srv/test/database.spec.ts new file mode 100644 index 00000000..823cef2c --- /dev/null +++ b/packages/chassis-srv/test/database.spec.ts @@ -0,0 +1,682 @@ +import * as should from 'should'; +import * as _ from 'lodash'; +import { createLogger } from '@restorecommerce/logger'; +import { Database } from 'arangojs'; +import * as chassis from '../src/index.js'; +import { DatabaseProvider } from '../src/database/index.js'; +import { it, describe, beforeEach, afterEach } from 'vitest'; + +const config = chassis.config; +const database = chassis.database; + +let db: DatabaseProvider; + +/* global describe context it beforeEach */ + +const providers = [ + { + name: 'arango', + init: async (): Promise => { + await config.load(process.cwd() + '/test'); + const cfg = await config.get(); + const logger = createLogger(cfg.get('logger')); + return database.get(cfg.get('database:arango'), logger); + }, + drop: async (): Promise => { + await config.load(process.cwd() + '/test'); + const cfg = await config.get(); + + const dbHost: string = cfg.get('database:arango:host'); + const dbPort: string = cfg.get('database:arango:port'); + const dbName: string = cfg.get('database:arango:database'); + + const db = new Database('http://' + dbHost + ':' + dbPort); + await db.dropDatabase(dbName); + }, + custom: () => { + describe('testing custom queries', () => { + it('should register a custom query', () => { + const script = 'return "Hello World"'; + db.registerCustomQuery('helloWorld', script, 'query'); + const queries = db.listCustomQueries(); + should.exist(queries); + queries.should.have.length(1); + should.exist(queries[0][0]); + queries[0][0].should.equal('helloWorld'); + + should.exist(queries[0][1]); + should.exist(queries[0][1].code); + queries[0][1].code.should.equal(script); + should.exist(queries[0][1].type); + queries[0][1].type.should.equal('query'); + }); + it('should unregister a custom query', async () => { + const script = 'return "Hello World";'; + 
db.registerCustomQuery('helloWorld', script, 'query'); + let functions = db.listCustomQueries(); + should.exist(functions); + functions.should.have.length(1); + + db.unregisterCustomQuery('helloWorld'); + functions = db.listCustomQueries(); + should.exist(functions); + functions.should.have.length(0); + }); + it('should execute a custom query', async () => { + const script = `return "Hello World"`; + await db.registerCustomQuery('helloWorld', script, 'query'); + const result = await db.find('test', {}, { + customQueries: ['helloWorld'] + }); + should.exist(result); + result.should.have.length(1); + result[0].should.equal('Hello World'); + }); + it('should execute a custom query with custom parameters', async () => { + const script = `return @param`; + await db.registerCustomQuery('helloWorld', script, 'query'); + const result = await db.find('test', {}, { + customQueries: ['helloWorld'], + customArguments: { + param: 'Hello World' + } + }); + should.exist(result); + result.should.have.length(1); + result[0].should.equal('Hello World'); + }); + it('should execute a custom query which accesses the database', async () => { + const script = `for t in test return t`; + await db.registerCustomQuery('script', script, 'query'); + const result = await db.find('test', {}, { + customQueries: ['script'] + }); + should.exist(result); + result.should.have.length(8); + }); + it('should apply a custom filter within a `find` query', async () => { + const script = `filter node.id == @customArguments.param`; + await db.registerCustomQuery('script', script, 'filter'); + const result = await db.find('test', {}, { + customQueries: ['script'], + customArguments: { + param: '/test/sort0' + } + }); + + should.exist(result); + result.should.have.length(1); + + should.exist(result[0].id); + result[0].id.should.equal('/test/sort0'); + should.exist(result[0].include); + result[0].include.should.equal(true); + should.exist(result[0].value); + result[0].value.should.equal('c'); + }); + 
it('should combine a custom filter with normal filters', async () => { + const script = `filter node.value != @customArguments.param`; + await db.registerCustomQuery('script', script, 'filter'); + const result = await db.find('test', { + include: { + $eq: true + } + }, + { + customQueries: ['script'], + customArguments: { + param: 'a' + } + }); + + should.exist(result); + result.should.have.length(2); + + const sorted = _.sortBy(result, ['id']); + + should.exist(sorted[0].id); + sorted[0].id.should.equal('/test/sort0'); + should.exist(sorted[0].include); + sorted[0].include.should.equal(true); + should.exist(sorted[0].value); + sorted[0].value.should.equal('c'); + + should.exist(sorted[1].id); + sorted[1].id.should.equal('/test/sort4'); + should.exist(sorted[1].include); + sorted[1].include.should.equal(true); + should.exist(sorted[1].value); + sorted[1].value.should.equal('b'); + }); + }); + } + }, + { + name: 'nedb', + init: async (): Promise => { + await config.load(process.cwd() + '/test'); + const cfg = await config.get(); + const logger = createLogger(cfg.get('logger')); + return database.get(cfg.get('database:nedb'), logger); + }, + drop: async (): Promise => { }, + } +]; + +const testProvider = (providerCfg) => { + const collection = 'test'; + const testData = [ + { id: '/test/sort0', value: 'c', include: true }, + { id: '/test/sort1', include: false }, + { id: '/test/sort2', include: false }, + { id: '/test/sort3', value: 'a', include: true }, + { id: '/test/sort4', value: 'b', include: true }, + { id: '/test/sort5', include: false }, + { id: '/test/somethingDifferent', include: false }, + { id: '/test/sortDifferent', include: false }, + ]; + const document = testData[4]; + + // users data + const userCollection = 'users'; + const userData = [ + { id: '1', first_name: 'Jenny', last_name: 'Brookes' }, + { id: '2', first_name: 'Michael', last_name: 'Goldsmith' }, + { id: '3', first_name: 'Michela', last_name: 'Smith' }, + { id: '4', first_name: 'Michael', 
last_name: 'Bowden' }, + { id: '5', first_name: 'Stephanie', last_name: 'Stokes' }, + { id: '6', first_name: 'David', last_name: 'Müller' } + ]; + + // address data + const addressCollection = 'addresss'; + const addressData = [ + { id: '1', city: 'Stuttgart', country: 'Germany' }, + { id: '2', city: 'Berlin', country: 'Germany' }, + { id: '3', city: 'Munich', country: 'Germany' }, + { id: '4', city: 'Bern', country: 'Switzerland' }, + { id: '5', city: 'Zurich', country: 'Switzerland' }, + { id: '6', city: 'Basel', country: 'Switzerland' } + ]; + beforeEach(async () => { + db = await providerCfg.init(); + await db.insert(collection, testData); + should.exist(db); + const result = await db.count(collection, {}); + // insert user collection for full text search testcase + if (providerCfg.name === 'arango') { + await db.insert(userCollection, userData); + await db.insert(addressCollection, addressData); + } + }); + + afterEach(async () => { + await providerCfg.drop(); + }); + describe('upsert', () => { + it('should insert a new document if it does not exist with upsert operation', async () => { + const newDoc = { + id: '/test/testupsert', + name: 'test', + }; + let result = await db.upsert(collection, newDoc); + should.exist(result); + result.should.deepEqual([newDoc]); + newDoc.name = 'changed'; + result = await db.upsert(collection, newDoc); + result.should.deepEqual([newDoc]); + }); + it('should update existing document with upsert operation', async () => { + const newDoc = { + id: '/test/testupsert', + name: 'changedAgain', + }; + let result = await db.upsert(collection, newDoc); + should.exist(result); + result.should.deepEqual([newDoc]); + }); + }); + describe('count', () => { + it(`should return the number of documents + in the collection with blank filter`, async () => { + const result = await db.count(collection, {}); + should.exist(result); + result.should.equal(testData.length); + }); + it('should return one for filtering based on id', async () => { + const 
result = await db.count(collection, { id: testData[0].id }); + should.exist(result); + result.should.equal(1); + }); + }); + describe('truncate', () => { + it('should delete all collection', async () => { + await db.truncate(); + const result = await db.count(collection, {}); + should.exist(result); + result.should.equal(0); + }); + it('should delete all documents in provided collection', async () => { + await db.truncate(collection); + const result = await db.count(collection, {}); + should.exist(result); + result.should.equal(0); + }); + }); + describe('findByID', () => { + it('should find documents', async () => { + const result = await db.findByID(collection, document.id); + should.exist(result); + result.should.be.length(1); + result[0].should.deepEqual(document); + }); + }); + describe('find', () => { + describe('with id filter', () => { + it('should return a document', async () => { + const result = await db.find(collection, { + id: document.id, + }); + result.should.be.length(1); + result[0].should.deepEqual(document); + }); + }); + + describe('find', () => { + describe('with iLike filter', () => { + it('should return one filtering based on iLike', async () => { + + const result = await db.find('test', { + id: { + $iLike: '%sOrT%' + } + }); + result.should.be.length(7); + }); + }); + }); + + describe('with sort', () => { + it('should return documents sorted in ascending order', + async () => { + let sortOrderKey; + if (providerCfg.name == 'arango') { + sortOrderKey = 'ASC'; + } else if (providerCfg.name == 'nedb') { + sortOrderKey = 1; + } + const result = await db.find(collection, + { include: true }, + { sort: { value: sortOrderKey } }); // sort ascending + should.exist(result); + result.should.deepEqual([testData[3], testData[4], testData[0]]); + }); + it('should return documents sorted in descending order', + async () => { + let sortOrderKey; + if (providerCfg.name == 'arango') { + sortOrderKey = 'DESC'; + } else if (providerCfg.name == 'nedb') { + 
sortOrderKey = -1; + } + const result = await db.find(collection, + { include: true }, + { sort: { value: sortOrderKey } }); // sort descending + should.exist(result); + result.should.deepEqual([testData[0], testData[4], testData[3]]); + }); + }); + describe('with field limiting', () => { + it('should return documents with selected fields', async () => { + const result = await db.find(collection, + { include: true }, + // 0 is exclude and 1 is to include that particular key + { fields: { include: 0 } }); // exclude field 'include' + should.exist(result); + const resultKeep = await db.find(collection, + { include: true }, + { fields: { id: 1, value: 1 } }); // include only id and value fields + resultKeep.should.deepEqual(result); + // Not to modify the original data which is used in next test case + // to add and delete in beforeEach and afterEach + const clonedData = _.cloneDeep([testData[3], testData[4], testData[0]]); + const compareData = _.map(clonedData, (e) => { + _.unset(e, 'include'); + return e; + }); + _.sortBy(result, 'id').should.deepEqual(_.sortBy(compareData, 'id')); + }); + }); + describe('with limit', () => { + it('should return one document', async () => { + const result: Object = await db.find(collection, { + id: document.id, + }, + { + limit: 1 + }); + should.exist(result); + result.should.be.length(1); + result[0].should.deepEqual(document); + }); + }); + }); + describe('with filter operator', () => { + it('should return a document', async () => { + let result = await db.find(collection, { + $or: [ + { id: document.id }, + { value: 'new' } + ] + }); + should.exist(result); + result.should.be.length(1); + result[0].should.deepEqual(document); + + result = await db.find(collection, { + $or: [ + { + id: document.id, + }, + { + $and: [ + { + name: { + $in: ['test'], + }, + }, + { + value: { + $not: { + $gt: 10, + }, + }, + }, + ], + }, + ], + }); + should.exist(result); + result.should.be.length(1); + result[0].should.deepEqual(document); + + 
result = await db.find(collection, { + id: document.id, + }, + { + limit: 1, + offset: 1, + }); + result.should.be.empty(); + + result = await db.find(collection, { + id: { + $startswith: '/test', + }, + }); + result.should.be.length(testData.length); + + result = await db.find(collection, { + id: { + $endswith: '0', + }, + }); + result.should.be.length(1); + result[0].should.deepEqual(testData[0]); + + result = await db.find(collection, { + value: { + $isEmpty: null, + }, + }); + // 3 fields with value as an empty field + should.exist(result); + }); + }); + describe('inserting a document', () => { + it('should store a document', async () => { + const newDoc = { + id: 'testnew', + name: 'test', + }; + let insertResp = await db.insert(collection, newDoc); + insertResp[0].should.deepEqual(newDoc); + }); + it('should return an error response when inserting same document twice', async () => { + // inserting newDoc since in afterEach we drop .i.e. for every it() -> DB is dropped + const newDoc = { + id: 'testnew', + name: 'test', + }; + let insertResp = await db.insert(collection, newDoc); + insertResp[0].should.deepEqual(newDoc); + insertResp = await db.insert(collection, newDoc); + should.exist(insertResp); + insertResp[0].error.should.equal(true); + }); + }); + describe('update', () => { + it('should update document', async () => { + const newDoc = _.clone(document); + newDoc.value = 'new'; + await db.update(collection, [newDoc]); + let result = await db.findByID(collection, document.id); + result = result[0]; + result.should.deepEqual(newDoc); + }); + it('should return error response when updating document which does not exist', async () => { + const invalidDoc = { id: 'invlaid', include: false }; + let updateResp = await db.update(collection, [invalidDoc]); + should.exist(updateResp); + updateResp[0].error.should.equal(true); + updateResp[0].errorMessage.should.equal('document not found'); + }); + }); + describe('delete', () => { + it('should delete document and 
also return a response for missing / invalid doc ID', async () => { + let deleteResp = await db.delete(collection, [document.id, 'invalid']); + should.exist(deleteResp); + deleteResp.should.be.length(2); + should.exist(deleteResp[0]._id); + deleteResp[1].error.should.equal(true); + const result = await db.findByID(collection, document.id); + result.should.be.Array(); + result.should.be.length(0); + }); + }); + describe('query by date', () => { + it('should be able to query document by its time stamp', async () => { + const currentDate = new Date(); + const timeStamp1 = currentDate.setFullYear(currentDate.getFullYear()); + const timeStamp2 = currentDate.setFullYear(currentDate.getFullYear() + 1); + const timeStamp3 = currentDate.setFullYear(currentDate.getFullYear() + 2); + const timeData = [ + { id: 'a', created: timeStamp1 }, + { id: 'b', created: timeStamp2 }, + { id: 'c', created: timeStamp3 } + ]; + await db.insert(collection, timeData); + // should return first two documents + let result = await db.find(collection, { + $and: [ + { + created: { + $gte: timeStamp1 + } + }, + { + created: { + $lte: timeStamp2 + } + } + ], + }); + should.exist(result); + result.should.be.Array(); + result.should.be.length(2); + timeData.splice(2, 1); + result = _.sortBy(result, [(o) => { return o.id; }]); + result.should.deepEqual(timeData); + // truncate test DB + await db.truncate(); + }); + + if (providerCfg.name === 'arango') { + describe('full text search', () => { + it('should return all test docs ignorning search string when analayzer or view config not set', async () => { + let testDocs = await db.find(collection, {}, { search: { search: 'test' } }); + testDocs.length.should.equal(8); + }); + + it('should search with default case insensitive based on first name and last name', async () => { + // delay is added since the index takes a second (since we delete and create users in beforeEach and afterEach) + await new Promise((resolve, reject) => { + setTimeout(resolve, 2000); 
+ }); + let usersFound = await db.find(userCollection, {}, { search: { search: 'Ich oWd' } }); + usersFound.length.should.equal(3); + usersFound[0].id.should.equal('4'); + usersFound[0].first_name.should.equal('Michael'); + usersFound[0].last_name.should.equal('Bowden'); + usersFound[1].id.should.equal('2'); + usersFound[1].first_name.startsWith('Mich').should.equal(true); + usersFound[1].last_name.endsWith('mith').should.equal(true); + usersFound[2].id.should.equal('3'); + usersFound[2].first_name.startsWith('Mich').should.equal(true); + usersFound[2].last_name.endsWith('mith').should.equal(true); + }, 5000); + + it('should search with default case insensitive based on city name and country name', async () => { + // delay is added since the index takes a second (since we delete and create users in beforeEach and afterEach) + await new Promise((resolve, reject) => { + setTimeout(resolve, 2000); + }); + let addressFound = await db.find(addressCollection, {}, { search: { search: 'ber man' } }); + addressFound.length.should.equal(4); + addressFound[0].city.should.equal('Berlin'); // Berlin, Germany (both terms match) + addressFound[1].city.should.equal('Bern'); + addressFound[2].country.should.equal('Germany'); // match becasue of Country Germany with search string `man` + addressFound[3].country.should.equal('Germany'); // match becasue of Country Germany with search string `man` + }, 5000); + + it('should search with case sensitive based on first name and last name', async () => { + // delay is added since the index takes a second (since we delete and create users in beforeEach and afterEach) + await new Promise((resolve, reject) => { + setTimeout(resolve, 2000); + }); + let usersFound = await db.find(userCollection, {}, { search: { search: 'Ich oWd', case_sensitive: true } }); + usersFound.length.should.equal(0); + usersFound = await db.find(userCollection, {}, { search: { search: 'Mic Bow', case_sensitive: true } }); + usersFound.length.should.equal(3); + 
usersFound[0].id.should.equal('4'); + usersFound[0].first_name.should.equal('Michael'); + usersFound[0].last_name.should.equal('Bowden'); + usersFound[1].id.should.equal('2'); + usersFound[1].first_name.startsWith('Mich').should.equal(true); + usersFound[1].last_name.endsWith('mith').should.equal(true); + usersFound[2].id.should.equal('3'); + usersFound[2].first_name.startsWith('Mich').should.equal(true); + usersFound[2].last_name.endsWith('mith').should.equal(true); + }, 5000); + + it('should search for umlauts', async () => { + // delay is added since the index takes a second (since we delete and create users in beforeEach and afterEach) + await new Promise((resolve, reject) => { + setTimeout(resolve, 2000); + }); + let usersFound = await db.find(userCollection, {}, { search: { search: 'müll' } }); + usersFound.length.should.equal(1); + usersFound[0].first_name.should.equal('David'); + usersFound[0].last_name.should.equal('Müller'); + }, 5000); + + it('should not return any result for any match of the search string', async () => { + // delay is added since the index takes a second (since we delete and create users in beforeEach and afterEach) + await new Promise((resolve, reject) => { + setTimeout(resolve, 2000); + }); + let usersFound = await db.find(userCollection, {}, { search: { search: 'does not exist' } }); + usersFound.length.should.equal(0); + }, 5000); + + it('should search with filter', async () => { + // delay is added since the index takes a second (since we delete and create users in beforeEach and afterEach) + await new Promise((resolve, reject) => { + setTimeout(resolve, 2000); + }); + let usersFound = await db.find(userCollection, { last_name: { $iLike: '%bow%' } }, { search: { search: 'mic' } }); + usersFound.length.should.equal(1); + usersFound[0].first_name.should.equal('Michael'); + usersFound[0].last_name.should.equal('Bowden'); + }, 5000); + + it('should return an error deleting analyzer since the view still exists', async () => { + // delay 
is added since the index takes a second (since we delete and create users in beforeEach and afterEach) + await new Promise((resolve, reject) => { + setTimeout(resolve, 2000); + }); + let resp = await db.deleteAnalyzer(['trigram', 'trigram_norm']); + resp.length.should.equal(2); + resp[0].id.should.equal('trigram'); + resp[0].code.should.equal(409); + resp[0].message.should.equal("analyzer in-use while removing arangosearch analyzer 'chassis-test::trigram'"); + resp[1].id.should.equal('trigram_norm'); + resp[1].code.should.equal(409); + resp[1].message.should.equal("analyzer in-use while removing arangosearch analyzer 'chassis-test::trigram_norm'"); + }, 5000); + + it('should return an error dropping view which does not exist', async () => { + // delay is added since the index takes a second (since we delete and create users in beforeEach and afterEach) + await new Promise((resolve, reject) => { + setTimeout(resolve, 2000); + }); + let resp = await db.dropView(['test']); + resp.length.should.equal(1); + resp[0].id.should.equal('test'); + resp[0].code.should.equal(404); + resp[0].message.should.equal('collection or view not found'); + }, 5000); + + it('should drop view', async () => { + // delay is added since the index takes a second (since we delete and create users in beforeEach and afterEach) + await new Promise((resolve, reject) => { + setTimeout(resolve, 2000); + }); + let resp = await db.dropView(['users_view']); + resp.length.should.equal(1); + resp[0].id.should.equal('users_view'); + resp[0].code.should.equal(200); + resp[0].message.should.equal('View users_view dropped successfully'); + }, 5000); + + it('should delete analyzers', async () => { + // delay is added since the index takes a second (since we delete and create users in beforeEach and afterEach) + await new Promise((resolve, reject) => { + setTimeout(resolve, 2000); + }); + // drop view and then analyzer + await db.dropView(['users_view', 'addresss_view']); + let resp = await 
db.deleteAnalyzer(['trigram', 'trigram_norm']); + resp.length.should.equal(2); + resp[0].id.should.equal('trigram'); + resp[0].code.should.equal(200); + resp[0].message.should.equal('Analyzer trigram deleted successfully'); + resp[1].id.should.equal('trigram_norm'); + resp[1].code.should.equal(200); + resp[1].message.should.equal('Analyzer trigram_norm deleted successfully'); + } , 5000); + + }); + } + }); + + if (providerCfg.custom) { + describe('custom tests', () => providerCfg.custom()); + } +}; + +providers.forEach((providerCfg) => { + describe(`with database provider ${providerCfg.name}`, () => { + testProvider(providerCfg); + }); +}); diff --git a/packages/chassis-srv/test/errors.spec.ts b/packages/chassis-srv/test/errors.spec.ts new file mode 100644 index 00000000..a899d607 --- /dev/null +++ b/packages/chassis-srv/test/errors.spec.ts @@ -0,0 +1,20 @@ +import * as should from 'should'; +import * as _ from 'lodash'; +import { errors } from '../src/index.js'; +import { it, describe } from 'vitest'; + +/* global describe it */ + +describe('error', () => { + _.forEach(errors, (Error, name) => { + describe(name, () => { + it('should be an Error', () => { + const error = new Error(name); + should.exist(error); + error.should.be.Error(); + error.name.should.equal(name); + should.exist(error.stack); + }); + }); + }); +}); diff --git a/packages/chassis-srv/test/graphs.spec.ts b/packages/chassis-srv/test/graphs.spec.ts new file mode 100644 index 00000000..a1a6f42f --- /dev/null +++ b/packages/chassis-srv/test/graphs.spec.ts @@ -0,0 +1,905 @@ +import * as should from 'should'; +import * as _ from 'lodash'; +import { createLogger } from '@restorecommerce/logger'; +import { Database } from 'arangojs'; +import * as chassis from '../src/index.js'; +const config = chassis.config; +const database = chassis.database; +import { + Options_Direction as Direction, +} from '@restorecommerce/rc-grpc-clients/dist/generated-server/io/restorecommerce/graph.js'; +import { it, describe, 
beforeAll, afterAll } from 'vitest'; + +/* global describe context it beforeEach */ + +const providers = [ + { + name: 'arango', + init: async (): Promise => { + await config.load(process.cwd() + '/test'); + const cfg = await config.get(); + const logger = createLogger(cfg.get('logger')); + return database.get(cfg.get('database:arango'), logger, cfg.get('graph:graphName'), + cfg.get('graph:edgeDefinitions')); + } + } +]; + +const arrUnique = (arr: T[]) => { + return [...new Set(arr)]; +}; + +const testProvider = (providerCfg) => { + let db: any; + // STATE <-- lives PERSON has --> CAR belongsto --> PLACE resides --> STATE + const personCollectionName = 'person'; + const hasEdgeCollectionName = 'has'; + const carsCollectionName = 'car'; + const belongsEdgeCollectionName = 'belongs'; + const placeCollectionName = 'place'; + const residesEdgeCollectionName = 'resides'; + const stateCollectionName = 'state'; + const livesEdgeCollectionName = 'lives'; + + beforeAll(async () => { + db = await providerCfg.init(); + // create person vertex collection + await db.addVertexCollection(personCollectionName); + // create edge definition edgeCollectionName, fromVerticeCollection, toVerticeCollection + await db.addEdgeDefinition(hasEdgeCollectionName, personCollectionName, carsCollectionName); + await db.addEdgeDefinition(belongsEdgeCollectionName, carsCollectionName, placeCollectionName); + should.exist(db); + }); + afterAll(async () => { + await config.load(process.cwd() + '/test'); + const cfg = await config.get(); + + const dbName: string = cfg.get('database:arango:database'); + const dbHost: string = cfg.get('database:arango:host'); + const dbPort: string = cfg.get('database:arango:port'); + + const db = new Database('http://' + dbHost + ':' + dbPort); + await db.dropDatabase(dbName); + }); + + describe('Graphs Collection API', () => { + let result; + let edgeResult; + it('should create a vertex Person, Car and Place collection and insert data into it', async () => { + // 
persons + const personVertices = [ + { name: 'Alice', id: 'a' }, + { name: 'Bob', id: 'b' }, + { name: 'Charlie', id: 'c' }, + { name: 'Dave', id: 'd' }, + { name: 'Eve', id: 'e' } + ]; + result = await db.createVertex(personCollectionName, personVertices); + // verify the data from DB + let insertedVertices = await db.find('person'); + insertedVertices = _.sortBy(insertedVertices, [(o) => { return o.name; }]); + should.exist(insertedVertices); + insertedVertices.should.deepEqual(personVertices); + + // cars + const carVertices = [ + { name: 'carA', id: 'c1' }, + { name: 'carB', id: 'c2' }, + { name: 'carC', id: 'c3' }, + { name: 'carD', id: 'c4' }, + { name: 'carE', id: 'c5' } + ]; + result = await db.createVertex(carsCollectionName, carVertices); + // verify the data from DB + insertedVertices = await db.find('car'); + insertedVertices = _.sortBy(insertedVertices, [(o) => { return o.name; }]); + should.exist(insertedVertices); + insertedVertices.should.deepEqual(carVertices); + + // place + const placeVertices = [ + { name: 'placeA', id: 'p1' }, + { name: 'placeB', id: 'p2' }, + { name: 'placeC', id: 'p3' }, + { name: 'placeD', id: 'p4' }, + { name: 'placeE', id: 'p5' } + ]; + result = await db.createVertex(placeCollectionName, placeVertices); + // verify the data from DB + insertedVertices = await db.find('place'); + insertedVertices = _.sortBy(insertedVertices, [(o) => { return o.name; }]); + should.exist(insertedVertices); + insertedVertices.should.deepEqual(placeVertices); + + // state + const stateVertices = [ + { name: 'stateA', id: 's1' }, + { name: 'stateAA', id: 's11' }, + { name: 'stateB', id: 's2' }, + { name: 'stateBB', id: 's22' }, + { name: 'stateC', id: 's3' }, + { name: 'stateCC', id: 's33' }, + { name: 'stateD', id: 's4' }, + { name: 'stateDD', id: 's44' }, + { name: 'stateE', id: 's5' }, + { name: 'stateEE', id: 's55' } + ]; + result = await db.createVertex(stateCollectionName, stateVertices); + // verify the data from DB + insertedVertices = 
await db.find('state'); + insertedVertices = _.sortBy(insertedVertices, [(o) => { return o.name; }]); + should.exist(insertedVertices); + insertedVertices.should.deepEqual(stateVertices); + }); + it('should create "person has car", "car belongs to place", "place resides in state" edge collections and insert data into it', async () => { + let personCarEdges: any = [ + { info: 'Alice has Car A', _from: `person/a`, _to: `car/c1`, id: 'e1' }, + { info: 'Bob has Car B', _from: `person/b`, _to: `car/c2`, id: 'e2' }, + { info: 'Charlie has Car C', _from: `person/c`, _to: `car/c3`, id: 'e3' }, + { info: 'Dave has Car D', _from: `person/d`, _to: `car/c4`, id: 'e4' }, + { info: 'Eve has Car E', _from: `person/e`, _to: `car/c5`, id: 'e5' } + ]; + for (let personCarEdge of personCarEdges) { + edgeResult = await db.createEdge(hasEdgeCollectionName, personCarEdge); + } + let insertedEdges: any = await db.find('has'); + personCarEdges = _.sortBy(personCarEdges, [(o) => { return o.info; }]); + insertedEdges = _.sortBy(insertedEdges, [(o) => { return o.info; }]); + should.exist(insertedEdges); + insertedEdges.should.deepEqual(personCarEdges); + + let carPlaceEdges: any = [ + { info: 'Car A belongs to place P1', _from: `car/c1`, _to: `place/p1`, id: 'e6' }, + { info: 'Car B belongs to place P2', _from: `car/c2`, _to: `place/p2`, id: 'e7' }, + { info: 'Car C belongs to place P3', _from: `car/c3`, _to: `place/p3`, id: 'e8' }, + { info: 'Car D belongs to place P4', _from: `car/c4`, _to: `place/p4`, id: 'e9' }, + { info: 'Car E belongs to place P5', _from: `car/c5`, _to: `place/p5`, id: 'e10' } + ]; + for (let carPlaceEdge of carPlaceEdges) { + await db.createEdge(belongsEdgeCollectionName, carPlaceEdge); + } + insertedEdges = await db.find('belongs'); + carPlaceEdges = _.sortBy(carPlaceEdges, [(o) => { return o.info; }]); + insertedEdges = _.sortBy(insertedEdges, [(o) => { return o.info; }]); + should.exist(insertedEdges); + insertedEdges.should.deepEqual(carPlaceEdges); + + let 
placeStateEdges: any = [ + { info: 'Place A resides in state S1', _from: `place/p1`, _to: `state/s1`, id: 'e11' }, + { info: 'Place B resides in state S2', _from: `place/p2`, _to: `state/s2`, id: 'e12' }, + { info: 'Place C resides in state S3', _from: `place/p3`, _to: `state/s3`, id: 'e13' }, + { info: 'Place D resides in state S4', _from: `place/p4`, _to: `state/s4`, id: 'e14' }, + { info: 'Place E resides in state S5', _from: `place/p5`, _to: `state/s5`, id: 'e15' } + ]; + for (let placeStateEdge of placeStateEdges) { + await db.createEdge(residesEdgeCollectionName, placeStateEdge); + } + insertedEdges = await db.find('resides'); + placeStateEdges = _.sortBy(placeStateEdges, [(o) => { return o.info; }]); + insertedEdges = _.sortBy(insertedEdges, [(o) => { return o.info; }]); + should.exist(insertedEdges); + insertedEdges.should.deepEqual(placeStateEdges); + + let personStateEdges: any = [ + { info: 'Person A lives in state S1', _from: `person/a`, _to: `state/s11`, id: 'e16' }, + { info: 'Person B lives in state S2', _from: `person/b`, _to: `state/s22`, id: 'e17' }, + { info: 'Person C lives in state S3', _from: `person/c`, _to: `state/s33`, id: 'e18' }, + { info: 'Person D lives in state S4', _from: `person/d`, _to: `state/s44`, id: 'e19' }, + { info: 'Person E lives in state S5', _from: `person/e`, _to: `state/s55`, id: 'e20' } + ]; + for (let personStateEdge of personStateEdges) { + await db.createEdge(livesEdgeCollectionName, personStateEdge); + } + insertedEdges = await db.find('lives'); + personStateEdges = _.sortBy(personStateEdges, [(o) => { return o.info; }]); + insertedEdges = _.sortBy(insertedEdges, [(o) => { return o.info; }]); + should.exist(insertedEdges); + insertedEdges.should.deepEqual(personStateEdges); + }); + it('should verify incoming and outgoing edges', async () => { + // get incoming edges for Car C1 + const incomingEdges = await db.getInEdges(hasEdgeCollectionName, `car/c1`); + should.exist(incomingEdges); + 
incomingEdges.edges[0].info.should.equal('Alice has Car A'); + + // get outgoing edges for Car C1 + let outgoingEdges = await db.getOutEdges(belongsEdgeCollectionName, `car/c1`); + should.exist(outgoingEdges); + outgoingEdges.edges[0].info.should.equal('Car A belongs to place P1'); + }); + it('should throw an error for graph traversal for missing collection name / start_vertex', async () => { + // traverse graph + let errMessage = ''; + // missing collection name in vertices + try { + await db.traversal({ start_vertex_ids: ['a'] }, null, null, null, false); + } catch (err) { + errMessage = err.message; + } + // validate error message + errMessage.should.equal('missing collection name for vertex id a'); + // missing start vertices in vertices + try { + await db.traversal({ collection_name: 'person' }, null, null, null, false); + } catch (err) { + errMessage = err.message; + } + // validate error message + errMessage.should.equal('missing vertex id for collection_name person'); + // empty collection name for collections + try { + await db.traversal(null, { collection_name: '' }, null, null, false); + } catch (err) { + errMessage = err.message; + } + errMessage.should.equal('One of the Vertices or Collection should be defined'); + }); + it('should traverse the graph and return only vertices for Person A', async () => { + // traverse graph + let result = { data: [], paths: [] }; + const traversalResponse = await db.traversal({ collection_name: 'person', start_vertex_ids: ['a'] }, null, null, null, false); + const rootEntityData = await traversalResponse.rootCursor.all(); + const associationEntityData = await traversalResponse.associationCursor.all(); + result.data = rootEntityData; + for (let data of associationEntityData) { + result.data.push(data.v); // extract only vertices data from associattion entity as it contains v, e, p + } + should.exist(result); + should.exist(result.data); + result.paths.should.be.empty(); + 
result.data.should.be.instanceof(Array).and.have.lengthOf(5); + }); + it('should traverse the graph and return both vertices and paths when paths flag is set to true', async () => { + // traverse graph + const result = { data: [], paths: [] }; + const traversalResponse = await db.traversal({ collection_name: 'person', start_vertex_ids: ['a'] }, null, null, null, true); + const rootEntityData = await traversalResponse.rootCursor.all(); + const associationEntityData = await traversalResponse.associationCursor.all(); + for (let data of associationEntityData) { + result.data.push(data.v); // extract only vertices data from associattion entity as it contains v, e, p + result.paths.push(data.p); + } + for (let rootEntity of rootEntityData) { + result.data.push(rootEntity); + } + result.paths = arrUnique(result.paths); + should.exist(result); + should.exist(result.data); + should.exist(result.paths); + result.data.should.be.instanceof(Array).and.have.lengthOf(5); + result.data[0].name.should.equal('carA'); + result.data[1].name.should.equal('stateAA'); + result.data[2].name.should.equal('placeA'); + result.data[3].name.should.equal('stateA'); + result.data[4].name.should.equal('Alice'); + result.paths.should.be.instanceof(Array).and.have.lengthOf(4); + }); + // include vertices + it('should traverse the graph with included vertices options and return only the included vertices', async () => { + // traverse graph + const result = { data: [], paths: [] }; + const traversalResponse = await db.traversal({ collection_name: 'person', start_vertex_ids: ['a'] }, null, { include_vertexs: ['car'] }, null, true); + const rootEntityData = await traversalResponse.rootCursor.all(); + const associationEntityData = await traversalResponse.associationCursor.all(); + for (let data of associationEntityData) { + result.data.push(data.v); // extract only vertices data from associattion entity as it contains v, e, p + result.paths.push(data.p); + } + for (let rootEntity of rootEntityData) { + 
result.data.push(rootEntity); + } + result.paths = arrUnique(result.paths); + should.exist(result); + should.exist(result.data); + should.exist(result.paths); + result.data.should.be.instanceof(Array).and.have.lengthOf(2); + result.data[0].name.should.equal('carA'); + result.data[1].name.should.equal('Alice'); + result.paths.should.be.instanceof(Array).and.have.lengthOf(1); + }); + // exclude vertices + it('should traverse the graph with excluded vertices options and return only traversed data with excluded vertices', async () => { + // traverse graph + const result = { data: [], paths: [] }; + const traversalResponse = await db.traversal({ collection_name: 'person', start_vertex_ids: ['a'] }, null, { exclude_vertexs: ['car'] }, null, true); + const rootEntityData = await traversalResponse.rootCursor.all(); + const associationEntityData = await traversalResponse.associationCursor.all(); + for (let data of associationEntityData) { + result.data.push(data.v); // extract only vertices data from associattion entity as it contains v, e, p + result.paths.push(data.p); + } + for (let rootEntity of rootEntityData) { + result.data.push(rootEntity); + } + result.paths = arrUnique(result.paths); + should.exist(result); + should.exist(result.data); + should.exist(result.paths); + result.data.should.be.instanceof(Array).and.have.lengthOf(4); + result.data[0].name.should.equal('stateAA'); + result.data[1].name.should.equal('placeA'); + result.data[2].name.should.equal('stateA'); + result.data[3].name.should.equal('Alice'); + result.paths.should.be.instanceof(Array).and.have.lengthOf(3); + }); + // include edges + it('should traverse the graph with included edges options and return vertices from included edges', async () => { + // traverse graph + const result = { data: [], paths: [] }; + let traversalResponse = await db.traversal({ collection_name: 'person', start_vertex_ids: ['a'] }, null, { include_edges: ['has'] }, null, true); + const rootEntityData = await 
traversalResponse.rootCursor.all(); + const associationEntityData = await traversalResponse.associationCursor.all(); + for (let data of associationEntityData) { + result.data.push(data.v); // extract only vertices data from associattion entity as it contains v, e, p + result.paths.push(data.p); + } + for (let rootEntity of rootEntityData) { + result.data.push(rootEntity); + } + result.paths = arrUnique(result.paths); + should.exist(result); + should.exist(result.data); + should.exist(result.paths); + result.data.should.be.instanceof(Array).and.have.lengthOf(2); + result.data[0].name.should.equal('carA'); + result.data[1].name.should.equal('Alice'); + result.paths.should.be.instanceof(Array).and.have.lengthOf(1); + }); + // exclude edges + it('should traverse the graph with exclude edges options and return vertices from excluded edges', async () => { + // traverse graph + const result = { data: [], paths: [] }; + let traversalResponse = await db.traversal({ collection_name: 'person', start_vertex_ids: ['a'] }, null, { exclude_edges: ['belongs'] }, null, true); + const rootEntityData = await traversalResponse.rootCursor.all(); + const associationEntityData = await traversalResponse.associationCursor.all(); + for (let data of associationEntityData) { + result.data.push(data.v); // extract only vertices data from associattion entity as it contains v, e, p + result.paths.push(data.p); + } + for (let rootEntity of rootEntityData) { + result.data.push(rootEntity); + } + result.paths = arrUnique(result.paths); + should.exist(result); + should.exist(result.data); + should.exist(result.paths); + result.data.should.be.instanceof(Array).and.have.lengthOf(4); + result.data[0].name.should.equal('carA'); + result.data[1].name.should.equal('stateAA'); + result.data[2].name.should.equal('stateA'); + result.data[3].name.should.equal('Alice'); + result.paths.should.be.instanceof(Array).and.have.lengthOf(3); + }); + // exclude one edge and include another edge of same entity + it('for 
2 entities should exclude one entity edge and include another entity edge', async () => { + // traverse graph + const result = { data: [], paths: [] }; + let traversalResponse = await db.traversal({ collection_name: 'person', start_vertex_ids: ['a'] }, null, { exclude_edges: ['resides'], include_edges: ['lives'] }, null, true); + const rootEntityData = await traversalResponse.rootCursor.all(); + const associationEntityData = await traversalResponse.associationCursor.all(); + for (let data of associationEntityData) { + result.data.push(data.v); // extract only vertices data from associattion entity as it contains v, e, p + result.paths.push(data.p); + } + for (let rootEntity of rootEntityData) { + result.data.push(rootEntity); + } + result.paths = arrUnique(result.paths); + should.exist(result); + should.exist(result.data); + should.exist(result.paths); + result.data.should.be.instanceof(Array).and.have.lengthOf(2); + result.data[0].name.should.equal('stateAA'); + result.data[1].name.should.equal('Alice'); + result.paths.should.be.instanceof(Array).and.have.lengthOf(1); + }); + // collection traversal + it('should traverse the entire collection and return data from all traversed entities', async () => { + // traverse graph + const result = { data: [], paths: [] }; + let traversalResponse = await db.traversal(null, { collection_name: 'person' }, null, null, true); + const rootEntityData = await traversalResponse.rootCursor.all(); + const associationEntityData = await traversalResponse.associationCursor.all(); + for (let data of associationEntityData) { + result.data.push(data.v); // extract only vertices data from associattion entity as it contains v, e, p + result.paths.push(data.p); + } + for (let rootEntity of rootEntityData) { + result.data.push(rootEntity); + } + result.paths = arrUnique(result.paths); + should.exist(result); + should.exist(result.data); + should.exist(result.paths); + result.data.should.be.instanceof(Array).and.have.lengthOf(25); // 5 person, 5 
states, 5 cars, 5 place and 5 state entities + result.paths.should.be.instanceof(Array).and.have.lengthOf(20); // 20 edges + }); + // Filter tests for collection traversal + it('with filters should traverse the collection and return data with filtering applied on respective entities', async () => { + // traverse graph with filtering for car and place entities + let result = { data: [], paths: [] }; + let traversalResponse = await db.traversal(null, { collection_name: 'person' }, null, + [{ + filters: [{ field: 'name', operation: 'eq', value: 'carA' }], + entity: 'car' + }, { + filters: [{ field: 'name', operation: 'eq', value: 'placeA' }], + entity: 'place' + }], + true); + let rootEntityData = await traversalResponse.rootCursor.all(); + let associationEntityData = await traversalResponse.associationCursor.all(); + for (let data of associationEntityData) { + result.data.push(data.v); // extract only vertices data from associattion entity as it contains v, e, p + result.paths.push(data.p); + } + for (let rootEntity of rootEntityData) { + result.data.push(rootEntity); + } + result.paths = arrUnique(result.paths); + should.exist(result); + should.exist(result.data); + should.exist(result.paths); + result.data.should.be.instanceof(Array).and.have.lengthOf(17); // 5 person, 5 states, 1 cars, 1 place and 5 state entities + let filteredData = result.data.filter(e => e._id.startsWith('car/') || e._id.startsWith('place/')); + filteredData.should.be.length(2); + filteredData[0].name.should.equal('carA'); + filteredData[1].name.should.equal('placeA'); + + // traverse graph with filtering for state entities + result = { data: [], paths: [] }; + traversalResponse = await db.traversal(null, { collection_name: 'person' }, null, + [{ + filters: [{ field: 'name', operation: 'eq', value: 'stateA' }, { field: 'name', operation: 'eq', value: 'stateAA' }], + operator: 'or', // Default is AND operation + entity: 'state' + }], + true); + rootEntityData = await 
traversalResponse.rootCursor.all(); + associationEntityData = await traversalResponse.associationCursor.all(); + for (let data of associationEntityData) { + result.data.push(data.v); // extract only vertices data from associattion entity as it contains v, e, p + result.paths.push(data.p); + } + for (let rootEntity of rootEntityData) { + result.data.push(rootEntity); + } + result.paths = arrUnique(result.paths); + should.exist(result); + should.exist(result.data); + should.exist(result.paths); + result.data.should.be.instanceof(Array).and.have.lengthOf(17); // 5 person, 2 states, 5 cars, 5 place entities + filteredData = result.data.filter(e => e._id.startsWith('state/')); + filteredData.should.be.length(2); + filteredData[0].name.should.equal('stateAA'); + filteredData[1].name.should.equal('stateA'); + }); + // filters with include vertices + it('should traverse the graph with filters and included vertices options and return only the filtered and included vertices', async () => { + // traverse graph with 1 included vertex + let result = { data: [], paths: [] }; + let traversalResponse = await db.traversal(null, { collection_name: 'person' }, { include_vertexs: ['car'], direction: Direction.OUTBOUND }, + [{ + filters: [{ field: 'name', operation: 'eq', value: 'carA' }, { field: 'name', operation: 'eq', value: 'carB' }], + operator: 'or', // Default is AND operation + entity: 'car' + }], + true); + let rootEntityData = await traversalResponse.rootCursor.all(); + let associationEntityData = await traversalResponse.associationCursor.all(); + for (let data of associationEntityData) { + result.data.push(data.v); // extract only vertices data from associattion entity as it contains v, e, p + result.paths.push(data.p); + } + for (let rootEntity of rootEntityData) { + result.data.push(rootEntity); + } + result.paths = arrUnique(result.paths); + should.exist(result); + should.exist(result.data); + should.exist(result.paths); + 
result.data.should.be.instanceof(Array).and.have.lengthOf(7); // 5 person 2 cars + result.data[0].name.should.equal('carA'); + result.data[1].name.should.equal('carB'); + result.data[2].name.should.equal('Alice'); + result.data[3].name.should.equal('Bob'); + result.data[4].name.should.equal('Charlie'); + result.data[5].name.should.equal('Dave'); + result.data[6].name.should.equal('Eve'); + result.paths.should.be.instanceof(Array).and.have.lengthOf(2); + + // traverse graph with 2 included vertex + result = { data: [], paths: [] }; + traversalResponse = await db.traversal(null, { collection_name: 'person' }, { include_vertexs: ['car', 'state'], direction: Direction.OUTBOUND }, + [{ + filters: [{ field: 'name', operation: 'eq', value: 'carA' }, { field: 'name', operation: 'eq', value: 'carB' }], + operator: 'or', // Default is AND operation + entity: 'car' + }, { + filters: [{ field: 'name', operation: 'eq', value: 'stateAA' }, { field: 'name', operation: 'eq', value: 'stateBB' }], + operator: 'or', // Default is AND operation + entity: 'state' + }], + true); + rootEntityData = await traversalResponse.rootCursor.all(); + associationEntityData = await traversalResponse.associationCursor.all(); + for (let data of associationEntityData) { + result.data.push(data.v); // extract only vertices data from associattion entity as it contains v, e, p + result.paths.push(data.p); + } + for (let rootEntity of rootEntityData) { + result.data.push(rootEntity); + } + result.paths = arrUnique(result.paths); + should.exist(result); + should.exist(result.data); + should.exist(result.paths); + result.data.should.be.instanceof(Array).and.have.lengthOf(9); // 5 person 2 cars 2 states + result.data[0].name.should.equal('carA'); + result.data[1].name.should.equal('stateAA'); + result.data[2].name.should.equal('carB'); + result.data[3].name.should.equal('stateBB'); + result.data[4].name.should.equal('Alice'); + result.data[5].name.should.equal('Bob'); + 
result.data[6].name.should.equal('Charlie'); + result.data[7].name.should.equal('Dave'); + result.data[8].name.should.equal('Eve'); + result.paths.should.be.instanceof(Array).and.have.lengthOf(4); + }); + // filter with exclude vertices + it('should traverse the graph with filters and excluded vertices options and return only the filtered and excluded vertices', async () => { + // traverse graph + const result = { data: [], paths: [] }; + let traversalResponse = await db.traversal({ collection_name: '', start_vertex_ids: [] }, { collection_name: 'person' }, { exclude_vertexs: ['car'] }, + [{ + filters: [{ field: 'name', operation: 'eq', value: 'stateA' }, { field: 'name', operation: 'eq', value: 'stateB' }], + operator: 'or', // Default is AND operation + entity: 'state' + }], true); + let rootEntityData = await traversalResponse.rootCursor.all(); + let associationEntityData = await traversalResponse.associationCursor.all(); + for (let data of associationEntityData) { + result.data.push(data.v); // extract only vertices data from associattion entity as it contains v, e, p + result.paths.push(data.p); + } + for (let rootEntity of rootEntityData) { + result.data.push(rootEntity); + } + result.paths = arrUnique(result.paths); + should.exist(result); + should.exist(result.data); + should.exist(result.paths); + result.data.should.be.instanceof(Array).and.have.lengthOf(12); // 5 persons, 5 places, 2 satates + result.data[0].name.should.equal('placeA'); + result.data[1].name.should.equal('stateA'); + result.data[2].name.should.equal('placeB'); + result.data[3].name.should.equal('stateB'); + result.data[4].name.should.equal('placeC'); + result.data[5].name.should.equal('placeD'); + result.data[6].name.should.equal('placeE'); + result.data[7].name.should.equal('Alice'); + result.data[8].name.should.equal('Bob'); + result.data[9].name.should.equal('Charlie'); + result.data[10].name.should.equal('Dave'); + result.data[11].name.should.equal('Eve'); + 
result.paths.should.be.instanceof(Array).and.have.lengthOf(7); + }); + // filter with exclude edges + it('for 2 entities should exclude one entity edge and include another entity edge with filtering enabled on second edge entity', async () => { + let result = { data: [], paths: [] }; + // traverse graph with filtering for state entities (filter with exclude one edge and include other edge) + let traversalResponse = await db.traversal(null, { collection_name: 'person' }, { exclude_edges: ['resides'] }, + [{ + filters: [{ field: 'name', operation: 'eq', value: 'stateAA' }, { field: 'name', operation: 'eq', value: 'stateBB' }], + operator: 'or', // Default is AND operation + edge: 'lives' + }], + true); + let rootEntityData = await traversalResponse.rootCursor.all(); + let associationEntityData = await traversalResponse.associationCursor.all(); + for (let data of associationEntityData) { + result.data.push(data.v); // extract only vertices data from associattion entity as it contains v, e, p + result.paths.push(data.p); + } + for (let rootEntity of rootEntityData) { + result.data.push(rootEntity); + } + result.paths = arrUnique(result.paths); + should.exist(result); + should.exist(result.data); + should.exist(result.paths); + result.data.should.be.instanceof(Array).and.have.lengthOf(17); // 5 person, 2 states, 5 cars, 5 place entities + let filteredData = result.data.filter(e => e._id.startsWith('state/')); + filteredData.should.be.length(2); + filteredData[0].name.should.equal('stateAA'); + filteredData[1].name.should.equal('stateBB'); + + result = { data: [], paths: [] }; + // with iLike traverse graph with filtering for state entities (filter with exclude one edge and include other edge) + traversalResponse = await db.traversal(null, { collection_name: 'person' }, { exclude_edges: ['resides'] }, + [{ + filter: [{ field: 'name', operation: 'iLike', value: 'StaTe%' }], + operator: 'or', // Default is AND operation + edge: 'lives' + }], + true); + rootEntityData = 
await traversalResponse.rootCursor.all(); + associationEntityData = await traversalResponse.associationCursor.all(); + for (let data of associationEntityData) { + result.data.push(data.v); // extract only vertices data from associattion entity as it contains v, e, p + result.paths.push(data.p); + } + for (let rootEntity of rootEntityData) { + result.data.push(rootEntity); + } + result.paths = arrUnique(result.paths); + should.exist(result); + should.exist(result.data); + should.exist(result.paths); + result.data.should.be.instanceof(Array).and.have.lengthOf(20); // 5 person, 5 states, 5 cars, 5 place entities + filteredData = result.data.filter(e => e._id.startsWith('state/')); + filteredData.should.be.length(5); + filteredData[0].name.should.equal('stateAA'); + filteredData[1].name.should.equal('stateBB'); + filteredData[2].name.should.equal('stateCC'); + filteredData[3].name.should.equal('stateDD'); + filteredData[4].name.should.equal('stateEE'); + }); + // filter with include edges + it('should traverse the graph with filters and included edges and return only the filtered and included edge vertices data', async () => { + const result = { data: [], paths: [] }; + // traverse graph with filtering for state entities (filter with exclude one edge and include other edge) + let traversalResponse = await db.traversal(null, { collection_name: 'person' }, { include_edges: ['has', 'lives'] }, + [{ + filters: [{ field: 'name', operation: 'eq', value: 'stateAA' }, { field: 'name', operation: 'eq', value: 'stateBB' }], + operator: 'or', // Default is AND operation + edge: 'lives' + }], + true); + let rootEntityData = await traversalResponse.rootCursor.all(); + let associationEntityData = await traversalResponse.associationCursor.all(); + for (let data of associationEntityData) { + result.data.push(data.v); // extract only vertices data from associattion entity as it contains v, e, p + result.paths.push(data.p); + } + for (let rootEntity of rootEntityData) { + 
result.data.push(rootEntity); + } + result.paths = arrUnique(result.paths); + should.exist(result); + should.exist(result.data); + should.exist(result.paths); + result.data.should.be.instanceof(Array).and.have.lengthOf(12); // 5 person, 2 states, 5 cars + let filteredData = result.data.filter(e => e._id.startsWith('state/')); + filteredData.should.be.length(2); + filteredData[0].name.should.equal('stateAA'); + filteredData[1].name.should.equal('stateBB'); + }); + // pagination - with limit should traverse along only the limit entities + it('pagination - should traverse the graph through only first two limited entities when limit filter is specified for root entity', async () => { + // traverse graph + const result = { data: [], paths: [] }; + let traversalResponse = await db.traversal(null, { collection_name: 'person', limit: 2 }, null, null, true); + const rootEntityData = await traversalResponse.rootCursor.all(); + const associationEntityData = await traversalResponse.associationCursor.all(); + for (let data of associationEntityData) { + result.data.push(data.v); // extract only vertices data from associattion entity as it contains v, e, p + result.paths.push(data.p); + } + for (let rootEntity of rootEntityData) { + result.data.push(rootEntity); + } + result.paths = arrUnique(result.paths); + should.exist(result); + should.exist(result.data); + should.exist(result.paths); + result.data.should.be.instanceof(Array).and.have.lengthOf(10); // 2 person, 2 states, 2 cars, 2 place and 2 state entities + result.paths.should.be.instanceof(Array).and.have.lengthOf(8); // 8 edges (4 edges from each person vertex) + const filteredData = result.data.filter(e => e._id.startsWith('person/')); + filteredData.should.be.length(2); + filteredData[0].name.should.equal('Alice'); + filteredData[1].name.should.equal('Bob'); + }); + // pagination with both limit and offset + it('pagination - should traverse the graph through only last two limited entities when limit and offset filter is 
specified for root entity', async () => { + // traverse graph + const result = { data: [], paths: [] }; + let traversalResponse = await db.traversal(null, { collection_name: 'person', limit: 2, offset: 3 }, null, null, true); + const rootEntityData = await traversalResponse.rootCursor.all(); + const associationEntityData = await traversalResponse.associationCursor.all(); + for (let data of associationEntityData) { + result.data.push(data.v); // extract only vertices data from associattion entity as it contains v, e, p + result.paths.push(data.p); + } + for (let rootEntity of rootEntityData) { + result.data.push(rootEntity); + } + result.paths = arrUnique(result.paths); + should.exist(result); + should.exist(result.data); + should.exist(result.paths); + result.data.should.be.instanceof(Array).and.have.lengthOf(10); // 2 person, 2 states, 2 cars, 2 place and 2 state entities + result.paths.should.be.instanceof(Array).and.have.lengthOf(8); // 8 edges (4 edges from each person vertex) + const filteredData = result.data.filter(e => e._id.startsWith('person/')); + filteredData.should.be.length(2); + filteredData[0].name.should.equal('Dave'); + filteredData[1].name.should.equal('Eve'); + }); + // traversal through list of vertices + it('array start vertices - should traverse the graph through list of specified start vertices', async () => { + // traverse graph + let result = { data: [], paths: [] }; + const traversalResponse = await db.traversal({ collection_name: 'person', start_vertex_ids: ['a', 'b', 'c'] }, null, null, null, true); + const rootEntityData = await traversalResponse.rootCursor.all(); + const associationEntityData = await traversalResponse.associationCursor.all(); + for (let data of associationEntityData) { + result.data.push(data.v); // extract only vertices data from associattion entity as it contains v, e, p + result.paths.push(data.p); + } + for (let rootEntity of rootEntityData) { + result.data.push(rootEntity); + } + result.paths = 
arrUnique(result.paths); + should.exist(result); + should.exist(result.data); + should.exist(result.paths); + result.data.should.be.instanceof(Array).and.have.lengthOf(15); // 3 persons, 3 state, 3 cars, 3 place, 3 states + result.paths.should.be.instanceof(Array).and.have.lengthOf(12); // 12 edges (4 edges from each person vertex) + const filteredData = result.data.filter(e => e._id.startsWith('person/')); + filteredData.should.be.length(3); + filteredData[0].name.should.equal('Alice'); + filteredData[1].name.should.equal('Bob'); + filteredData[2].name.should.equal('Charlie'); + }); + // traversal from Car entity with specified vertices + it('car entity - should traverse the graph from Car vertice and return list of traversed entities from Car entity', async () => { + // traverse graph + let result = { data: [], paths: [] }; + const traversalResponse = await db.traversal({ collection_name: 'car', start_vertex_ids: ['c1', 'c2'] }, null, null, null, true); + const rootEntityData = await traversalResponse.rootCursor.all(); + const associationEntityData = await traversalResponse.associationCursor.all(); + for (let data of associationEntityData) { + result.data.push(data.v); // extract only vertices data from associattion entity as it contains v, e, p + result.paths.push(data.p); + } + for (let rootEntity of rootEntityData) { + result.data.push(rootEntity); + } + result.paths = arrUnique(result.paths); + should.exist(result); + should.exist(result.data); + should.exist(result.paths); + result.data.should.be.instanceof(Array).and.have.lengthOf(6); // 2 cars, 2 place, 2 states + result.paths.should.be.instanceof(Array).and.have.lengthOf(4); // 4 edges (2 edges from each car vertex) + const filteredData = result.data.filter(e => e._id.startsWith('car/')); + filteredData.should.be.length(2); + filteredData[0].name.should.equal('carA'); + filteredData[1].name.should.equal('carB'); + }); + // collection traversal from car entity + it('car entity - should traverse the graph 
from Car Collection and return all list of traversed entities from Car entity', async () => { + // traverse graph + let result = { data: [], paths: [] }; + const traversalResponse = await db.traversal(null, { collection_name: 'car' }, null, null, true); + const rootEntityData = await traversalResponse.rootCursor.all(); + const associationEntityData = await traversalResponse.associationCursor.all(); + for (let data of associationEntityData) { + result.data.push(data.v); // extract only vertices data from associattion entity as it contains v, e, p + result.paths.push(data.p); + } + for (let rootEntity of rootEntityData) { + result.data.push(rootEntity); + } + result.paths = arrUnique(result.paths); + should.exist(result); + should.exist(result.data); + should.exist(result.paths); + result.data.should.be.instanceof(Array).and.have.lengthOf(15); // 5 cars, 5 place, 5 states + result.paths.should.be.instanceof(Array).and.have.lengthOf(10); // 10 edges (2 edges from each car vertex) + const filteredData = result.data.filter(e => e._id.startsWith('car/')); + filteredData.should.be.length(5); + filteredData[0].name.should.equal('carA'); + filteredData[1].name.should.equal('carB'); + filteredData[2].name.should.equal('carC'); + filteredData[3].name.should.equal('carD'); + filteredData[4].name.should.equal('carE'); + }); + // traversal from Place entity with inbound vertices + it('inbound traversal - should traverse the graph from Place vertice in inbound direction and return list of traversed entities from Place entity', async () => { + // traverse graph + let result = { data: [], paths: [] }; + const traversalResponse = await db.traversal({ collection_name: 'place', start_vertex_ids: ['p1'] }, null, { direction: Direction.INBOUND }, null, true); + const rootEntityData = await traversalResponse.rootCursor.all(); + const associationEntityData = await traversalResponse.associationCursor.all(); + for (let data of associationEntityData) { + result.data.push(data.v); // extract 
only vertices data from associattion entity as it contains v, e, p + result.paths.push(data.p); + } + for (let rootEntity of rootEntityData) { + result.data.push(rootEntity); + } + result.paths = arrUnique(result.paths); + should.exist(result); + should.exist(result.data); + should.exist(result.paths); + result.data.should.be.instanceof(Array).and.have.lengthOf(3); // 1 place, 1 car, 1 person + result.paths.should.be.instanceof(Array).and.have.lengthOf(2); // 2 edges ( Place <- Car <- Person ) + result.data[0].name.should.equal('carA'); + result.data[1].name.should.equal('Alice'); + result.data[2].name.should.equal('placeA'); + }); + // traversal from Place Collection with inbound vertices + it('inbound traversal - should traverse the graph from Place collection in inbound direction and return list of all traversed entities from Place entity', async () => { + // traverse graph + let result = { data: [], paths: [] }; + const traversalResponse = await db.traversal(null, { collection_name: 'place' }, { direction: Direction.INBOUND }, null, true); + const rootEntityData = await traversalResponse.rootCursor.all(); + const associationEntityData = await traversalResponse.associationCursor.all(); + for (let data of associationEntityData) { + result.data.push(data.v); // extract only vertices data from associattion entity as it contains v, e, p + result.paths.push(data.p); + } + for (let rootEntity of rootEntityData) { + result.data.push(rootEntity); + } + result.paths = arrUnique(result.paths); + should.exist(result); + should.exist(result.data); + should.exist(result.paths); + result.data.should.be.instanceof(Array).and.have.lengthOf(15); // 5 place, 5 car, 5 person + result.paths.should.be.instanceof(Array).and.have.lengthOf(10); // 10 edges ( 2 from each place, Place <- Car <- Person ) + let filteredData = result.data.filter(e => e._id.startsWith('place/')); + filteredData.should.be.length(5); + filteredData = result.data.filter(e => e._id.startsWith('car/')); + 
filteredData.should.be.length(5); + filteredData = result.data.filter(e => e._id.startsWith('person/')); + filteredData.should.be.length(5); + }); + // sort root collection in DESC order + it('should sort the root collection in descending order and return data from all traversed entities', async () => { + // traverse graph + const result = { data: [], paths: [] }; + let traversalResponse = await db.traversal(null, { collection_name: 'person', sorts: { name: 'DESC' } }, null, null, true); + const rootEntityData = await traversalResponse.rootCursor.all(); + rootEntityData[0].name.should.equal('Eve'); + rootEntityData[1].name.should.equal('Dave'); + rootEntityData[2].name.should.equal('Charlie'); + rootEntityData[3].name.should.equal('Bob'); + rootEntityData[4].name.should.equal('Alice'); + }); + it('should update a vertice given the document handle', async () => { + const doc = await db.getVertex(personCollectionName, `person/e`); + // doc with updated name + doc.name = 'test'; + await db.update(personCollectionName, [doc]); + const docUpdated = await db.getVertex(personCollectionName, `person/e`); + docUpdated.name.should.equal('test'); + }); + it('should update a edge given the document handle', async () => { + const doc = await db.getEdge(hasEdgeCollectionName, edgeResult._id); + // doc with updated name + doc.info = 'test has Car E'; + await db.update(hasEdgeCollectionName, [doc]); + const edgeDoc = await db.getEdge(hasEdgeCollectionName, edgeResult._id); + edgeDoc.info.should.equal('test has Car E'); + }); + it('should remove a vertice given the document handle for Person B', async () => { + const removedDoc = await db.removeVertex(personCollectionName, `person/b`); + should.exist(removedDoc); + removedDoc[0]._id.should.equal('person/b'); + removedDoc[0]._key.should.equal('b'); + }); + it('should remove edge given the document handle', async () => { + const removedDoc = await db.removeEdge(hasEdgeCollectionName, edgeResult._id); + should.exist(removedDoc); + 
removedDoc.error.should.equal(false); + removedDoc.code.should.equal(202); + }); + }); +}; + +providers.forEach((providerCfg) => { + describe(`Graphs with database provider ${providerCfg.name}`, () => { + testProvider(providerCfg); + }); +}); diff --git a/packages/chassis-srv/test/grpc_reflection.spec.ts b/packages/chassis-srv/test/grpc_reflection.spec.ts new file mode 100644 index 00000000..ffd29387 --- /dev/null +++ b/packages/chassis-srv/test/grpc_reflection.spec.ts @@ -0,0 +1,130 @@ +import * as should from 'should'; +import { createLogger } from '@restorecommerce/logger'; +import * as chassis from '../src/index.js'; +import { Server, buildReflectionService } from '../src/index.js'; +import { createClient } from '@restorecommerce/grpc-client'; +import { + protoMetadata +} from '@restorecommerce/rc-grpc-clients/dist/generated/test/test.js'; +import { ServerReflectionService } from 'nice-grpc-server-reflection'; +import { Channel, createChannel } from 'nice-grpc'; +import { + ServerReflectionDefinition, + ServerReflectionClient, + DeepPartial, +} from '@restorecommerce/rc-grpc-clients/dist/generated/grpc/reflection/v1alpha/reflection.js'; +import { it, describe, beforeAll, afterAll, beforeEach, afterEach } from 'vitest'; + +const toAsync = async function* (requests: DeepPartial[]): AsyncIterable> { + for (const request of requests) { + yield request; + } +}; + +describe('binding the grpc.ServerReflection service', () => { + let server: Server; + beforeAll(async () => { + await chassis.config.load(process.cwd() + '/test'); + const cfg = await chassis.config.get(); + const logger = createLogger(cfg.get('logger')); + server = new Server(cfg.get('server'), logger); + + const reflectionService = buildReflectionService([ + { + descriptor: protoMetadata.fileDescriptor + } + ]); + + await server.bind('reflection', { + service: ServerReflectionService, + implementation: reflectionService + }); + + await server.start(); + await new Promise((resolve, reject) => { + 
setTimeout(resolve, 1000); + }); + }); + + afterAll(async function end() { + await server.stop(); + await new Promise((resolve, reject) => { + setTimeout(resolve, 2000); + }); + }, 4000); + describe('calling endpoint ServerReflectionInfo', () => { + let client: ServerReflectionClient; + let channel: Channel; + beforeEach(async () => { + const cfg = await chassis.config.get(); + const logger = createLogger(cfg.get('logger')); + channel = createChannel(cfg.get('client:reflection:address')); + client = createClient({ + ...cfg.get('client:reflection'), + logger + }, ServerReflectionDefinition, channel); + }); + afterEach(async () => { + await channel.close(); + }); + describe('with fileByFilename request', () => { + it('should return file by file name response', + async () => { + const request = client.serverReflectionInfo(toAsync([{ + fileByFilename: 'test/test.proto' + }])); + + for await (const data of request) { + should.exist(data.fileDescriptorResponse); + should.exist(data.fileDescriptorResponse.fileDescriptorProto); + data.fileDescriptorResponse.fileDescriptorProto.should.be.length(1); + } + }); + }); + describe('with fileContainingSymbol request', () => { + it('should return file containing path / symbol response', + async () => { + const request = client.serverReflectionInfo(toAsync([{ + fileContainingSymbol: 'test.Test' + }])); + + for await (const data of request) { + should.exist(data); + should.exist(data.fileDescriptorResponse); + should.exist(data.fileDescriptorResponse.fileDescriptorProto); + data.fileDescriptorResponse.fileDescriptorProto.should.be.length(1); + } + }); + }); + describe('with allExtensionNumbersOfType request', () => { + it('should get allExtensionsNumbers response', + async () => { + const request = client.serverReflectionInfo(toAsync([{ + allExtensionNumbersOfType: 'test.ExtendMe' + }])); + + for await (const data of request) { + should.exist(data); + should.exist(data.allExtensionNumbersResponse); + 
should.exist(data.allExtensionNumbersResponse.baseTypeName); + data.allExtensionNumbersResponse.baseTypeName.should.equal('test.ExtendMe'); + } + }); + }); + describe('with listServices request', () => { + it('should list all exposed services', + async () => { + const request = client.serverReflectionInfo(toAsync([{ + listServices: '' + }])); + + for await (const data of request) { + should.exist(data); + should.exist(data.listServicesResponse); + should.exist(data.listServicesResponse.service); + data.listServicesResponse.service.should.be.length(1); + } + }); + }); + }); +}); diff --git a/packages/chassis-srv/test/kafka_offsetstore.spec.ts b/packages/chassis-srv/test/kafka_offsetstore.spec.ts new file mode 100644 index 00000000..032740fe --- /dev/null +++ b/packages/chassis-srv/test/kafka_offsetstore.spec.ts @@ -0,0 +1,84 @@ +import * as should from 'should'; +import { Events, Topic, registerProtoMeta } from '@restorecommerce/kafka-client'; +import { createLogger } from '@restorecommerce/logger'; +import { OffsetStore } from '../src/offsets/index.js'; +import { createServiceConfig } from '@restorecommerce/service-config'; +import { + protoMetadata +} from '@restorecommerce/rc-grpc-clients/dist/generated/test/test.js'; +import { it, describe, beforeAll, afterAll, beforeEach, afterEach } from 'vitest'; + +registerProtoMeta(protoMetadata); + +/* global describe it before after */ + +describe('offsetStore', () => { + let events: Events; + const topicName = 'test'; + let topic: Topic; + let offsetStore: OffsetStore; + const eventName = 'testCreated'; + const testMessage = { value: 'testValue', count: 1 }; + + const cfg = createServiceConfig(process.cwd() + '/test'); + const logger = createLogger(cfg.get('logger')); + + beforeEach(async function start() { + events = new Events(cfg.get('events:kafka'), logger); + await events.start(); + }, 10000); + afterEach(async function stop() { + await offsetStore.stop(); + await events.stop(); + }, 10000); + + it('should emit an 
event and verify the stored offset value from redis', + async function testStoredOffsetValue() { + offsetStore = new OffsetStore(events, cfg, logger); + topic = await (events.topic(topicName)); + + const listener = (message, context) => { + testMessage.value.should.equal(message.value); + testMessage.count.should.equal(message.count); + }; + // get the current offsetValue for 'test' topic before emitting message + const currentOffset = await topic.$offset(BigInt(-1)); + // emit message to kafka + await topic.on(eventName, listener); + await topic.emit(eventName, testMessage); + const newOffset = await new Promise((resolve, reject) => { + setTimeout(async () => { + const offsetValue = await offsetStore.getOffset(topicName); + resolve(offsetValue); + }, 8000); + }); + should.exist(newOffset); + BigInt(newOffset).should.equal(currentOffset + BigInt(1)); + }, 15000); + it('should consume a previously emitted message from Kafka', + async function testConsumeListener() { + // emit testMessage to kafka + topic = await events.topic(topicName); + await topic.emit(eventName, testMessage); + + // start offsetTracker subscribing to previous offset value read + // from redis and consume the above message + offsetStore = new OffsetStore(events, cfg, logger); + const listener = async (message, context) => { + testMessage.value.should.equal(message.value); + testMessage.count.should.equal(message.count); + }; + + // get the current offsetValue for 'test' topic before emitting message + let startingOffset = await offsetStore.getOffset(topicName); + await topic.on(eventName, listener, { startingOffset }); + + // wait for 2sec so that message is consumed and + // test is not ended immediately + return new Promise((resolve, reject) => { + setTimeout(() => { + resolve(); + }, 2000); + }); + }, 10000); +}); diff --git a/packages/chassis-srv/test/microservice.spec.ts b/packages/chassis-srv/test/microservice.spec.ts new file mode 100644 index 00000000..af37c4db --- /dev/null +++ 
b/packages/chassis-srv/test/microservice.spec.ts @@ -0,0 +1,388 @@ +import * as should from 'should'; +import * as _ from 'lodash'; +import { createLogger } from '@restorecommerce/logger'; +import * as chassis from '../src/index.js'; +import { createClient } from '@restorecommerce/grpc-client'; +import { TestDefinition, TestClient } from '@restorecommerce/rc-grpc-clients/dist/generated/test/test.js'; +import { TestServiceImplementation, TestDefinition as ServerTestDefinition } from '@restorecommerce/rc-grpc-clients/dist/generated-server/test/test.js'; +import { BindConfig } from '../src/microservice/transport/provider/grpc/index.js'; +import { + StreamDefinition, + StreamServiceImplementation, + StreamClient +} from '@restorecommerce/rc-grpc-clients/dist/generated/test/test.js'; +import { Channel, createChannel } from 'nice-grpc'; +import { DeepPartial } from '@restorecommerce/rc-grpc-clients/dist/generated/grpc/reflection/v1alpha/reflection.js'; +import { it, describe, beforeAll, afterAll } from 'vitest'; + +const config = chassis.config; +const Server = chassis.Server; +const grpc = chassis.grpc; + +const status = { + code: 200, + message: 'success' +}; + +export const testService: TestServiceImplementation = { + test: async (request) => { + request.value.should.be.equal('hello'); + return { + result: 'welcome', + status + }; + }, + create: async (request) => { + return { + items: request.items.map(item => ({ + payload: item, + status + })), + total_count: request.items.length, + operation_status: status + }; + }, + throw: async () => { + throw new Error('forced error'); + }, + notFound: async () => ({ + status: { + code: 404, + message: 'test not found' + } + }), + notImplemented: async () => ({}), + read: async () => ({ + items: [], + total_count: 0, + }) +}; + +const streamService: StreamServiceImplementation = { + requestStream: async (request) => { + let result = ''; + for await (const item of request) { + result += item.value; + } + 
result.should.equal('ping'); + return { + result: 'pong', + status + }; + }, + async* responseStream(request) { + should.exist(request); + should.exist(request.value); + request.value.should.equal('ping'); + for (let i = 0; i < 3; i += 1) { + yield {result: `${i}`}; + } + }, + async* biStream(request) { + for await (const item of request) { + should.exist(item); + should.exist(item.value); + item.value.should.equal('ping'); + } + yield {result: 'pong'}; + } +}; + +const toAsync = async function* (requests: DeepPartial[]): AsyncIterable> { + for (const request of requests) { + yield request; + } +}; + +describe('microservice.Server', () => { + let server: chassis.Server; + describe('constructing the sever', () => { + it('should return a server when provided with correct config', + async () => { + await config.load(process.cwd() + '/test'); + const cfg = await config.get(); + const logger = createLogger(cfg.get('logger')); + server = new Server(cfg.get('server'), logger); + should.exist(server); + should.exist(server.logger); + should.exist(server.logger.log); + const levels = [ + 'error' + ]; + _.forEach(levels, (level) => { + should.exist(server.logger[level]); + }); + should.exist(server.transport); + should.exist(server.transport.grpcTest); + server.transport.grpcTest.should.be.an.instanceof(grpc.Server); + }); + }); + describe('calling bind', () => { + it('should wrap a service and create endpoints for each object function', + async () => { + await server.bind('test', { + service: ServerTestDefinition, + implementation: testService + } as BindConfig); + await server.bind('stream', { + service: StreamDefinition, + implementation: streamService + } as BindConfig); + }); + }); + describe('calling start', () => { + it('should expose the created endpoints via transports', + async () => { + await config.load(process.cwd() + '/test'); + const cfg = await config.get(); + const logger = createLogger(cfg.get('logger')); + let serving = false; + server.on('serving', () => 
{ + serving = !serving; + }); + await server.start(); + await new Promise((resolve, reject) => { + setTimeout(resolve, 1000); + }); + serving.should.equal(true); + const testChannel = createChannel(cfg.get('client:test:address')); + const testClient: TestClient = createClient({ + ...cfg.get('client:test'), + logger + }, TestDefinition, testChannel); + should.exist(testClient); + // --- 'test' endpoint --- + const testResult = await testClient.test({value: 'hello'}); + should.exist(testResult.status); + testResult.status.code.should.equal(200); + testResult.status.message.should.equal('success'); + should.exist(testResult.result); + testResult.result.should.be.equal('welcome'); + + // --- 'testCreate' endpoint --- + const msg: any = { + testKey: 'testVal' + }; + const msgBuffer: any = Buffer.from(JSON.stringify(msg)); + const createResult = await testClient.create({ + items: [{ + value: 'helloWorld123', + data: {value: msgBuffer} + }] + }); + should.exist(createResult.operationStatus); + createResult.operationStatus.code.should.equal(200); + createResult.operationStatus.message.should.equal('success'); + should.exist(createResult.items); + // verify decoded google.protobuf.any buffered response + createResult.items[0].payload.value.should.equal('helloWorld123'); + const decodedBuffResp = JSON.parse(createResult.items[0].payload.data.value.toString()); + decodedBuffResp.testKey.should.equal('testVal'); + + // --- 'throw' endpoint --- + const throwResult = await testClient.throw({value: 'hello'}); + should.exist(throwResult.status); + throwResult.status.code.should.equal(500); + throwResult.status.message.should.equal('forced error'); + should.not.exist(throwResult.result); + + // --- 'notFound' endpoint --- + const notFoundResult = await testClient.notFound({value: 'hello'}); + should.exist(notFoundResult.status); + notFoundResult.status.code.should.equal(404); + notFoundResult.status.message.should.equal('test not found'); + should.not.exist(notFoundResult.result); 
+ + // 'requestStream' + const streamChannel = createChannel(cfg.get('client:stream:address')); + const streamClient: StreamClient = createClient({ + ...cfg.get('client:stream'), + logger + }, StreamDefinition, streamChannel); + const streamResult = await streamClient.requestStream(toAsync([{ + value: 'ping' + }])); + should.exist(streamResult.status); + streamResult.status.code.should.equal(200); + streamResult.status.message.should.equal('success'); + should.exist(streamResult); + should.exist(streamResult.result); + streamResult.result.should.be.equal('pong'); + + // 'responseStream' + const responseStreamRequest = streamClient.responseStream({ + value: 'ping' + }); + let concatDataResp = []; + for await (const response of responseStreamRequest) { + concatDataResp.push(response.result); + } + concatDataResp.should.deepEqual(['0', '1', '2']); + + // 'biStream' + const biStreamRequest = await streamClient.biStream(toAsync([{ + value: 'ping' + }])); + for await (const response of biStreamRequest) { + response.result.should.be.equal('pong'); + } + }); + }); + + describe('connecting with multiple clients', () => { + it('should be possible', async () => { + const numClients = 3; + const clients: TestClient[] = []; + const cfg = await chassis.config.get(); + for (let i = 0; i < numClients; i += 1) { + const channel = createChannel(cfg.get('client:test:address')); + const client = createClient({ + ...cfg.get('client:test'), + logger: createLogger(cfg.get('logger')) + }, TestDefinition, channel); + clients.push(client); + } + + const reqs = []; + for (let i = 0; i < numClients; i += 1) { + reqs.push(clients[i].test({ + value: 'hello', + })); + } + + const resps = await Promise.all(reqs); + for (let i = 0; i < resps.length; i += 1) { + const response = await resps[i]; + should.exist(response.status); + response.status.code.should.equal(200); + response.status.message.should.equal('success'); + should.exist(response.result); + response.result.should.be.equal('welcome'); + 
} + }); + }); + describe('calling end', () => { + it('should stop the server and no longer provide endpoints', + async () => { + await server.stop(); + }); + }); +}); + +describe('microservice.Client', () => { + let channel: Channel; + let client: TestClient; + let server: chassis.Server; + describe('constructing the client', () => { + it('should create a client when providing correct configuration', + async () => { + await config.load(process.cwd() + '/test'); + const cfg = await chassis.config.get(); + channel = createChannel(cfg.get('client:test:address')); + client = createClient({ + ...cfg.get('client:test'), + logger: createLogger(cfg.get('logger')) + }, TestDefinition, channel); + should.exist(client); + }); + }); + describe('with running server', () => { + beforeAll(async () => { + await config.load(process.cwd() + '/test'); + const cfg = await config.get(); + const logger = createLogger(cfg.get('logger')); + server = new Server(cfg.get('server'), logger); + await server.bind('test', { + service: ServerTestDefinition, + implementation: testService + } as BindConfig); + await server.start(); + await new Promise((resolve, reject) => { + setTimeout(resolve, 1000); + }); + }); + afterAll(async () => { + await server.stop(); + }); + describe('connect', () => { + it('should return a service object with endpoint functions', + async () => { + should.exist(client); + should.exist(client.test); + should.exist(client.throw); + should.exist(client.notImplemented); + should.exist(client.notFound); + + // test + let result = await client.test({ + value: 'hello', + }); + should.exist(result); + should.exist(result.status); + result.status.code.should.equal(200); + result.status.message.should.equal('success'); + should.exist(result.result); + result.result.should.equal('welcome'); + + // test with timeout + await config.load(process.cwd() + '/test'); + const cfg = await config.get(); + cfg.set('client:test:timeout', 5000); + const channel = 
createChannel(cfg.get('client:test:address')); + const newGrpcClient = createClient({ + ...cfg.get('client:test'), + logger: createLogger(cfg.get('logger')) + }, TestDefinition, channel); + result = await newGrpcClient.test({ + value: 'hello', + }); + should.exist(result); + should.exist(result.status); + result.status.code.should.equal(200); + result.status.message.should.equal('success'); + should.exist(result.result); + result.result.should.equal('welcome'); + }); + }); + describe('end', () => { + it('should disconnect from all endpoints', async () => { + await channel.close(); + }); + }); + }); + describe('without a running server', () => { + describe('connect', () => { + it('Call should not be created from a closed channel ', + async () => { + should.exist(client); + should.exist(client.test); + should.exist(client.throw); + should.exist(client.notImplemented); + + // test + await config.load(process.cwd() + '/test'); + const cfg = await config.get(); + cfg.set('client:test:timeout', 1); + const channel = createChannel(cfg.get('client:test:address')); + const timeoutGrpcClient = createClient({ + ...cfg.get('client:test'), + logger: createLogger(cfg.get('logger')) + }, TestDefinition, channel); + try { + await timeoutGrpcClient.test({ + value: 'hello', + }); + } catch (err) { + should.exist(err); + err.message.should.equal('/test.Test/Test DEADLINE_EXCEEDED: Deadline exceeded'); + } + }); + }); + describe('end', () => { + it('should disconnect from all endpoints', + async () => { + await channel.close(); + }); + }); + }); +}); diff --git a/packages/chassis-srv/test/transport.spec.ts b/packages/chassis-srv/test/transport.spec.ts new file mode 100644 index 00000000..056716e2 --- /dev/null +++ b/packages/chassis-srv/test/transport.spec.ts @@ -0,0 +1,162 @@ +import * as should from 'should'; +import { createLogger } from '@restorecommerce/logger'; +import { createClient } from '@restorecommerce/grpc-client'; +import { grpcServer } from '../src/index.js'; +import { 
TestClient, TestDefinition } from '@restorecommerce/rc-grpc-clients/dist/generated/test/test.js'; +import { TestDefinition as ServerTestDefinition } from '@restorecommerce/rc-grpc-clients/dist/generated-server/test/test.js'; +import { testService } from './microservice.spec.js'; +import { BindConfig } from '../src/microservice/transport/provider/grpc/index.js'; +import { createChannel } from 'nice-grpc'; +import { it, describe, beforeAll, afterAll } from 'vitest'; +/* global describe it before after*/ + +const providers = [{ + config: { + client: { + test: { + address: 'localhost:50060' + } + }, + server: { + name: 'grpcTest', + provider: 'grpc', + addr: 'localhost:50060' + }, + logger: { + console: { + handleExceptions: false, + level: 'crit', + colorize: true, + prettyPrint: true + } + } + }, + name: 'grpc', + Server: grpcServer, +}]; + +providers.forEach((provider) => { + describe(`transport provider ${provider.name}`, () => { + describe('the server', () => { + const ProviderServer = provider.Server; + let server: grpcServer; + it('should conform to a server provider', () => { + should.exist(ProviderServer.constructor); + should.exist(ProviderServer.prototype.bind); + should.exist(ProviderServer.prototype.start); + should.exist(ProviderServer.prototype.end); + }); + describe('constructing the server provider with proper config', + () => { + it('should result in a server transport provider', () => { + const logger = createLogger(provider.config.logger); + server = new ProviderServer(provider.config.server, logger); + should.exist(server); + }); + }); + describe('binding a service', () => { + it('should result in a wrapped service', async () => { + await server.bind({ + service: ServerTestDefinition, + implementation: testService + } as BindConfig); + }); + }); + describe('start', () => { + it('should start the server', async () => { + await server.start(); + await new Promise((resolve, reject) => { + setTimeout(resolve, 1000); + }); + }); + }); + describe('end', 
() => { + it('should stop the server', async () => { + await server.end(); + }); + }); + }); + describe('the client', () => { + let client: TestClient; + const methodName = 'test'; + let endpoint; + const response = { + result: 'welcome', + status: { + code: 200, + message: 'success' + } + }; + const request = { + value: 'hello', + }; + describe('constructing the client provider with proper config', + () => { + it('should result in a client transport provider', () => { + const logger = createLogger(provider.config.logger); + const channel = createChannel(provider.config.client.test.address); + client = createClient({ + ...provider.config.client.test, + logger + }, TestDefinition, channel); + should.exist(client); + }); + }); + describe('makeEndpoint', () => { + // temp disable as due to retry it times out + // describe('without running server', function runWithoutServer() { + // this.slow(200); + // it('should fail', async () => { + // endpoint = client[methodName]; + // try { + // await endpoint({}); + // } catch (err) { + // err.message.startsWith('/test.Test/Test UNAVAILABLE: No connection established').should.equal(true); + // } + // }); + // }); + describe('with running server', () => { + const errMessage = 'forced error'; + let server: grpcServer; + beforeAll(async function startServer() { + const logger = createLogger(provider.config.logger); + server = new provider.Server(provider.config.server, logger); + await server.bind({ + service: ServerTestDefinition, + implementation: testService + } as BindConfig); + await server.start(); + await new Promise((resolve, reject) => { + setTimeout(resolve, 2000); + }); + }, 5000); + afterAll(async () => { + await server.end(); + }); + it('should create an endpoint', () => { + endpoint = client[methodName]; + should.exist(endpoint); + }); + it('should succeed when calling with empty context', + async () => { + const result = await endpoint(request, {}); + should.deepEqual(response, result); + }); + it('should succeed 
when calling without context', + async () => { + const result = await endpoint(request); + should.deepEqual(response, result); + }); + it('should return an error when calling failing endpoint', + async () => { + const endpointThrow = client['throw']; + should.exist(endpoint); + const result = await endpointThrow(request); + result.status.code.should.equal(500); + result.status.message.should.equal('forced error'); + }); + }); + }); + }); + }); +}); diff --git a/packages/chassis-srv/test/views/address_view.json b/packages/chassis-srv/test/views/address_view.json new file mode 100644 index 00000000..5e23f36b --- /dev/null +++ b/packages/chassis-srv/test/views/address_view.json @@ -0,0 +1,79 @@ +{ + "view": { + "collectionName": "addresss", + "viewName": "addresss_view", + "similarityThreshold": 0.1, + "options": { + "type": "arangosearch", + "links": { + "addresss": { + "includeAllFields": true, + "fields": { + "city": { + "analyzers": [ + "trigram", + "trigram_norm" + ] + }, + "country": { + "analyzers": [ + "trigram", + "trigram_norm" + ] + } + } + } + } + } + }, + "analyzers": [ + "trigram", + "trigram_norm" + ], + "analyzerOptions": [ + { + "trigram": { + "type": "ngram", + "properties": { + "min": 3, + "max": 3, + "preserveOriginal": true + }, + "features": [ + "frequency", + "position", + "norm" + ] + } + }, + { + "trigram_norm": { + "type": "pipeline", + "properties": { + "pipeline": [ + { + "type": "norm", + "properties": { + "locale": "de.utf-8", + "case": "lower" + } + }, + { + "type": "ngram", + "properties": { + "min": 3, + "max": 3, + "preserveOriginal": true + } + } + ] + }, + "features": [ + "frequency", + "position", + "norm" + ] + } + } + ] + } \ No newline at end of file diff --git a/packages/chassis-srv/test/views/users_view.json b/packages/chassis-srv/test/views/users_view.json new file mode 100644 index 00000000..32787722 --- /dev/null +++ b/packages/chassis-srv/test/views/users_view.json @@ -0,0 +1,79 @@ +{ + "view": { + "collectionName": 
"users", + "viewName": "users_view", + "similarityThreshold": 0.1, + "options": { + "type": "arangosearch", + "links": { + "users": { + "includeAllFields": true, + "fields": { + "first_name": { + "analyzers": [ + "trigram", + "trigram_norm" + ] + }, + "last_name": { + "analyzers": [ + "trigram", + "trigram_norm" + ] + } + } + } + } + } + }, + "analyzers": [ + "trigram", + "trigram_norm" + ], + "analyzerOptions": [ + { + "trigram": { + "type": "ngram", + "properties": { + "min": 3, + "max": 3, + "preserveOriginal": true + }, + "features": [ + "frequency", + "position", + "norm" + ] + } + }, + { + "trigram_norm": { + "type": "pipeline", + "properties": { + "pipeline": [ + { + "type": "norm", + "properties": { + "locale": "de.utf-8", + "case": "lower" + } + }, + { + "type": "ngram", + "properties": { + "min": 3, + "max": 3, + "preserveOriginal": true + } + } + ] + }, + "features": [ + "frequency", + "position", + "norm" + ] + } + } + ] + } \ No newline at end of file diff --git a/packages/chassis-srv/tsconfig.json b/packages/chassis-srv/tsconfig.json new file mode 100644 index 00000000..4648f302 --- /dev/null +++ b/packages/chassis-srv/tsconfig.json @@ -0,0 +1,16 @@ +{ + "extends": "../../node_modules/@restorecommerce/dev/tsconfig.json", + "compilerOptions": { + "outDir": "lib", + "emitDecoratorMetadata": true, + "experimentalDecorators": true + }, + "include": [ + "./src/**/*.ts" + ], + "exclude": [ + "node_modules", + "lib", + "doc" + ] +} diff --git a/packages/chassis-srv/tsconfig.test.json b/packages/chassis-srv/tsconfig.test.json new file mode 100644 index 00000000..dec1c01f --- /dev/null +++ b/packages/chassis-srv/tsconfig.test.json @@ -0,0 +1,10 @@ +{ + "extends": "./tsconfig.json", + "compilerOptions": { + "outDir": "test", + "noImplicitAny": false + }, + "include": [ + "./test/**/*.ts" + ] +} diff --git a/packages/chassis-srv/vitest.config.ts b/packages/chassis-srv/vitest.config.ts new file mode 100644 index 00000000..e1a270bc --- /dev/null +++ 
b/packages/chassis-srv/vitest.config.ts @@ -0,0 +1,2 @@ +import config from '../../node_modules/@restorecommerce/dev/vitest.config.js'; +export default config; diff --git a/packages/kafka-client/src/events/provider/kafka/index.ts b/packages/kafka-client/src/events/provider/kafka/index.ts index de3cb97d..02bd2124 100644 --- a/packages/kafka-client/src/events/provider/kafka/index.ts +++ b/packages/kafka-client/src/events/provider/kafka/index.ts @@ -286,58 +286,7 @@ export class Topic { private async initConsumerIfNotExists(queue?: boolean): Promise { if (!this.consumer) { - this.consumer = new Consumer({ - groupId: this.provider.config.groupId + '_' + this.name, - ...this.provider.commonOptions - }); - - this.consumer.on('client:broker:connect', (err: any) => { - this.provider.logger.info('Consumer is ready.', err); - }); - - this.consumer.on('client:broker:disconnect', (err: any) => { - this.provider.logger.warn('Consumer connection failed:', err); - }); - - this.consumer.on('client:broker:failed', (err: any) => { - this.provider.logger.warn('Consumer connection failed:', err); - }); - - this.consumer.on('client:broker:drain', (err: any) => { - this.provider.logger.info('Consumer broker ready for requests:', err); - }); - - this.consumer.on('client:metadata', (err: any) => { - this.provider.logger.info('Consumer broker metadata:', err); - }); - - this.consumer.on('client:close', (err: any) => { - this.provider.logger.warn('Consumer client closed:', err); - }); - - this.consumer.on('consumer:group:join', (err: any) => { - this.provider.logger.info('Consumer joining group:', err); - }); - - this.consumer.on('consumer:group:leave', (err: any) => { - this.provider.logger.info('Consumer leaving group:', err); - }); - - this.consumer.on('consumer:group:rejoin', (err: any) => { - this.provider.logger.warn('Consumer re-joining group:', err); - }); - - this.consumer.on('consumer:group:rebalance', (err: any) => { - this.provider.logger.warn('Consumer group rebalancing:', 
err); - }); - - this.consumer.on('consumer:heartbeat:cancel', (err: any) => { - this.provider.logger.warn('Consumer heartbeat cancelled:', err); - }); - - this.consumer.on('consumer:heartbeat:error', (err: any) => { - this.provider.logger.error('Consumer heartbeat error:', err); - }); + this.consumer = await this.provider.newConsumer(this.provider.config.groupId + '_' + this.name); await this.consumer.connectToBrokers().then(() => { this.provider.logger.info(`Consumer for topic '${this.name}' connected`); @@ -848,6 +797,63 @@ export class Kafka { throw errors; } } + + async newConsumer(groupId: string): Promise { + const consumer = new Consumer({ + groupId: groupId, + ...this.commonOptions + }); + + consumer.on('client:broker:connect', (err: any) => { + this.logger.info('Consumer is ready.', err); + }); + + consumer.on('client:broker:disconnect', (err: any) => { + this.logger.warn('Consumer connection failed:', err); + }); + + consumer.on('client:broker:failed', (err: any) => { + this.logger.warn('Consumer connection failed:', err); + }); + + consumer.on('client:broker:drain', (err: any) => { + this.logger.info('Consumer broker ready for requests:', err); + }); + + consumer.on('client:metadata', (err: any) => { + this.logger.info('Consumer broker metadata:', err); + }); + + consumer.on('client:close', (err: any) => { + this.logger.warn('Consumer client closed:', err); + }); + + consumer.on('consumer:group:join', (err: any) => { + this.logger.info('Consumer joining group:', err); + }); + + consumer.on('consumer:group:leave', (err: any) => { + this.logger.info('Consumer leaving group:', err); + }); + + consumer.on('consumer:group:rejoin', (err: any) => { + this.logger.warn('Consumer re-joining group:', err); + }); + + consumer.on('consumer:group:rebalance', (err: any) => { + this.logger.warn('Consumer group rebalancing:', err); + }); + + consumer.on('consumer:heartbeat:cancel', (err: any) => { + this.logger.warn('Consumer heartbeat cancelled:', err); + }); + + 
consumer.on('consumer:heartbeat:error', (err: any) => { + this.logger.error('Consumer heartbeat error:', err); + }); + + return consumer; + } } /** diff --git a/packages/resource-base-interface/.editorconfig b/packages/resource-base-interface/.editorconfig new file mode 100644 index 00000000..0449b030 --- /dev/null +++ b/packages/resource-base-interface/.editorconfig @@ -0,0 +1,14 @@ + + +root = true + +[*] +indent_style = space +indent_size = 2 +end_of_line = lf +charset = utf-8 +trim_trailing_whitespace = true +insert_final_newline = true + +[*.md] +trim_trailing_whitespace = false diff --git a/packages/resource-base-interface/.gitignore b/packages/resource-base-interface/.gitignore new file mode 100644 index 00000000..607d7ab7 --- /dev/null +++ b/packages/resource-base-interface/.gitignore @@ -0,0 +1,21 @@ +*~ +*.bk +*.d.ts +*.d.ts.map +*.js +*.js.map +*.log + +.directory/ +.idea/ +.nyc_output/ +.project/ +.settings/ +.vscode/ + +coverage/ +lib/ +node_modules/ +npm-debug.log + +!.eslintrc.js diff --git a/packages/resource-base-interface/.mocharc.json b/packages/resource-base-interface/.mocharc.json new file mode 100644 index 00000000..c9108038 --- /dev/null +++ b/packages/resource-base-interface/.mocharc.json @@ -0,0 +1,6 @@ +{ + "extension": ["ts"], + "spec": "test/**/*.spec.ts", + "require": "ts-node/register" +} + \ No newline at end of file diff --git a/packages/resource-base-interface/.npmignore b/packages/resource-base-interface/.npmignore new file mode 100644 index 00000000..1b550fc1 --- /dev/null +++ b/packages/resource-base-interface/.npmignore @@ -0,0 +1,5 @@ +.* +coverage +src +test + diff --git a/packages/resource-base-interface/CHANGELOG.md b/packages/resource-base-interface/CHANGELOG.md new file mode 100644 index 00000000..7c90776b --- /dev/null +++ b/packages/resource-base-interface/CHANGELOG.md @@ -0,0 +1,224 @@ +### 1.7.0 (April 25th, 2025) + +- refine meta default behaviour +- add experimental resource aggregators + +### 1.6.2 (June 27th, 2024) 
+ +- up deps + +### 1.6.1 (May 14th, 2024) + +- up chassis-srv for removing response logging + +### 1.6.0 (April 20th, 2024) + +- up custom arguments prefix (so that array can be supported for bind vars) + +### 1.5.0 (April 15th, 2024) + +- up deps + +### 1.4.8 (March 19th, 2024) + +- up deps + +### 1.4.7 (February 29th, 2024) + +- add null check + +### 1.4.6 (February 29th, 2024) + +- update deps + +### 1.4.5 (February 21st, 2024) + +- update deps + +### 1.4.4 (November 26th, 2023) + +- removed deprecated method (collection.load) + +### 1.4.3 (November 25th, 2023) + +- updated all dependencies (added created_by to meta object) + +### 1.4.2 (November 20th, 2023) + +- updated all dependencies + +### 1.4.1 (November 15th, 2023) + +- updated token proto for expires_in, last_login and user proto for last_access +- updated all dependencies + +### 1.4.0 (September 19th, 2023) + +- up node version and dependencies + +### 1.3.0 (September 19th, 2023) + +- up deps (made all fields optional in proto files) + +### 1.2.5 (July 21st, 2023) + +- up deps + +### 1.2.4 (July 21st, 2023) + +- up deps + +### 1.2.3 (July 13th, 2023) + +- fix typo for encoding buffer field handler type + +### 1.2.2 (July 13th, 2023) + +- support nested buffer fields and also multiple bufferfields per entity, dateTime stamp field handlers +- updated depeendencies + +### 1.2.1 (June 19th, 2023) + +- extended resource base to support for nested buffer fields +- updated depeendencies + +### 1.2.0 (May 31st, 2023) + +- updated dependencies (includes chassis-srv updates for updated ArangoJs), pluralize protos and meta owner changes + +### 1.1.1 (October 14th, 2022) + +- updated dependencies + +### 1.1.0 (October 5th, 2022) + +- integrated full text search +- updated deps + +### 1.0.0 (August 25th, 2022) + +- migrated to fully-typed grpc-client and server +- Up deps + +### 0.2.15 (July 7th, 2022) + +- Up deps + +### 0.2.14 (June 28th, 2022) + +- added empty check for filter + +### 0.2.13 (June 28th, 2022) + 
+- fixed nested filter to json conversion +- fixed logger messages +- up deps + +### 0.2.12 (May 27th, 2022) + +- up dependencies + +### 0.2.11 (April 1st, 2022) + +- fix date time for traversal field entities + +### 0.2.10 (April 1st, 2022) + +- added special field handlers based on dateTime field config + +### 0.2.9 (March 23rd, 2022) + +- add empty check condition comparing to undefined (fix for isEmpty filter operation) + +### 0.2.8 (March 4th, 2022) + +- removed empty check condition (fix for isEmpty filter operation) + +### 0.2.7 (February 18th, 2022) + +- updated chassis-srv (includes fix for offset store config) + +### 0.2.6 (February 11th, 2022) + +- updated dependencies + +### 0.2.5 (February 7th, 2022) + +- fix for strategy for fields for updated redis + +### 0.2.4 (January 28th, 2022) + +- remove bluebird and updated redis + +### 0.2.3 (January 28th, 2022) + +- fix traversal for changes in proto structure and updated tests +- updated dependencies + +### 0.2.2 (December 22nd, 2021) + +- updated RC dependencies + +### 0.2.1 (December 9th, 2021) + +- updated dependencies + +### 0.2.0 (August 4th, 2021) + +- updated create (to remove edgeDef creation) and delete method to match new proto structure response +- generate status array for create and update operations and up tests +- generate status array for upsert and improve error handling +- added status array for delete response +- filter structure changes +- updated grpc-client for tests, fix for filter handling (enum mapping), added error array to all tests + +### 0.1.1 (May 18th, 2021) + +- improved logging + +### 0.1.0 (April 27th, 2021) + +#### Contains breaking changes! 
+ +- switch to kafkajs +- change config format for events +- updated dependencies + +### 0.0.9 (March 19th, 2021) + +- fix create and update to support inbound edges +- updated depencies + +### 0.0.8 (March 12th, 2021) + +- changes to the graph traversal streaming API + +### 0.0.7 (February 11th, 2021) + +- updated dependencies + +### 0.0.6 (November 18th, 2020) + +- renamed fields _id, _rev, _key in graph proto +- updated dependencies + +### 0.0.5 (August 19th, 2020) + +- updated RC dependencies + +### 0.0.4 (July 8th, 2020) + +- updated grpc-client, kafka-client and other dependencies + +### 0.0.3 (June 23rd, 2020) + +- fix for read operation when filter is array + +### 0.0.2 (June 10th, 2020) + +- Updated dependencies +- fix for null check when decoding the strcut value (in case of $Or operator its possible that not all values are present) + +### 0.0.1 (January 29th, 2020) + +Initial share. diff --git a/packages/resource-base-interface/README.md b/packages/resource-base-interface/README.md new file mode 100644 index 00000000..98dc827f --- /dev/null +++ b/packages/resource-base-interface/README.md @@ -0,0 +1,231 @@ +# resource-base-interface + +[![Version][version]](https://www.npmjs.com/package/@restorecommerce/resource-base-interface)[![Build Status][build]](https://img.shields.io/github/actions/workflow/status/restorecommerce/resource-base-interface/build.yaml?style=flat-square)[![Dependencies][depend]](https://david-dm.org/restorecommerce/resource-base-interface)[![Coverage Status][cover]](https://coveralls.io/github/restorecommerce/resource-base-interface?branch=master) + +[version]: http://img.shields.io/npm/v/@restorecommerce/resource-base-interface.svg?style=flat-square +[build]: https://img.shields.io/github/actions/workflow/status/restorecommerce/resource-base-interface/build.yaml?style=flat-square +[depend]: https://img.shields.io/david/restorecommerce/resource-base-interface.svg?style=flat-square +[cover]: 
http://img.shields.io/coveralls/restorecommerce/resource-base-interface/master.svg?style=flat-square + +https://github.com/restorecommerce/notification-srv/actions/workflows/build.yaml[image:https://img.shields.io/github/actions/workflow/status/restorecommerce/notification-srv/build.yaml?style=flat-square[Build Status]] + +The `resource-base-interface` describes resource CRUD operations which can be bound to a service. Such operations are described via a [gRPC](https://grpc.io/docs/) interface with the message structures therefore being defined using [Protocol Buffers](https://developers.google.com/protocol-buffers/). This interface can be bound with any protobuf definition as long as it contains the endpoints defined in the [resource-base.proto](https://github.com/restorecommerce/protos/blob/master/io/restorecommerce/resource_base.proto) file (note that any resource message structure can be defined). + +The exposed gRPC methods are implemented by the `ServiceBase` object which uses a `ResourceAPI` instance to perform operations with a database provider. The exposed interface is therefore agnostic to a specific database implementation. +However, a valid database provider is required. A set of such providers is implemented in [chassis-srv](https://github.com/restorecommerce/chassis-srv/). +This interface emits resource-related messages to [Apache Kafka](https://kafka.apache.org) which can be enabled or disabled at the `ServiceBase`'s constructor. + + +Methods for managing and traversing graph databases are supported for the [`ArangoDB provider`](https://docs.arangodb.com/3.3/HTTP/Gharial/) + +## gRPC Interface + +This interface describes the following gRPC endpoints for a generic resource of type `Resource`. 
+ +`io.restorecommerce.resourcebase.Resource` + +| Field | Type | Label | Description | +| ----- | ---- | ----- | ----------- | +| id | string | optional | identifier for the resource | +| meta | io.restorecommerce.meta.Meta meta | optional | Meta information common to all Restore Commerce resources | +| value | number | optional | value for the resource | +| text | string | optional | textual data for the resource | + +### Create + +This operation is used for inserting resources to the database. +Requests are performed by providing a list of resources which are returned in the response. A [`meta`](https://github.com/restorecommerce/protos/blob/master/io/restorecommerce/meta.proto) should be present, containing relevant resource ownership information. Timestamps for creation and modification are then appended automatically to this property upon a `Create` request. +The resource is stored as a normal collection document by default. +If there is a [graph configuration](test/cfg/config.json#L11) specified for the resource then it is stored as a vertex collection along with the edge definitions provided in the configuration. + +`io.restorecommerce.resourcebase.ResourceList` + +| Field | Type | Label | Description | +| ----- | ---- | ----- | ----------- | +| items | [ ] `io.restorecommerce.resourcebase.Resource` | required | list of resources | +| total_count | number | optional | total number of resources | + +### Read + +This operation returns resources based on provided filter and options. +Requests are performed using `io.restorecommerce.resourcebase.ReadRequest` and responses are a list of resources. 
+ +`io.restorecommerce.resourcebase.ReadRequest` + +| Field | Type | Label | Description | +| ----- | ---- | ----- | ----------- | +| offset | number | optional | offset of the resource | +| limit | number | optional | limit, default value is `1000` | +| filters | `io.restorecommerce.filter.FilterOp` | optional | nested filters based on field values, multiple filters can be combined with `AND` and `OR` operators | +| sorts | [ ]`io.restorecommerce.resourcebase.Sort` | optional | sort the resources | +| fields | [ ] `io.restorecommerce.resourcebase.FieldFilter` | optional | fields selector, list of fields to be included or excluded, by default we get all the fields | +| search | [ ]string | optional | word search, not yet implemeneted | +| locales_limiter | [ ]string | optional | querying based on locales, not yet implemented | + +`io.restorecommerce.filter.FilterOp` + +| Field | Type | Label | Description | +| ----- | ---- | ----- | ----------- | +| filters | `io.restorecommerce.filter.Filter` | optional | nested filters | +| operator | enum | optional | operator `and`, or `or` | + +`io.restorecommerce.graph.Filter` + +| Field | Type | Label | Description | +| ----- | ---- | ----- | ----------- | +| field | string | required | filter based on field | +| operation | enum | optional | operator `eq`, `lt`, `lte`, `gt`, `gte`, `isEmpty`, `ilike`, `in`, `neq`, default value is `eq` | +| value | string | required | filter based on value | +| type | enum | optional | value type `STRING`, `NUMBER`, `BOOLEAN`, `DATE` or `ARRAY`, default value is `STRING` | +| filters | [ ] `io.restorecommerce.filter.FilterOp` | required | nested filters | + +`io.restorecommerce.resourcebase.Sort` + +| Field | Type | Label | Description | +| ----- | ---- | ----- | ----------- | +| field | string | optional | field to be sorted upon | +| SortOrder | enum | optional | sorting order, `UNSORTED`, `ASCENDING` or `DESCENDING` | + +`io.restorecommerce.resourcebase.FieldFilter` + +| Field | Type | 
Label | Description | +| ----- | ---- | ----- | ----------- | +| name | string | optional | field name | +| include | bool | optional | include or exclude field | + +### Update + +This operation is used for updating resources in the database. +Requests are performed by providing a list of resources and all updated items are returned within the response. Note that the only required properties on each resource are its `id` and the properties which are meant to be modified. +It is possible to specify in the configuration multiple edge definitions for one vertex. These edges are automatically updated when vertex documents are updated. + +### Upsert + +This operation is used for updating resources in the database or creating them if they do not exist. +Requests are performed by providing a resource list, which is returned in the response. + +### Delete + +This operation is used for deleting resources in the database. +Requests are performed using `io.restorecommerce.resourcebase.DeleteRequest` and responses are `google.protobuf.Empty` messages. +If a graph vertex is deleted, all connected edges are also deleted. + +`io.restorecommerce.resourcebase.DeleteRequest` + +| Field | Type | Label | Description | +| ----- | ---- | ----- | ----------- | +| collection | string | optional | Name of the target collection | +| ids | [ ]string | optional | List of resource identifiers to be deleted; if empty or not provided, the whole collection is truncated | + +### Traversal + +This operation is used for traversing graph resources in the database. +Requests are performed using `io.restorecommerce.graph.TraversalRequest` and the response is an `io.restorecommerce.graph.TraversalResponse` message. 
+ +`io.restorecommerce.graph.TraversalRequest` + +| Field | Type | Label | Description | +| ----- | ---- | ----- | ----------- | +| vertices | [ ]`io.restorecommerce.graph.Vertices` | optional | list of vertices for traversal | +| collection | `io.restorecommerce.graph.Collection` | optional | collection data for traversal | +| opts | `io.restorecommerce.graph.Options` | optional | List of options for graph traversal | +| path | bool | optional | if set to `true` only the traversed paths are returned | +| subject | `io.restorecommerce.auth.Subject` | required | Subject details | +| filters | `io.restorecommerce.graph.Filters` | optional | filters | + +`io.restorecommerce.graph.Vertices` + +| Field | Type | Label | Description | +| ----- | ---- | ----- | ----------- | +| collection_name | string | required | collection name | +| start_vertex_ids | [ ] string | required | list of start vertex ids | + +`io.restorecommerce.graph.Collection` + +| Field | Type | Label | Description | +| ----- | ---- | ----- | ----------- | +| collection_name | string | required | collection name | +| limit | [ ] number | optional | limit | +| sorts | [ ] `io.restorecommerce.resourcebase.Sort` | optional | sorting based on fields | + +`io.restorecommerce.graph.Filters` + +| Field | Type | Label | Description | +| ----- | ---- | ----- | ----------- | +| entity | string | optional | entity name | +| edge | string | optional | edge name | +| filters | `io.restorecommerce.graph.Filter` | optional | Filter | +| operator | enum | optional | operator, `and` or `or`, default is `and` | + +`io.restorecommerce.graph.Options` + +| Field | Type | Label | Description | +| ----- | ---- | ----- | ----------- | +| include_vertexs | [ ] string | optional | list of vertex's to be included in traversal | +| exclude_vertexs | [ ] string | optional | list of vertex's to be excluded in traversal | +| include_edges | [ ] string | optional | list of edge's to be included in traversal | +| exclude_edges | [ ] 
string | optional | list of edge's to be excluded in traversal | +| direction | enum | optional | direction of traversal, `OUTBOUND` or `INBOUND`, default is `OUTBOUND` | + +`io.restorecommerce.graph.TraversalResponse` + +| Field | Type | Label | Description | +| ----- | ---- | ----- | ----------- | +| paths | `google.protobuf.Any` | required | buffered data, contains the list of visited paths | +| data | `google.protobuf.Any` | required | buffered data, contains all the data from the visited vertices | +| operation_status | `io.restorecommerce.status.OperationStatus` | required | operation status | + +## Kafka Events + +A kafka [`Topic`](https://github.com/restorecommerce/kafka-client/blob/master/src/events/provider/kafka/index.ts) can be provided when instantiating a `ServiceBase`. If `enableEvents` is set to true, a list of events is then emitted to Kafka by this microservice for each document of each CRUD request : +- Created +- Read +- Modified +- Deleted + +The events emitted to Kafka can be used for restoring the system in case of failure by implementing a [command-interface](https://github.com/restorecommerce/chassis-srv/blob/master/command-interface.md) in the used microservice. For usage details please see [command-interface tests](https://github.com/restorecommerce/chassis-srv/blob/master/test/command_test.ts). + +## Fields Configuration + +It is possible to pass a fields [`configuration object`](test/cfg/config.json#L235) to `ResourceAPI` in order to enable some special field handlers. + +### Field Generators + +The `strategies` property can be used to specify fields within each resource which should be generated automatically. Such autogeneration feature currently includes UUIDs, timestamps and sequential counters. The latter one is particularly useful for fields such as a customer or an item number, which can have a type of sequential logic. In these cases, a [Redis](https://redis.io/) database is used to generate and read these values efficiently. 
+ +### Buffer Fields + +Buffer-encoded fields can be decoded before being stored in the database. It is possible to specify within the `bufferFields` property what fields of each resource should be specially handled this way. The values are also encoded into a buffer again when read from the database. + +### Required Fields + +It is possible to specify which fields are required for each document of each resource on the `requiredFields` config. +An `InvalidArgument` error is thrown if one of these fields is missing when attempting to store a document. + +## Development + +### Tests + +See [tests](test/). To execute the tests a set of _backing services_ are needed. +Refer to [System](https://github.com/restorecommerce/system) repository to start the backing-services before running the tests. + +- To run tests + +```sh +npm run test +``` + +## Usage + +- Install dependencies + +```sh +npm install +``` + +- Build + +```sh +# compile the code +npm run build +``` diff --git a/packages/resource-base-interface/eslint.config.mjs b/packages/resource-base-interface/eslint.config.mjs new file mode 100644 index 00000000..b5d8bfe7 --- /dev/null +++ b/packages/resource-base-interface/eslint.config.mjs @@ -0,0 +1,22 @@ +// @ts-check + +import eslint from '@eslint/js'; +import { RuleTester } from 'eslint'; +import tseslint from 'typescript-eslint'; + +const rules = tseslint.config( + eslint.configs.recommended, + ...tseslint.configs.recommended, +); + +rules.push( + { + "rules": { + "@typescript-eslint/no-explicit-any": "off", + "@typescript-eslint/no-unused-vars": "off", + "prefer-rest-params": "off", + } + } +); + +export default rules; diff --git a/packages/resource-base-interface/package.json b/packages/resource-base-interface/package.json new file mode 100644 index 00000000..6277160b --- /dev/null +++ b/packages/resource-base-interface/package.json @@ -0,0 +1,60 @@ +{ + "name": "@restorecommerce/resource-base-interface", + "version": "1.8.0", + "description": "Restorecommerce 
Resource Base Interface", + "main": "lib/index.js", + "author": "n-fuse GmbH", + "repository": { + "type": "git", + "url": "https://github.com/restorecommerce/resource-base-interface.git" + }, + "license": "MIT", + "keywords": [ + "restore", + "commerce", + "resource", + "base", + "interface" + ], + "type": "module", + "dependencies": { + "@restorecommerce/acs-client": "^3.0.23", + "@restorecommerce/chassis-srv": "^1.6.8", + "@restorecommerce/grpc-client": "^2.2.8", + "@restorecommerce/kafka-client": "1.2.36", + "@restorecommerce/rc-grpc-clients": "5.1.55", + "@restorecommerce/scs-jobs": "^0.1.63", + "@restorecommerce/service-config": "^1.1.3", + "lodash": "^4.17.21", + "nice-grpc-server-reflection": "^2.0.12", + "redis": "^4.7.0" + }, + "devDependencies": { + "@restorecommerce/dev": "^0.0.13", + "@types/lodash": "^4.17.5", + "@types/nconf": "^0.10.7", + "@types/redis": "^4.0.11", + "@vitest/coverage-v8": "^3.2.4", + "arangojs": "^8.7.0", + "cross-env": "^7.0.3", + "esbuild": "^0.25.9", + "eslint": "^8.57.0", + "npm-run-all": "^4.1.5", + "rimraf": "^6.0.1", + "should": "^13.2.3", + "typescript": "^5.6.3", + "vitest": "^3.2.4" + }, + "scripts": { + "test": "vitest run", + "lint": "eslint src --fix", + "build:es": "esbuild ./src/index.ts --bundle --platform=node --outfile=lib/index.cjs --minify --tree-shaking=true --sourcemap=inline", + "build:tsc": "tsc -d", + "build:tsc:watch": "tsc -d --watch", + "build:clean": "rimraf lib", + "build": "npm-run-all lint build:clean build:tsc" + }, + "engines": { + "node": ">= 20.0.0" + } +} diff --git a/packages/resource-base-interface/src/core/GraphResourcesServiceBase.ts b/packages/resource-base-interface/src/core/GraphResourcesServiceBase.ts new file mode 100644 index 00000000..8d7a6281 --- /dev/null +++ b/packages/resource-base-interface/src/core/GraphResourcesServiceBase.ts @@ -0,0 +1,238 @@ +import { type CallContext } from 'nice-grpc-common'; +import { + GraphDatabaseProvider, + TraversalResponse as DBTraversalResponse +} 
/**
 * Graph Resource API base provides functions for graph Operations such as
 * creating or modifying Vertices/Edges, graph traversal etc.
 */
export class GraphResourcesServiceBase implements GraphServiceImplementation {
  // Names of collections whose fields are configured for buffer marshalling.
  bufferedCollections: any;

  /**
   * @constructor
   * @param db Chassis arangodb graph provider.
   * @param bufferFiledCfg map of collection name -> property to be marshalled
   *        into a Buffer (name kept as-is; "Filed" is a historical typo in the API).
   * @param logger optional logger.
   * @param dateTimeFieldcfg map of entity name -> list of (possibly dotted)
   *        field paths holding epoch-millisecond values to be converted to ISO strings.
   */
  constructor(
    private readonly db: GraphDatabaseProvider,
    private readonly bufferFiledCfg?: any,
    private readonly logger?: Logger,
    private readonly dateTimeFieldcfg?: any
  ) {
    if (bufferFiledCfg) {
      this.bufferedCollections = [];
      for (const key in bufferFiledCfg) {
        // mapping of collection name to the property to be marshalled
        this.bufferedCollections.push(key);
      }
    }
    this.dateTimeFieldcfg = dateTimeFieldcfg;
  }

  /**
   * Logs the error and maps it to an OperationStatus.
   * Non-integer error codes collapse to HTTP 500.
   */
  protected catchOperationError(msg: string, err: any): OperationStatus {
    this.logger?.error(msg, err);
    return {
      code: Number.isInteger(err.code) ? err.code : 500,
      message: err.message ?? 'Unknown Error!',
    };
  }

  /**
   * collection traversal - Performs a traversal starting from the given
   * startVertex and following edges contained in this edge collection.
   *
   * The start_vertex can be either the _id of a document in the database,
   * the _key of an edge in the collection, or a document
   * (i.e. an object with an _id or _key property).
   * opts contains the options such as opts.direction, opts.filter, opts.visitor,
   * opts.init, opts.expander, opts.sort
   *
   * Streams results as buffered JSON batches: first root-entity batches,
   * then association-entity batches (plus traversed paths when `request.path`
   * is set), and finally a terminating operation_status message.
   */
  async* traversal(
    request: TraversalRequest,
    context: CallContext
  ): ServerStreamingMethodResult<DeepPartial<TraversalResponse>> {
    try {
      const vertices = request?.vertices;
      const collection = request?.collection;
      const options = request?.opts;
      // at least one entry point (start vertices or a collection) is required
      if (!vertices && !collection) {
        const message = 'missing start vertex or collection_name for graph traversal';
        this.logger?.error(message);
        yield {
          operation_status: { code: 400, message }
        };
        return;
      }
      const filters = request?.filters;
      const path = request?.path ? request.path : false;
      let traversalCursor: DBTraversalResponse;

      // normalize proto sort list into the { field: 'ASC'|'DESC' } map the DB layer expects
      if (collection?.sorts?.length) {
        (collection as any).sorts = collection.sorts.reduce((a, s) => {
          switch (s.order) {
            case SortOrder.ASCENDING:
              a[s.field] = 'ASC';
              break;
            case SortOrder.DESCENDING:
              a[s.field] = 'DESC';
              break;
            case SortOrder.UNSORTED:
            default:
              // unsorted fields are simply dropped from the map
              break;
          }
          return a;
        }, {} as Record<string, string>);
      }

      try {
        this.logger?.debug('Calling traversal', { vertices, collection });
        traversalCursor = await this.db.traversal(
          vertices, collection,
          options, filters
        );
        this.logger?.debug('Received traversal ArrayCursor from DB');
      } catch ({ message, details, stack }: any) {
        yield {
          operation_status: this.catchOperationError('Error executing DB Traversal', { message, details, stack })
        };
        return;
      }

      const rootCursor = traversalCursor.rootCursor;
      const associationCursor = traversalCursor.associationCursor;
      // root entity data batches
      if (rootCursor && rootCursor.batches) {
        for await (const batch of rootCursor.batches) {
          // root entity data, encoding before pushing batch;
          // strip Arango-internal attributes so they never leak to clients
          for (const elem of batch) {
            if (elem._key) {
              delete elem._key;
            }
            if (elem._rev) {
              delete elem._rev;
            }
          }
          yield ({ data: { value: Buffer.from(JSON.stringify(batch)) } });
        }
      }
      // association entity data batches
      if (associationCursor && associationCursor.batches) {
        for await (const batch of associationCursor.batches) {
          const associationData = [];
          const traversedPaths = [];
          for (const data of batch) {
            if (data.v._key) {
              delete data.v._key;
            }
            if (data.v._rev) {
              delete data.v._rev;
            }
            // convert `data.v` ie. vertex data for time fields conversion from ms to ISO string directly;
            // the entity name is the collection prefix of the vertex _id ("collection/key")
            const entityName = data.v._id.split('/')[0];
            if (this.dateTimeFieldcfg) {
              for (const cfgEntityNames in this.dateTimeFieldcfg) {
                if (cfgEntityNames === entityName) {
                  const dateTimeFields: string[] = this.dateTimeFieldcfg[entityName];
                  dateTimeFields.forEach(e => {
                    if (e.includes('.')) {
                      // nested path — walk the object (and any arrays on the way)
                      this.updateJSON(e, data.v);
                    } else {
                      data.v[e] = new Date(data.v[e]).toISOString();
                    }
                  });
                }
              }
            }
            associationData.push(data.v);
            if (path) {
              traversedPaths.push(data.p);
            }
          }

          if (associationData.length) {
            // associated entity data, encoding before pushing data
            yield ({ data: { value: Buffer.from(JSON.stringify(associationData)) } });
          }
          // paths
          if (traversedPaths.length) {
            // traversed paths, encoding before pushing paths
            yield ({ paths: { value: Buffer.from(JSON.stringify(traversedPaths)) } });
          }
        }
      }

      yield ({ operation_status: { code: 200, message: 'success' } });
      this.logger?.debug('Traversal request ended');
      return;
    } catch ({ message, details, stack }: any) {
      yield {
        operation_status: this.catchOperationError('Error caught executing traversal', { message, details, stack })
      };
      return;
    }
  }

  /**
   * marshall the data
   *
   * @param document resource data
   * @param bufferField property specified in config to be marshalled
   * @return document (mutated in place: the field is replaced by
   *         `{ value: Buffer }` holding its JSON serialization)
   */
  marshallData(document: any, bufferField: any): any {
    if (bufferField in document && document[bufferField]) {
      const decodedMsg = document[bufferField];
      // convert the Msg obj to Buffer Obj
      const encodedBufferObj = Buffer.from(JSON.stringify(decodedMsg));
      document[bufferField] = {};
      document[bufferField].value = encodedBufferObj;
    }
    return document;
  }

  // Walks a dotted field path inside `obj` and converts the value at the end
  // of the path from epoch milliseconds to an ISO date string (only if set).
  // Arrays along the path are handled by recursing into each element with the
  // remaining sub-path.
  private updateJSON = (path: string, obj: any) => {
    const fields = path.split('.');
    let result = obj;
    // NOTE(review): `j` is declared outside the loop and never reset, so if a
    // path traverses more than one array segment the second array resumes at
    // the previous index rather than 0 — looks like a latent bug; confirm.
    let j = 0;
    for (let i = 0, n = fields.length; i < n && result !== undefined; i++) {
      const field = fields[i];
      if (i === n - 1) {
        // reset value finally after iterating to the position (only if value already exists)
        if (result[field]) {
          result[field] = new Date(result[field]).toISOString();
        }
      } else {
        if (Array.isArray(result[field])) {
          // till i < n concat new fields (rebuild the remaining sub-path)
          let newField;
          for (let k = i + 1; k < n; k++) {
            if (newField) {
              newField = newField + '.' + fields[k];
            } else {
              newField = fields[k];
            }
          }
          for (; j < result[field].length; j++) {
            // recurisve call to update each element if its an array
            this.updateJSON(newField, result[field][j]);
          }
        } else {
          // update object till final path is reached
          result = result[field];
        }
      }
    }
  };
}
FieldHandlerType } from './utils.js'; + +// let redisClient: any; + +enum Strategies { + INCREMENT = 'increment', + UUID = 'uuid', + RANDOM = 'random', + TIMESTAMP = 'timestamp' +} + +const uuidGen = (): string => randomUUID().replace(/-/g, ''); + +/** + * Resource API base provides functions for CRUD operations. + */ +export class ResourcesAPIBase { + protected readonly bufferFields: string[]; + protected readonly requiredFields: any; + protected readonly timeStampFields: string[]; + protected readonly redisClient: RedisClientType; + + /** + * @constructor + * @param {object} db Chassis arangodb provider. + * @param {string} collectionName Name of database collection. + * @param {any} fieldHandlerConf The collection's field generators configuration. + */ + constructor( + protected readonly db: DatabaseProvider, + protected readonly collectionName: string, + fieldHandlerConf?: any, + protected readonly edgeCfg?: any, + protected readonly graphName?: string, + protected readonly logger?: Logger, + protected readonly resourceName?: string, + ) { + this.resourceName ??= collectionName.substring(0, collectionName.length - 1); + + if (!fieldHandlerConf) { + return; + } + + const strategyCfg = fieldHandlerConf?.strategies ?? []; + this.redisClient = fieldHandlerConf?.redisClient; + this.bufferFields = fieldHandlerConf?.bufferFields; + this.timeStampFields = fieldHandlerConf?.timeStampFields; + this.requiredFields = fieldHandlerConf?.requiredFields?.[this.resourceName] ?? 
fieldHandlerConf?.requiredFields; + + // values for Redis hash set + for (const field in strategyCfg) { + const strategy = strategyCfg[field].strategy; + this.redisClient.hSet(collectionName, field, strategy); + switch (strategy) { + case Strategies.INCREMENT: { + // check if value already exists in redis + let startingValue: any; + startingValue = this.redisClient.get(`${collectionName}:${field}`).then((val) => val); + if (!startingValue) { + if (strategyCfg[field].startingValue) { + startingValue = Number.isNaN(strategyCfg[field].startingValue) ? + '0' : strategyCfg[field].startingValue; + } + else { + startingValue = '0'; + } + this.redisClient.set(`${collectionName}:${field}`, startingValue).then((val) => val); + } + break; + } + default: + break; + } + } + } + + protected catchOperationError(msg: string, err: any): OperationStatus { + this.logger?.error(msg, err); + return { + code: Number.isInteger(err.code) ? err.code : 500, + message: err.message ?? 'Unknown Error!', + }; + } + + protected catchStatusError(msg: string, err: any): Status { + this.logger?.error(msg, err); + return { + code: Number.isInteger(err.code) ? err.code : 500, + message: err.message ?? 
'Unknown Error!', + }; + } + + protected setMeta( + o: T & any, + subject: Subject, + create = false, + ): T { + o.meta ??= {}; + if (create) { + o.meta.created ??= new Date(); + o.meta.created_by ??= subject?.id; + } + o.meta.modified_by ??= subject?.id; + o.meta.modified ??= new Date(); + + if (!o.id?.length || o.id?.toString() === '0') { + o.id = uuidGen(); + } + return o; + } + + protected async setDefaults( + o: T & any, + collectionName: string, + subject: Subject, + create = false, + ): Promise { + if (create && this.redisClient) { + const values = await this.redisClient.hGetAll(collectionName); + + if (values) { + for (const field in values) { + const strategy = values[field]; + switch (strategy) { + case Strategies.INCREMENT: { + const key = `${collectionName}:${field}`; + o[field] = await this.redisClient.get(key); + await this.redisClient.incr(key); + break; + } + case Strategies.TIMESTAMP: + o[field] = (await this.redisClient.time()).getTime(); + break; + default: + case Strategies.UUID: + case Strategies.RANDOM: + o[field] = uuidGen(); + break; + } + } + } + } + return this.setMeta(o, subject, create); + } + + /** + * Finds documents based on provided filters and options + * @param {object} filter key value filter using mongodb/nedb filter format. + * @param {number} limit + * @param {number} offset + * @param {object} sort key value, key=field value: 1=ASCENDING, -1=DESCENDING, 0=UNSORTED + * @param {object} fields key value, key=field value: 0=exclude, 1=include + * @returns {an Object that contains an items field} + */ + async read( + filter: any = {}, + limit = 1000, + offset = 0, + sort: any = {}, + fields: any = {}, + customQueries: string[] = [], + customArgs: any = {}, + search: DeepPartial + ): Promise { + const options = { + limit: Math.min(limit, 1000), + offset, + sort, + fields, + customQueries, + customArguments: customArgs?.value ? 
JSON.parse(customArgs.value.toString()) : {}, + search + }; + let entities: T[] = await this.db.find(this.collectionName, filter, options); + entities = this.encodeOrDecode(entities, this.bufferFields, 'encode'); + entities = this.encodeOrDecode(entities, this.timeStampFields, 'convertMilisecToDateObj'); + return entities; + } + + /** + * Inserts documents to the database. + * + * @param {array.object} documents + */ + async create( + documents: BaseDocument[], + subject: Subject, + events?: Topic, + ): Promise { + const collection = this.collectionName; + const result = new Array(); + // check if all the required fields are present + if (this.requiredFields) { + documents = this.checkRequiredFields( + this.requiredFields, + documents, + result + ); + } + + documents = await Promise.all(documents.map( + async (doc) => await this.setDefaults(doc, collection, subject, true) + )); + documents = this.encodeOrDecode(documents, this.bufferFields, 'decode'); + documents = this.encodeOrDecode(documents, this.timeStampFields, 'convertDateObjToMilisec'); + + if (this.isGraphDB(this.db)) { + const db = this.db; + await db.addVertexCollection(collection); + const createVertexResp = await this.db.createVertex(collection, documents); + await Promise.all(documents.map(async document => { + try { + for (const eachEdgeCfg of this.edgeCfg) { + const fromIDkey = eachEdgeCfg.from; + const from_id = document[fromIDkey]; + const toIDkey = eachEdgeCfg.to; + const to_id = document[toIDkey]; + // edges are created outbound, if it is inbound - check for direction + const inbound = eachEdgeCfg.direction === 'inbound'; + const fromVerticeName = inbound ? eachEdgeCfg.fromVerticeName : collection; + const toVerticeName = inbound ? collection : eachEdgeCfg.toVerticeName; + + const ids = Array.isArray(to_id) ? 
to_id : [to_id]; + if (from_id && to_id) { + for (const id of ids) { + await db.createEdge( + eachEdgeCfg.edgeName, + null, + `${fromVerticeName}/${from_id}`, + `${toVerticeName}/${id}`, + ); + } + } + } + } + catch (error: any) { + result.push({ + error: true, + errorNum: error?.code, + errorMessage: error?.details ?? error?.message + }); + } + })); + if (Array.isArray(createVertexResp)) { + result.push(...createVertexResp); + } else { + result.push(createVertexResp); + } + } + else { + const inserts = await this.db.insert(collection, documents); + result.push(...inserts); + } + + this.encodeOrDecode(result, this.timeStampFields, 'convertMilisecToDateObj'); + if (events) { + await Promise.all(result?.map(async (item: any) => { + if (!item?.error) { + await events.emit(`${this.resourceName}Created`, item); + } + })); + } + this.encodeOrDecode(result, this.bufferFields, 'encode'); + return result; + } + + private isGraphDB(db: DatabaseProvider): db is GraphDatabaseProvider { + return !!this.edgeCfg; + } + + /** + * Check if a resource's required fields are present. + * @param requiredFields + * @param documents + */ + checkRequiredFields(requiredFields: string[], documents: BaseDocument[], errors: BaseDocument[]) { + const valid = documents.filter((document) => { + return requiredFields.every((field) => { + if (document[field] === undefined || (Array.isArray(document[field]) && document[field].length === 0)) { + errors.push({ + id: document.id, + error: true, + errorNum: 400, + errorMessage: `Field ${field} is necessary for ${this.resourceName} in document ${document.id}` + }); + return false; + } + return true; + }); + }); + return valid; + } + + /** + * Removes documents found by id. + * + * @param [array.string] ids List of document IDs. 
+ */ + async delete( + ids: string[], + events?: Topic, + ): Promise { + let response: any[]; + if (!Array.isArray(ids)) { + ids = [ids]; + } + if (this.isGraphDB(this.db)) { + // Modify the Ids to include documentHandle + if (ids.length > 0) { + ids = ids?.map((id) => `${this.collectionName}/${id}`); + response = await this.db.removeVertex(this.collectionName, ids); + } + } + else { + response = await this.db.delete(this.collectionName, ids); + } + + if (events) { + await Promise.all(response?.map(async (id) => { + if (id && !id?.error) { + await events?.emit(`${this.resourceName}Deleted`, typeof id === 'string' ? { id }: id); + } + })); + } + return response; + } + + /** + * Delete all documents in the collection. + */ + async deleteCollection(events?: Topic): Promise { + await this.db.truncate(this.collectionName); + if (this.isGraphDB(this.db)) { + const db = this.db; + const edges: any[] = await db.getGraphDB().get().then( + (info: any) => info.edgeDefinitions + ); + await Promise.all( + edges?.filter( + edge => Object.values(edge).flatMap( + edge => edge + ).includes(this.collectionName) + ).map( + edge => db.truncate(edge.collection) + ) ?? [] + ); + } + + if (events) { + await events?.emit(`${this.resourceName}DeletedAll`, { collection: this.collectionName }); + } + } + + /** + * Upserts documents. 
+ * + * @param [array.object] documents + */ + async upsert( + documents: T[], + subject: Subject, + events?: Topic, + ): Promise { + const createDocuments = new Array(); + const updateDocuments = new Array(); + const orgs = new Set( + await this.db.find( + this.collectionName, + { + _key: { + $in: [...new Set(documents?.map(doc => doc.id).filter(id => id))], + }, + }, + { + fields: { + id: 1 + } + } + ).then( + (resp: any[]) => resp.map(doc => doc.id) + ) + ); + + documents?.forEach((doc) => { + if (orgs.has(doc?.id)) { + // update + updateDocuments.push(doc); + } + else { + // insert + createDocuments.push(doc); + } + }); + + const results = new Array(); + if (updateDocuments?.length > 0) { + await this.update(updateDocuments, subject, events).then( + updates => results.push(...updates) + ); + } + + if (createDocuments?.length > 0) { + await this.create(createDocuments, subject, events).then( + creates => results.push(...creates) + ); + } + return results; + } + + /** + * Finds documents by id and updates them. + * + * @param [array.object] documents + * A list of documents or partial documents. Each document must contain an id field. + */ + async update( + documents: T[], + subject: Subject, + events?: Topic, + ): Promise { + documents = documents.map( + (doc) => this.setMeta(doc, subject) + ); + documents = this.encodeOrDecode(documents, this.bufferFields, 'decode'); + documents = this.encodeOrDecode(documents, this.timeStampFields, 'convertDateObjToMilisec'); + documents = await Promise.all(documents.map(async (doc) => { + try { + if (this.isGraphDB(this.db)) { + const db = this.db; + await Promise.all(this.edgeCfg.map(async (edgeCfg: any) => { + const to_id = doc[edgeCfg.to!]; + const from_id = doc[edgeCfg.from!]; + const edgeCollectionName = edgeCfg.edgeName!; + + // delete and recreate only if there is a difference in references + if (edgeCfg.direction === 'inbound' && from_id) { + const from_ids: string[] = Array.isArray(from_id) ? 
from_id : [from_id]; + // if (!from_ids?.length) return; + if (typeof to_id !== 'string') throw Error('Inbound value `to` has to be a single string!'); + + const fromVerticeName = edgeCfg.fromVerticeName!; + const toVerticeName = edgeCfg.toVerticeName! ?? this.collectionName; + const incoming: any = await db.getInEdges(edgeCollectionName, `${fromVerticeName}/${to_id}`); + + // Remove edges that are no longer defined + if (Array.isArray(incoming.edges)) { + await Promise.all(incoming.edges?.filter( + (edge: any) => !from_ids.includes(edge._from) + ).map( + (edge: any) => db.removeEdge(edgeCollectionName, edge._id) + )); + } + + // Create new edges + await Promise.all(from_ids.filter( + id => !incoming.edges?.includes(id) + ).map( + id => db.createEdge( + edgeCfg.edgeName, + null, + `${fromVerticeName}/${from_id}`, + `${toVerticeName}/${id}`, + ) + )); + } + else if (to_id) { + const to_ids: string[] = Array.isArray(to_id) ? to_id : [to_id]; + // if (!to_ids?.length) return; + if (typeof from_id !== 'string') throw Error('Outbound value `from` has to be a single string!'); + + const fromVerticeName = edgeCfg.fromVerticeName! ?? 
this.collectionName; + const toVerticeName = edgeCfg.toVerticeName!; + const outgoing: any = await db.getOutEdges(edgeCollectionName, `${fromVerticeName}/${from_id}`); + + // Remove edges that are no longer defined + if (Array.isArray(outgoing.edges)) { + await Promise.all(outgoing.edges?.filter( + (edge: any) => !to_ids.includes(edge._to) + ).map( + (edge: any) => db.removeEdge(edgeCollectionName, edge._id) + )); + } + + // Create new edges + await Promise.all(to_ids.filter( + id => !outgoing.edges?.includes(id) + ).map( + id => db.createEdge( + edgeCfg.edgeName, + null, + `${fromVerticeName}/${from_id}`, + `${toVerticeName}/${id}`, + ) + )); + } + })); + } + return doc; + } + catch ({ code, message, details, stack, name, cause }: any) { + this.logger?.error(`Error updating document ${doc.id}`, { code, message, details, stack, name, cause }); + return { + ...doc, + error: true, + errorNum: Number.isInteger(code) ? code : 500, + errorMessage: `On graph update: ${details ?? message}` + }; + } + })); + + const errors = documents.filter(doc => doc.error); + const updates = documents.filter(doc => !doc.error); + const results = await this.db.update(this.collectionName, updates); + results.push(...errors); + this.encodeOrDecode(results, this.timeStampFields, 'convertMilisecToDateObj'); + if (events) { + await Promise.all(results?.map(async (item: any) => { + if (!item.error) { + await events.emit(`${this.resourceName}Modified`, item); + } + })); + } + this.encodeOrDecode(results, this.bufferFields, 'encode'); + return results; + } + + private encodeOrDecode(documents: T, fieldPaths: string[], mode: FieldHandlerType): T { + const arr = Array.isArray(documents) ? 
documents : [documents]; + if (fieldPaths?.length && arr?.length) { + for (const doc of arr) { + for (const fieldPath of fieldPaths) { + fieldHandler(doc, fieldPath, mode); + } + } + } + return documents; + } +} diff --git a/packages/resource-base-interface/src/core/ServiceBase.ts b/packages/resource-base-interface/src/core/ServiceBase.ts new file mode 100644 index 00000000..2beea9fd --- /dev/null +++ b/packages/resource-base-interface/src/core/ServiceBase.ts @@ -0,0 +1,370 @@ +import { type CallContext } from 'nice-grpc-common'; +import { BaseDocument, toObject } from '../index.js'; +import { ResourcesAPIBase } from './ResourcesAPI.js'; +import { Topic } from '@restorecommerce/kafka-client'; +import { Logger } from '@restorecommerce/logger'; +import { + DeepPartial, + DeleteRequest, + DeleteResponse, + ReadRequest, + ResourceList, + ResourceListResponse, + ResourceResponse, + ServiceImplementation, + Sort_SortOrder +} from '@restorecommerce/rc-grpc-clients/dist/generated-server/io/restorecommerce/resource_base.js'; +import { OperationStatus, Status } from '@restorecommerce/rc-grpc-clients/dist/generated-server/io/restorecommerce/status.js'; + +export type ElementOf = T extends Array ? 
E : T; + +// Mapping of arangodb error codes to standard HTTP error codes +const ArangoHttpErrCodeMap: Record = { + 1210: 409, // ERROR_ARANGO_UNIQUE_CONSTRAINT_VIOLATED + 1200: 409, // ERROR_ARANGO_CONFLICT + 1201: 400, // ERROR_ARANGO_DATADIR_INVALID + 1202: 404, // ERROR_ARANGO_DOCUMENT_NOT_FOUND + 1203: 404, // ERROR_ARANGO_DATA_SOURCE_NOT_FOUND + 1204: 400, // ERROR_ARANGO_COLLECTION_PARAMETER_MISSING + 1205: 400, // ERROR_ARANGO_DOCUMENT_HANDLE_BAD + 1207: 409, // ERROR_ARANGO_DUPLICATE_NAME + 1228: 404, // ERROR_ARANGO_DATABASE_NOT_FOUND +}; + +export type StatusCodes = { + [K in keyof T]?: Status +}; + +export type OperationStatusCodes = { + [K in keyof T]?: OperationStatus +}; + +export const ServiceBaseStatusCodes = { + SUCCESS: { + code: 200, + message: 'success', + }, +}; +export type ServiceBaseStatusCodes = StatusCodes; + +export const ServiceBaseOperationStatusCodes = { + SUCCESS: { + code: 200, + message: 'success', + }, + MULTI_STATUS: { + code: 207, + message: 'Multi status - response may include errors!', + }, +}; +export type ServiceBaseOperationStatusCodes = OperationStatusCodes; + +/** + * A microservice chassis ready class which provides endpoints for + * CRUD resource operations. 
+ */ +export class ServiceBase implements ServiceImplementation { + private status_codes: StatusCodes; + private operation_status_codes: OperationStatusCodes; + + protected get statusCodes(): ServiceBaseStatusCodes { + this.status_codes ??= { ...ServiceBaseStatusCodes }; + return this.status_codes; + } + + protected set statusCodes(value: StatusCodes) { + Object.assign(this.statusCodes, value); + } + + protected get operationStatusCodes(): ServiceBaseOperationStatusCodes { + this.operation_status_codes ??= { ...ServiceBaseOperationStatusCodes }; + return this.operation_status_codes; + } + + protected set operationStatusCodes(value: OperationStatusCodes) { + Object.assign(this.operationStatusCodes, value); + } + + /** + * @constructor + * @param name entityName Name of the resource. + * @param events entityEvents Event topic for the resource. + * @param logger logger Chassis logger?. + * @param resourceapi resourceapi ResourceAPI object. + * @param isEventsEnabled. + */ + constructor( + public readonly name: string, + public readonly events?: Topic, + public readonly logger?: Logger, + public readonly resourceapi?: ResourcesAPIBase, + public isEventsEnabled?: boolean, + ) {} + + /** + * Endpoint read. + * Return resources based on provided filter and options. + */ + async read(request: ReadRequest, context: CallContext): Promise> { + try { + const sort = request.sorts?.reduce( + (a, s) => { + switch (s.order) { + default: + case Sort_SortOrder.ASCENDING: + a[s.field] = 'ASC'; + break; + case Sort_SortOrder.DESCENDING: + a[s.field] = 'DESC'; + break; + } + return a; + }, {} as Record + ); + + const limit = request.limit; + const offset = request.offset; + const filter = request.filters?.length ? 
toObject(request) : {}; + const field = {} as Record; + request.fields?.forEach((f) => { + if (f.include) { + field[f.name] = 1; + return; + } + field[f.name] = 0; + }); + const customQueries = request.custom_queries; + const customArgs = request.custom_arguments; + const search = request?.search; + const objectEntities = (await this.resourceapi.read['payload']>( + filter, + limit, + offset, + sort, + field, + customQueries, + customArgs, + search + )) ?? []; + + const readResponseWithStatus = objectEntities.map((object) => ({ + payload: object, + status: { + ...this.statusCodes.SUCCESS, + id: object.id, + }, + })); + + return { + items: readResponseWithStatus, + total_count: readResponseWithStatus.length, + operation_status: this.operationStatusCodes.SUCCESS + } as DeepPartial; + } + catch ({ code, message, details, stack, name, cause }: any) { + this.logger?.error('Error caught while processing read request:', { code, message, details, stack, name, cause }); + return { + operation_status: { + code: Number.isInteger(code) ? code : 500, + message: details ?? message + } + } as DeepPartial; + } + } + + private generateStatusResponse( + responseItems: BaseDocument[], + ): DeepPartial[] { + if (!Array.isArray(responseItems)) { + responseItems = [responseItems]; + } + const statusArray = responseItems.map((item) => { + if (item.error) { + const code = ArangoHttpErrCodeMap[item.errorNum] ?? item.errorNum; + return { + id: item.id, + code: Number.isInteger(code) ? code : 500, + message: item.errorMessage, + }; + } else { + return { + ...this.statusCodes.SUCCESS, + id: item.id, + }; + } + }); + return statusArray; + } + + private generateResponseWithStatus( + responseItems: BaseDocument[] + ): DeepPartial[] { + if (!Array.isArray(responseItems)) { + responseItems = responseItems ? [responseItems] : []; + } + const responseItemsWithStatus = responseItems.map((item) => { + if (item.error) { + const code = ArangoHttpErrCodeMap[item.errorNum] ?? 
item.errorNum; + return { + status: { + id: item.id, + code: Number.isInteger(code) ? code : 500, + message: item.errorMessage, + } + }; + } else { + return { + payload: item, + status: { + ...this.statusCodes.SUCCESS, + id: item.id, + } + }; + } + }); + return responseItemsWithStatus; + } + + private generateResourceResponseList(items: ResourceResponse[]): DeepPartial { + if (items.some(item => item.status?.code !== 200)) { + return { + items, + total_count: items.length ?? 0, + operation_status: this.operationStatusCodes.MULTI_STATUS, + } as DeepPartial; + } + else { + return { + items, + total_count: items.length ?? 0, + operation_status: this.operationStatusCodes.SUCCESS, + } as DeepPartial; + } + } + + /** + * Endpoint create. + * Inserts resources. + */ + async create(request: M, context: CallContext): Promise> { + try { + const createResponse = await this.resourceapi.create( + request.items, + request.subject, + this.isEventsEnabled && this.events + ); + const items = this.generateResponseWithStatus(createResponse); + const docs = this.generateResourceResponseList(items); + this.logger?.info(this.name + ' create response', docs); + return docs; + } + catch ({ code, message, details, stack, name, cause }: any) { + this.logger?.error('Error caught while processing create request:', { code, message, details, stack, name, cause }); + return { + operation_status: { + code: Number.isInteger(code) ? code : 500, + message: details ?? message + } + } as DeepPartial; + } + } + + /** + * Endpoint delete. + * Removes resources specified by id or all resources. 
+ */ + async delete(request: DeleteRequest, context: CallContext): Promise> { + try { + let docs: any[]; + if (request.collection) { + await this.resourceapi.deleteCollection(this.isEventsEnabled && this.events); + this.logger?.info(`${this.name} deleted`); + docs = [{ + id: request.collection, + }] + } else { + docs = await this.resourceapi.delete(request.ids, this.isEventsEnabled && this.events); + } + + docs?.forEach((doc) => { + if (doc._id && doc._key && doc._rev) { + doc.id = doc._key; + delete doc._id; + delete doc._key; + delete doc._rev; + } + }); + const status = docs?.length ? this.generateStatusResponse(docs) : undefined; + + return { + status, + operation_status: status?.some(status => status.code !== 200) + ? this.operationStatusCodes.MULTI_STATUS + : this.operationStatusCodes.SUCCESS + }; + } + catch ({ code, message, details, stack, name, cause }: any) { + this.logger?.error('Error caught while processing delete request:', { code, message, details, stack, name, cause }); + return { + operation_status: { + code: Number.isInteger(code) ? code : 500, + message: details ?? message + } + } as DeepPartial; + } + } + + /** + * Endpoint update. + * Updates resources. + */ + async update(request: M, context: CallContext): Promise> { + try { + const updateResponse = await this.resourceapi.update( + request.items, + request.subject, + this.isEventsEnabled && this.events, + ); + const items = this.generateResponseWithStatus(updateResponse); + const docs = this.generateResourceResponseList(items); + this.logger?.info(this.name + ' update response', docs); + return docs as DeepPartial; + } + catch ({ code, message, details, stack, name, cause }: any) { + this.logger?.error('Error caught while processing update request:', { code, message, details, stack, name, cause }); + return { + operation_status: { + code: Number.isInteger(code) ? code : 500, + message: details ?? message + } + } as DeepPartial; + } + } + + /** + * Endpoint upsert. + * Upserts resources. 
+ */ + async upsert(request: M, context: CallContext): Promise> { + try { + const upsertResponse = await this.resourceapi.upsert( + request.items, + request.subject, + this.isEventsEnabled && this.events, + ); + const items = this.generateResponseWithStatus(upsertResponse); + const docs = this.generateResourceResponseList(items); + this.logger?.info(`${this.name} upsert response`, { items: upsertResponse }); + return docs as DeepPartial; + } + catch ({ code, message, details, stack, name, cause }: any) { + this.logger?.error('Error caught while processing upsert request:', { code, message, details, stack, name, cause }); + return { + operation_status: { + code: Number.isInteger(code) ? code : 500, + message: details ?? message + } + } as DeepPartial; + } + } +} diff --git a/packages/resource-base-interface/src/core/index.ts b/packages/resource-base-interface/src/core/index.ts new file mode 100644 index 00000000..f26b7d35 --- /dev/null +++ b/packages/resource-base-interface/src/core/index.ts @@ -0,0 +1,4 @@ +export * from './ResourcesAPI.js'; +export * from './ServiceBase.js'; +export * from './GraphResourcesServiceBase.js'; +export * from './interfaces.js'; \ No newline at end of file diff --git a/packages/resource-base-interface/src/core/interfaces.ts b/packages/resource-base-interface/src/core/interfaces.ts new file mode 100644 index 00000000..40cbf08a --- /dev/null +++ b/packages/resource-base-interface/src/core/interfaces.ts @@ -0,0 +1,34 @@ +import { + Resource +} from '@restorecommerce/rc-grpc-clients/dist/generated-server/io/restorecommerce/resource_base.js'; + +export { + Resource, + ResourceList, + ReadRequest, + Filter, + FilterOp, + Filter_ValueType as FilterValueType, + Filter_Operation as FilterOperation, + FilterOp_Operator as OperatorType, + Sort_SortOrder as SortOrder, +} from '@restorecommerce/rc-grpc-clients/dist/generated-server/io/restorecommerce/resource_base.js'; + +export { + Meta as DocumentMetadata +} from 
'@restorecommerce/rc-grpc-clients/dist/generated-server/io/restorecommerce/meta.js'; + +export { + Options_Direction as Direction, + Options as TraversalOptions, + Filter as GraphFilter, + Filters as GraphFilters, +} from '@restorecommerce/rc-grpc-clients/dist/generated-server/io/restorecommerce/graph.js'; + +export type BaseDocument = Resource & Record; + +export interface DateTimeConfig { + fields: string[]; + entities: string[]; +} + diff --git a/packages/resource-base-interface/src/core/utils.ts b/packages/resource-base-interface/src/core/utils.ts new file mode 100644 index 00000000..cdc05f79 --- /dev/null +++ b/packages/resource-base-interface/src/core/utils.ts @@ -0,0 +1,89 @@ +import * as _ from 'lodash'; + +export type FieldHandlerType = 'encode' | 'decode' | 'convertDateObjToMilisec' | 'convertMilisecToDateObj'; + +const marshallObj = (val: any) => { + return { + type_url: '', + value: Buffer.from(JSON.stringify(val)) + }; +}; + +const updateObject = ( + obj: any, + path: string, + value: any, + fieldHandlerType: FieldHandlerType +) => { + if (value !== undefined) { + switch (fieldHandlerType) { + case 'encode': + _.set(obj, path, marshallObj(value)); + break; + case 'decode': + _.set(obj, path, JSON.parse(value.value?.toString())); + break; + case 'convertDateObjToMilisec': + if (value instanceof Date) { + _.set(obj, path, value.getTime()); + } + break; + case 'convertMilisecToDateObj': + if (typeof(value) === 'number') { + _.set(obj, path, new Date(value)); + } + break; + default: + break; + } + } +}; + +const setNestedPath = (object: any, fieldPath: string, fieldHandlerType: FieldHandlerType) => { + const prefix = fieldPath?.substring(0, fieldPath.indexOf('.[')); + const suffix = fieldPath?.substring(fieldPath.indexOf('].') + 2); + const setRecursive = suffix.includes('.['); + if (prefix && suffix) { + const array = _.get(object, prefix); + array?.forEach((obj: any) => { + const fieldExists = _.get(obj, suffix); + if (fieldExists) { + updateObject(obj, 
suffix, fieldExists, fieldHandlerType); + } + // recursive call + if (fieldExists && setRecursive) { + setNestedPath(obj, suffix, fieldHandlerType); + } + }); + } +}; + +const baseGet = (object: any, path: string[]): any => { + let index = 0; + const length = path.length; + while (object != null && index < length) { + object = object[path[index++]]; + } + return (index && index == length) ? object : undefined; +}; + +export const fieldHandler = (obj: any, fieldPath: string, fieldHandlerType: FieldHandlerType): any => { + // fieldList contains the split Path to individual fields for fieldPath + // and the baseGet breaks when the first field do not exist + // ex: if fieldPath is `a.[0].b.c` then dotFieldPath is `a.0.b.c` + let dotFieldPath: any = fieldPath.split('.[').join('.'); + dotFieldPath = dotFieldPath.split('].').join('.'); + dotFieldPath = dotFieldPath.split('.'); + const array = fieldPath.includes('['); + + const fieldExists = baseGet(obj, dotFieldPath); + // only if the configured field exist check recursively for all entries in object + if (array) { + // use setNestedPath + setNestedPath(obj, fieldPath, fieldHandlerType); + } else if (fieldExists) { + // use normal set and return + updateObject(obj, fieldPath, fieldExists, fieldHandlerType); + } + return obj; +}; \ No newline at end of file diff --git a/packages/resource-base-interface/src/experimental/AccessControlledServiceBase.ts b/packages/resource-base-interface/src/experimental/AccessControlledServiceBase.ts new file mode 100644 index 00000000..cd97ede1 --- /dev/null +++ b/packages/resource-base-interface/src/experimental/AccessControlledServiceBase.ts @@ -0,0 +1,362 @@ +import { + type CallContext, +} from 'nice-grpc-common'; +import { type ServiceConfig } from '@restorecommerce/service-config'; +import { type Logger } from '@restorecommerce/logger'; +import { type DatabaseProvider } from '@restorecommerce/chassis-srv'; +import { Topic } from '@restorecommerce/kafka-client'; +import { + 
ACSClientContext, + AuthZAction, + DefaultACSClientContextFactory, + Operation, + ResourceFactory, + access_controlled_function, + access_controlled_service, + injects_meta_data, + resolves_subject, +} from '@restorecommerce/acs-client'; +import { + DeepPartial, + type DeleteRequest, + DeleteResponse, + Filter_Operation, + Filter_ValueType, + ReadRequest, + type ResourceList, + type ResourceListResponse, + ResourceResponse, + ServiceImplementation, +} from '@restorecommerce/rc-grpc-clients/dist/generated-server/io/restorecommerce/resource_base.js'; +import { + type Subject, +} from '@restorecommerce/rc-grpc-clients/dist/generated-server/io/restorecommerce/auth.js'; +import { + OperationStatusCodes, + ResourcesAPIBase, + ServiceBase, + ServiceBaseOperationStatusCodes, + ServiceBaseStatusCodes, + StatusCodes, +} from '../core/index.js'; + +export const ACSContextFactory = async ( + self: AccessControlledServiceBase, + request: I & DeleteRequest, + context: any +): Promise => { + const ids = request.ids ?? request.items?.map((item: any) => item.id); + const resources = await self.get(ids, request.subject, context); + return { + ...context, + subject: request.subject, + resources: [ + ...resources.items ?? [], + ...request.items ?? [], + ], + }; +}; + +export const DefaultResourceFactory = ( + ...resourceNames: string[] +): ResourceFactory => async ( + self: any, + request: T, + context?: CallContext, +) => (resourceNames?.length ? 
resourceNames : [self.name])?.map( + resourceName => ({ + resource: resourceName, + id: request.items?.map((item: any) => item.id) + }) +); + +export const AccessControlledServiceBaseOperationStatusCodes = { + ...ServiceBaseOperationStatusCodes, + LIMIT_EXHAUSTED: { + code: 500, + message: 'Query limit 1000 exhausted!', + }, +}; +export type AccessControlledServiceBaseOperationStatusCodes = OperationStatusCodes; + +@access_controlled_service +export class AccessControlledServiceBase + extends ServiceBase + implements ServiceImplementation +{ + protected override get statusCodes(): ServiceBaseStatusCodes { + return super.statusCodes; + } + + protected override set statusCodes(value: StatusCodes) { + super.statusCodes = value; + } + + protected override get operationStatusCodes(): AccessControlledServiceBaseOperationStatusCodes { + return super.operationStatusCodes; + } + + protected override set operationStatusCodes(value: OperationStatusCodes) { + super.operationStatusCodes = value; + } + + constructor( + resourceName: string, + topic: Topic, + db: DatabaseProvider, + cfg: ServiceConfig, + logger?: Logger, + enableEvents?: boolean, + collectionName?: string, + ) { + collectionName ??= resourceName + 's'; + const fieldHandlers = cfg.get('fieldHandlers'); + fieldHandlers.bufferedFields = fieldHandlers.bufferedFields?.flatMap( + (item: any) => typeof(item) === 'string' + ? item + : item.entities?.includes(collectionName) + ? item.fields + : item.entities + ? [] + : item.fields + ); + fieldHandlers.timeStampFields = fieldHandlers.timeStampFields?.flatMap( + (item: any) => typeof(item) === 'string' + ? item + : item.entities?.includes(collectionName) + ? item.fields + : item.entities + ? 
[] + : item.fields + ); + const graph = cfg.get('graph'); + super( + resourceName, + topic, + logger, + new ResourcesAPIBase( + db, + collectionName, + fieldHandlers, + graph?.vertices?.[collectionName], + graph?.name, + logger, + resourceName + ), + enableEvents, + ); + this.operationStatusCodes = { + ...AccessControlledServiceBaseOperationStatusCodes, + ...cfg?.get('operationStatusCodes') + }; + } + + protected catchStatusError(e?: any, item?: T): T { + item ??= {} as T; + const { + code, + title, + message, + details, + } = e ?? {}; + item.status = { + id: item?.payload?.id, + code: Number.isInteger(code) ? code : 500, + message: message ? [ + title, + message, + details, + ].filter(s => s).join('; ') : 'Unknwon Error!' + }; + this.logger?.warn(e?.stack ?? item.status.message, item); + return item; + } + + protected catchOperationError(e?: any, response?: T): T { + response ??= {} as T; + const { + code, + title, + message, + details, + } = e ?? {}; + response.operation_status = { + code: Number.isInteger(code) ? code : 500, + message: message ? [ + title, + message, + details, + ].filter(s => s).join('; ') : 'Unknwon Error!' + }; + this.logger?.error(e?.stack ?? 
response.operation_status.message, response); + return response; + } + + protected async superRead( + request: ReadRequest, + context?: CallContext, + ): Promise> { + return await super.read(request, context); + } + + protected async superCreate( + request: I, + context?: CallContext, + ): Promise> { + return await super.create( + request, + context, + ); + } + + protected async superUpdate( + request: I, + context?: CallContext, + ): Promise> { + return await super.update( + request, + context, + ); + } + + protected async superUpsert( + request: I, + context?: CallContext, + ): Promise> { + return await super.upsert( + request, + context, + ); + } + + protected async superDelete( + request: DeleteRequest, + context?: CallContext, + ): Promise { + return await super.delete( + request, + context, + ); + } + + public async get( + ids: string[], + subject?: Subject, + context?: CallContext, + bypassACS = false, + ): Promise> { + ids = [...new Set(ids)].filter(id => id); + if (ids.length > 1000) { + throw this.operationStatusCodes.LIMIT_EXHAUSTED; + } + + if (ids.length === 0) { + const response = { + total_count: 0, + operation_status: this.operationStatusCodes.SUCCESS, + }; + return response as DeepPartial; + } + + const request = ReadRequest.fromPartial({ + filters: [{ + filters: [{ + field: '_key', + operation: Filter_Operation.in, + value: JSON.stringify(ids), + type: Filter_ValueType.ARRAY + }] + }], + limit: ids.length, + subject + }); + if (bypassACS) { + return await this.superRead(request, context); + } + else { + return await this.read(request, context); + } + } + + @resolves_subject() + @injects_meta_data() + @access_controlled_function({ + action: AuthZAction.CREATE, + operation: Operation.isAllowed, + context: ACSContextFactory, + resource: DefaultResourceFactory(), + database: 'arangoDB', + useCache: true, + }) + public override async create( + request: I, + context?: CallContext + ): Promise> { + return await this.superCreate(request, context); + } + + 
@access_controlled_function({ + action: AuthZAction.READ, + operation: Operation.whatIsAllowed, + context: DefaultACSClientContextFactory, + resource: DefaultResourceFactory(), + database: 'arangoDB', + useCache: true, + }) + public override async read( + request: ReadRequest, + context?: CallContext, + ): Promise> { + return await this.superRead(request, context); + } + + @resolves_subject() + @injects_meta_data() + @access_controlled_function({ + action: AuthZAction.MODIFY, + operation: Operation.isAllowed, + context: ACSContextFactory, + resource: DefaultResourceFactory(), + database: 'arangoDB', + useCache: true, + }) + public override async update( + request: I, + context?: CallContext, + ): Promise> { + return await this.superUpdate(request, context); + } + + @resolves_subject() + @injects_meta_data() + @access_controlled_function({ + action: AuthZAction.MODIFY, + operation: Operation.isAllowed, + context: ACSContextFactory, + resource: DefaultResourceFactory(), + database: 'arangoDB', + useCache: true, + }) + public override async upsert( + request: I, + context?: CallContext, + ): Promise> { + return await this.superUpsert(request, context); + } + + @resolves_subject() + @access_controlled_function({ + action: AuthZAction.DELETE, + operation: Operation.isAllowed, + context: ACSContextFactory, + resource: DefaultResourceFactory(), + database: 'arangoDB', + useCache: true, + }) + public override async delete( + request: DeleteRequest, + context?: CallContext, + ): Promise { + return this.superDelete(request, context); + } +} diff --git a/packages/resource-base-interface/src/experimental/ClientRegister.ts b/packages/resource-base-interface/src/experimental/ClientRegister.ts new file mode 100644 index 00000000..e7a9b99a --- /dev/null +++ b/packages/resource-base-interface/src/experimental/ClientRegister.ts @@ -0,0 +1,64 @@ +import { type ServiceConfig } from '@restorecommerce/service-config'; +import { type Logger } from '@restorecommerce/logger'; +import { + 
Client, + GrpcClientConfig, + createChannel, + createClient, +} from '@restorecommerce/grpc-client'; +import { CompatServiceDefinition } from 'nice-grpc'; + +export type CRUDServiceDefinition = CompatServiceDefinition & { + methods: { + create: any; + read: any; + update: any; + upsert: any; + delete: any; + }; +}; + +export class ClientRegister { + protected static readonly GLOBAL_REGISTER: Map> = new Map>(); + + constructor( + protected readonly cfg: ServiceConfig, + protected readonly logger: Logger, + protected readonly register: Map> = ClientRegister.GLOBAL_REGISTER, + ) {} + + public get( + definition: T + ): Client { + if (this.register.has(definition.fullName.toString())) { + return this.register.get(definition.fullName.toString()); + } + + const config = this.cfg.get( + `client:${definition.name}` + ) ?? Object.values( + this.cfg.get(`client`) ?? [] + )?.find( + (client: any) => ( + client.fullName === definition.fullName + || client.name === definition.name + ) + ); + + if (!config) { + throw new Error(`Config for ${definition.fullName.toString()} is missing!`); + } + + const client = createClient( + { + ...config, + logger: this.logger, + } as GrpcClientConfig, + definition, + createChannel(config.address) + ); + + this.register.set(definition.fullName.toString(), client); + return client; + } +} \ No newline at end of file diff --git a/packages/resource-base-interface/src/experimental/Pipe.ts b/packages/resource-base-interface/src/experimental/Pipe.ts new file mode 100644 index 00000000..a6418832 --- /dev/null +++ b/packages/resource-base-interface/src/experimental/Pipe.ts @@ -0,0 +1,9 @@ +export class Pipe { + constructor( + public readonly value: I, + ) {} + + public then(fn: (v: I) => O){ + return new Pipe(fn(this.value)); + } +} \ No newline at end of file diff --git a/packages/resource-base-interface/src/experimental/ResourceAggregator.ts b/packages/resource-base-interface/src/experimental/ResourceAggregator.ts new file mode 100644 index 
00000000..6ede3966 --- /dev/null +++ b/packages/resource-base-interface/src/experimental/ResourceAggregator.ts @@ -0,0 +1,202 @@ + +import { type ServiceConfig } from '@restorecommerce/service-config'; +import { type Logger } from '@restorecommerce/logger'; +import { + Resource, + ResourceList, + ResourceListResponse, + Filter_ValueType, + Filter_Operation, +} from '@restorecommerce/rc-grpc-clients/dist/generated-server/io/restorecommerce/resource_base.js'; +import { + type CallContext, +} from 'nice-grpc-common'; +import { Subject } from '@restorecommerce/rc-grpc-clients/dist/generated-server/io/restorecommerce/auth.js'; +import { + ResourceMap, + OnMissingCallback, + DEFAULT_STRICT_CALLBACK, +} from './ResourceMap.js'; +import { + ClientRegister, + CRUDServiceDefinition +} from './ClientRegister.js'; + +export type Aggregation = T & C; +export type ResolverParams> = [string, Map, M?, T?]; +export type ArrayResolverParams> = [string, Map, M[]?, T[]?]; +export type ElementOf = T extends Array ? E : T; +export type ResolverMap = { + [K in keyof T]?: ResolverParams | ArrayResolverParams | T[K] +} & {}; +export type ResolvedNode = T extends ResolverParams + ? ( + T[2] extends object + ? Resolved + : T[3] + ) + : M extends object + ? Resolved + : T; +export type Resolved = { + [K in keyof T]?: T[K] extends object + ? 
ResolvedNode + : T[K] +}; + +export const Resolver = ( + search_key: string, + source: Map, + map?: M, +): ResolverParams => [ + search_key, + source, + map, + {} as T, +]; + +export const ArrayResolver = ( + search_key: string, + source: Map, + map?: M, +): ArrayResolverParams => [ + search_key, + source, + [map], + [] as T[] +]; + +export class ResourceAggregator { + constructor( + protected readonly cfg: ServiceConfig, + protected readonly logger: Logger, + protected readonly register = new ClientRegister(cfg, logger), + ) {} + + public async getByIds( + ids: string | string[], + service: CRUDServiceDefinition, + subject?: Subject, + context?: CallContext, + ) { + ids = [...new Set( + [ids].flatMap( + id => id + ).filter( + Boolean + ) + )]; + const request = ids?.length ? { + filters: [{ + filters: [ + { + field: '_key', + operation: Filter_Operation.in, + value: JSON.stringify(ids), + type: Filter_ValueType.ARRAY, + } + ] + }], + limit: ids.length, + subject, + } : undefined; + const client = this.register.get(service) as any; + const response = request && await client.read(request, context); + const map = new ResourceMap( + response?.items?.map( + (item: any) => item.payload + ), + service?.name?.toString() + ); + return map; + } + + public async aggregate( + target: T, + sources: { + service: CRUDServiceDefinition; + map_by_ids: (target: T) => string[]; + container: string; + entity?: string; + }[], + template?: C, + subject?: Subject, + context?: CallContext, + strict: OnMissingCallback = DEFAULT_STRICT_CALLBACK, + ): Promise> { + const ids = sources.map( + source => source.map_by_ids(target) + ); + const source_map = await Promise.all( + sources.map( + (source, i) => this.getByIds( + ids[i] ?? [], + source.service, + subject, + context, + ) + ) + ); + const aggregation = Object.assign( + target, + ...sources.map((source, i) => ({ + [source.container]: new ResourceMap( + source_map[i].getMany( + ids[i]?.flatMap(ids => ids) ?? 
[], + strict + ), + source.entity + ) + })), + ) as Aggregation; + return aggregation; + } +} + +export function resolve( + entity: T, + resolverMap?: M, +): Resolved; +export function resolve( + entity: T[], + resolverMap?: M[], +): Resolved[] { + if (!entity) { + return; + } + else if (Array.isArray(entity)) { + return entity.map(value => resolve(value, resolverMap[0])); + } + else { + const copy = { ...(entity as any) }; + return Object.assign( + copy, + ...Object.entries(resolverMap ?? {}).map( + ([k, r]) => { + const id = typeof r?.[0] === 'string' && copy[r[0]]; + if (!id) { + return { + [k]: r?.[2] ? resolve(copy[k], r[2]) : resolve(copy[k], r) + }; + } + else if (Array.isArray(id)) { + return { + [k]: id.map( + id => r[2] + ? resolve(r[1]?.get(id.toString()), r[2]) + : r[1]?.get(id.toString()) + ) + }; + } + else if (typeof id === 'string') { + return { + [k]: r[2] + ? resolve(r[1]?.get(id), r[2]) + : r[1]?.get(id) + }; + } + } + ).filter(e => e) + ); + } +} \ No newline at end of file diff --git a/packages/resource-base-interface/src/experimental/ResourceAwaitQueue.ts b/packages/resource-base-interface/src/experimental/ResourceAwaitQueue.ts new file mode 100644 index 00000000..e4f3aa13 --- /dev/null +++ b/packages/resource-base-interface/src/experimental/ResourceAwaitQueue.ts @@ -0,0 +1,44 @@ + +export type ResourceAwaitFunc = (resource?: T) => void; + +export type ResourceAwaitMutex = { + resolve: ResourceAwaitFunc; + reject: (error?: any) => void; +}; + +export class ResourceAwaitQueue extends Map> { + public override set(key: K, value: ResourceAwaitMutex) { + if (this.has(key)) { + value.reject(new Error('Resource in use!')); + } + else { + return super.set(key, value); + } + } + + public async await(key: K, timeout?: number) { + return new Promise( + (resolve, reject) => { + this.set(key, { resolve, reject }); + if (timeout) { + setTimeout( + () => reject({ + code: 500, + message: `Event ${key} did not respond before timeout of ${timeout}ms!` + }), 
timeout + ); + } + } + ); + } + + public resolve(key: K, resource?: T) { + this.get(key)?.resolve(resource); + this.delete(key); + } + + public reject(key: K, error?: any) { + this.get(key)?.reject(error); + this.delete(key); + } +} \ No newline at end of file diff --git a/packages/resource-base-interface/src/experimental/ResourceMap.ts b/packages/resource-base-interface/src/experimental/ResourceMap.ts new file mode 100644 index 00000000..e3141ef4 --- /dev/null +++ b/packages/resource-base-interface/src/experimental/ResourceMap.ts @@ -0,0 +1,80 @@ +import { + Resource, +} from '@restorecommerce/rc-grpc-clients/dist/generated-server/io/restorecommerce/resource_base.js'; +import { + Status, +} from '@restorecommerce/rc-grpc-clients/dist/generated-server/io/restorecommerce/status.js'; + + +export type OnMissingCallback = (id?: string, entity?: string) => any; + +export const DEFAULT_STRICT_CALLBACK: OnMissingCallback = ( + id?: string, + entity?: string +) => { + throw new Error(`Resource missing: { id: ${id}, entity: ${entity} }!`); +}; + +export const DEFAULT_STATUS_CALLBACK: OnMissingCallback = ( + id?: string, + entity?: string +): Status => ({ + id, + code: 404, + message: `${entity ?? 'Entity'} ${id} is missing!` +}); + +export class ResourceMap extends Map { + protected _all?: T[]; + + public get all() { + this._all = this._all ?? 
[...this.values()]; + return this._all; + } + + constructor( + items?: T[], + public readonly entity = items[0]?.constructor?.name, + ) { + super(items?.filter( + item => item, + ).map( + item => [item.id, item] + )); + } + + public override set(key: string, value: T) { + delete this._all; + return super.set(key, value); + } + + public override clear() { + delete this._all; + return super.clear(); + } + + public override delete(key: string) { + delete this._all; + return super.delete(key); + } + + public override get( + id: string, + onMissing: OnMissingCallback = DEFAULT_STRICT_CALLBACK + ): T { + if (id && onMissing && !this.has(id)) { + const error = onMissing(id, this.entity); + if (error) { + throw error; + } + } + return super.get(id); + } + + public getMany( + ids: string[], + onMissing: OnMissingCallback = DEFAULT_STRICT_CALLBACK + ): T[] { + return ids?.map(id => this.get(id, onMissing)); + } +} \ No newline at end of file diff --git a/packages/resource-base-interface/src/experimental/WorkerBase.ts b/packages/resource-base-interface/src/experimental/WorkerBase.ts new file mode 100644 index 00000000..07f6bf23 --- /dev/null +++ b/packages/resource-base-interface/src/experimental/WorkerBase.ts @@ -0,0 +1,415 @@ +import { type ServiceImplementation } from 'nice-grpc'; +import { type CompatServiceDefinition } from 'nice-grpc/lib/service-definitions'; +import { type RedisClientType, createClient } from 'redis'; +import { + Server, + OffsetStore, + database, + buildReflectionService, + Health, + DatabaseProvider, + CommandInterface, +} from '@restorecommerce/chassis-srv'; +import { + Events, + Topic, + registerProtoMeta +} from '@restorecommerce/kafka-client'; +import { Arango } from '@restorecommerce/chassis-srv/lib/database/provider/arango/base.js'; +import { createLogger, type Logger } from '@restorecommerce/logger'; +import { BindConfig } from '@restorecommerce/chassis-srv/lib/microservice/transport/provider/grpc/index.js'; +import { + ProtoMetadata, + 
protoMetadata as CommandInterfaceMeta, + CommandInterfaceServiceDefinition, +} from '@restorecommerce/rc-grpc-clients/dist/generated-server/io/restorecommerce/commandinterface.js'; +import { + protoMetadata as JobMeta +} from '@restorecommerce/rc-grpc-clients/dist/generated-server/io/restorecommerce/job.js'; +import { HealthDefinition } from '@restorecommerce/rc-grpc-clients/dist/generated-server/grpc/health/v1/health.js'; +import { ServerReflectionService } from 'nice-grpc-server-reflection'; +import { + createServiceConfig, + type ServiceConfig +} from '@restorecommerce/service-config'; +import { initAuthZ } from '@restorecommerce/acs-client'; +import { runWorker } from '@restorecommerce/scs-jobs'; +import { ServiceBase } from '../index.js'; + +export type ReflectionService = ServiceImplementation; +export type EventHandler = (msg: any, context?: any, config?: any, eventName?: string) => Promise; +export interface ServiceBindConfig extends BindConfig { + name: string; + meta: ProtoMetadata; +} + +export abstract class WorkerBase { + private _cfg: ServiceConfig; + private _logger: Logger; + private _server: Server; + private _db: DatabaseProvider; + private _events: Events; + private _offsetStore: OffsetStore; + private _reflectionService: ReflectionService; + private _commandInterface: CommandInterface; + + get cfg() { + return this._cfg; + } + + protected set cfg(value: ServiceConfig) { + this._cfg = value; + } + + get logger() { + return this._logger; + } + + protected set logger(value: Logger) { + this._logger = value; + } + + get server() { + return this._server; + } + + protected set server(value: Server) { + this._server = value; + } + + get db() { + return this._db; + } + + protected set db(value: DatabaseProvider) { + this._db = value; + } + + get offsetStore() { + return this._offsetStore; + } + + protected set offsetStore(value: OffsetStore) { + this._offsetStore = value; + } + + get events() { + return this._events; + } + + protected set events(value: 
Events) { + this._events = value; + } + + get commandInterface() { + return this._commandInterface; + } + + protected set commandInterface(value: CommandInterface) { + this._commandInterface = value; + } + + get reflectionService() { + return this._reflectionService; + } + + protected set reflectionService(value: ReflectionService) { + this._reflectionService = value; + } + + protected readonly services = new Map | ServiceBase | CommandInterface>(); + protected readonly topics = new Map(); + protected readonly eventHandlers = new Map(); + protected readonly jobHandler: ServiceImplementation = { + handleQueuedJob: (msg: any, context: any, config?: any, eventName?: string) => { + return this.eventHandlers.get(msg?.type)(msg?.data?.payload, context, config, msg?.type).then( + () => this.logger?.info(`Job ${msg?.type} done.`), + ({ code, message, details, stack }: any) => this.logger?.error( + `Job ${msg?.type} failed:`, + { code, message, details, stack } + ) + ); + } + }; + + /** + * Override this factory function and return a list of ServiceBindConfig[]. + * Each ServiceBindConfig expects a ServiceBase implementation, + * a ServiceDefinition and a ProtoMetadata. + * Worker.start() binds all configured services to the server instance. + */ + protected abstract initServices(): Promise[]>; + + protected async bindServices(configs: ServiceBindConfig[]) { + this.logger?.verbose('bind Services'); + const serviceNames = this.cfg.get('serviceNames'); + configs.forEach( + config => { + this.logger?.debug('bind Service:', serviceNames?.[config.name] ?? config.name); + this.services.set(serviceNames?.[config.name] ?? config.name, config.implementation); + } + ); + await Promise.all( + configs.map( + config => this.server.bind( + serviceNames?.[config.name] ?? 
config.name, + config, + ) + ) + ); + } + + protected async bindCommandInterface(configs: ServiceBindConfig[]) { + this.logger?.verbose('bind CommandInterface'); + this.commandInterface = [...this.services.values()].find( + service => service instanceof CommandInterface + ) as CommandInterface; + + if (this.commandInterface) { + return; + } + + const serviceName = this.cfg.get('serviceNames:cis'); + if (!serviceName) { + this.logger?.warn( + 'CommandInterface not initialized', + 'serviceNames:cis for CommandInterface not set!', + ); + return; + } + + const redisConfig = this.cfg.get('redis'); + redisConfig.db = this.cfg.get('redis:db-indexes:db-subject'); + const redisClient: RedisClientType = createClient(redisConfig); + await redisClient.connect(); + + this.commandInterface = new CommandInterface( + this.server, + this.cfg, + this.logger, + this.events as any, + redisClient, + ); + this.services.set(serviceName, this.commandInterface); + configs.push( + { + name: serviceName, + service: CommandInterfaceServiceDefinition, + implementation: this.commandInterface, + meta: CommandInterfaceMeta, + } as ServiceBindConfig + ); + await this.server.bind( + serviceName, + { + service: CommandInterfaceServiceDefinition, + implementation: this.commandInterface, + } as BindConfig + ); + } + + protected async bindJobHandler() { + this.logger?.verbose('bind JobHandler'); + const serviceName = this.cfg.get('serviceNames:cis'); + if (!serviceName) { + this.logger?.warn( + 'JobHandler not initialized', + 'serviceNames:jobs for JobHandler not set!', + ); + return; + } + this.services.set(serviceName, this.jobHandler); + } + + protected async bindRefelctions(configs: ServiceBindConfig[]) { + this.logger?.verbose('bind ReflectionService'); + const serviceName = this.cfg.get('serviceNames:reflection'); + if (!serviceName) { + this.logger?.warn( + 'ReflectionService not initialized', + 'serviceNames:reflection for ReflectionService not set!', + ); + return; + } + + const metas = 
configs.map(config => config.meta); + registerProtoMeta( + ...metas + ); + + this.reflectionService = buildReflectionService( + metas.map( + meta => ({ + descriptor: meta.fileDescriptor as any + }) + ) + ); + + this.services.set(serviceName, this.reflectionService); + return this.server.bind( + serviceName, + { + service: ServerReflectionService, + implementation: this.reflectionService, + } + ); + } + + protected async bindHealthCheck() { + this.logger?.verbose('bind HealthCheckService'); + const name = this.cfg.get('serviceNames:health'); + + if (!name) { + this.logger?.warn( + 'HealthCheckService not initialized', + 'serviceNames:health for HealthCheckService not set!', + ); + return; + } + + if (!this.commandInterface) { + this.logger?.warn( + 'HealthCheckService not initialized', + 'CommandInterface missing!', + ); + return; + } + + await this.server.bind( + name, + { + service: HealthDefinition, + implementation: new Health( + this.commandInterface, + { + logger: this.logger, + cfg: this.cfg, + dependencies: [], + readiness: async () => !!await (this.db as Arango).db.version() + } + ) + } as BindConfig + ); + } + + protected bindHandler(serviceName: string, functionName: string) { + serviceName = this.cfg.get(`serviceNames:${serviceName}`) ?? serviceName; + this.logger?.debug(`Bind event to handler: ${serviceName}.${functionName}`); + return (msg: any, context: any, config: any, eventName: string): Promise => { + return (this.services.get(serviceName) as any)?.[functionName]?.(msg, context).then( + () => this.logger?.debug(`Event ${eventName} handled.`), + ({ code, message, details, stack }: any) => this.logger?.error( + `Error while handling event ${eventName}:`, + { code, message, details, stack } + ), + ) ?? 
this.logger?.warn( + `Event ${eventName} was not bound to handler: ${serviceName}.${functionName} does not exist!.` + ); + }; + } + + protected async bindEvents() { + this.logger?.verbose('bind Events'); + const serviceNames = this.cfg.get('serviceNames'); + const kafkaCfg = this.cfg.get('events:kafka'); + this.events = new Events(kafkaCfg, this.logger); + await this.events.start(); + this.offsetStore = new OffsetStore(this.events as any, this.cfg, this.logger); + + await Promise.all(Object.entries(kafkaCfg.topics).map(async ([key, value]: any[]) => { + const topicName = value.topic; + const topic = await this.events.topic(topicName); + const offsetValue = await this.offsetStore.getOffset(topicName); + this.logger?.verbose('subscribing to topic with offset value', topicName, offsetValue); + Object.entries(value.events as { [key: string]: string } ?? {}).forEach( + ([eventName, handler]) => { + const i = handler.lastIndexOf('.'); + const name = handler.slice(0, i); + const serviceName = serviceNames?.[name] ?? 
name; + const functionName = handler.slice(i+1); + this.eventHandlers.set(eventName, this.bindHandler(serviceName, functionName)); + topic.on( + eventName as string, + this.eventHandlers.get(eventName), + { startingOffset: offsetValue } + ); + } + ); + this.topics.set(key, topic); + })); + } + + protected async bindScheduledJobs() { + const job_config = this.cfg.get('scs-jobs'); + if (job_config) { + registerProtoMeta( + JobMeta + ); + await Promise.all(Object.values<{ import?: string }>(job_config)?.map( + async job => { + try { + if (job.import?.endsWith('.js') || job.import?.endsWith('.cjs')) { + const fileImport = await import(job.import); + if (fileImport?.default?.default) { + await fileImport.default.default(this.cfg, this.logger, this.events, runWorker); + } else { + await fileImport.default(this.cfg, this.logger, this.events, runWorker); + } + } + } + catch ({ code, message, details, stack }: any) { + this.logger?.error( + `Error scheduling external job ${job.import}`, + { code, message, details, stack } + ); + } + } + )); + } + } + + public async start( + cfg?: ServiceConfig, + logger?: Logger, + ): Promise { + this.cfg = cfg = cfg ?? 
createServiceConfig(process.cwd()); + const logger_cfg = cfg.get('logger'); + + if (logger) { + this.logger = logger; + } + else if (logger_cfg) { + logger_cfg.esTransformer = (msg: any) => { + msg.fields = JSON.stringify(msg.fields); + return msg; + }; + this.logger = logger = createLogger(logger_cfg); + } + + this.server = new Server(this.cfg.get('server'), this.logger); + this.db = await database.get(this.cfg.get('database:main'), this.logger); + + await this.bindJobHandler(); + await this.bindEvents(); + const serviceConfigs = await this.initServices(); + await this.bindServices(serviceConfigs); + await this.bindCommandInterface(serviceConfigs); + await this.bindHealthCheck(); + await this.bindRefelctions(serviceConfigs); + await this.bindScheduledJobs(); + + // start server + await initAuthZ(this.cfg); + await this.server.start(); + this.logger?.info('Server started successfully'); + } + + async stop(): Promise { + this.logger?.info('Shutting down'); + await Promise.allSettled([ + this.server?.stop(), + this.events?.stop(), + this.offsetStore?.stop(), + ]); + } +} diff --git a/packages/resource-base-interface/src/experimental/index.ts b/packages/resource-base-interface/src/experimental/index.ts new file mode 100644 index 00000000..9dc18f94 --- /dev/null +++ b/packages/resource-base-interface/src/experimental/index.ts @@ -0,0 +1,7 @@ +export * from './AccessControlledServiceBase.js'; +export * from './ClientRegister.js'; +export * from './ResourceAggregator.js'; +export * from './ResourceAwaitQueue.js'; +export * from './ResourceMap.js'; +export * from './WorkerBase.js'; +export * from './Pipe.js'; \ No newline at end of file diff --git a/packages/resource-base-interface/src/index.ts b/packages/resource-base-interface/src/index.ts new file mode 100644 index 00000000..aff80cfb --- /dev/null +++ b/packages/resource-base-interface/src/index.ts @@ -0,0 +1,182 @@ +import * as _ from 'lodash'; +import { Filter, FilterOperation, FilterValueType, OperatorType, 
ReadRequest } from './core/interfaces.js'; + +/* +const filterOperationMap = new Map([ + [0, 'eq'], + [1, 'lt'], + [2, 'lte'], + [3, 'gt'], + [4, 'gte'], + [5, 'isEmpty'], + [6, 'iLike'], + [7, 'in'], + [8, 'neq'] +]); +*/ + +const filterOperatorMap = new Map([ + [0, 'and'], + [1, 'or'] +]); + +const insertFilterFieldOpValue = (filter: Filter, object: any, key: string) => { + let value: any = undefined; + filter.type ??= FilterValueType.STRING; // defaults to string if undefined + switch (filter.type) { + case FilterValueType.NUMBER: + value = Number(filter.value); + break; + case FilterValueType.BOOLEAN: + if (filter.value === 'true') { + value = true; + } else if (filter.value === 'false') { + value = false; + } + break; + case FilterValueType.ARRAY: + try { + value = JSON.parse(filter.value); + } catch (err: any) { + // to handle JSON string parse error + if (err.message.includes('Unexpected token')) { + value = JSON.parse(JSON.stringify(filter.value)); + } else { + throw err; + } + } + break; + case FilterValueType.DATE: + value = (new Date(filter.value)).getTime(); + break; + default: + case FilterValueType.STRING: + value = filter.value; + break; + } + + object = key ? 
object[key] : []; + if (!Array.isArray(object)) { + throw new Error('Filter object has to be of type Array'); + } + filter.operation ??= FilterOperation.eq; // defaults to eq if undefined; + switch (filter.operation) { + case FilterOperation.eq: + object.push({ [filter.field]: value }); + break; + case FilterOperation.neq: + object.push({ [filter.field]: { $not: { $eq: value } } }); + break; + default: + object.push({ [filter.field]: { [`$${filter.operation}`]: value } }); + break; + } + return object; +}; + +/** + * Takes filter object containing field, operation and value and updates the filter in + * object with operator style understandable by chassis-srv for later to be used for + * AQL conversion + * @param object converted filter object + * @param originalKey operator value + * @param filter object containing field, operation, value and type + * @returns object + */ +const convertFilterToObject = (object: any, operatorKey: string, filter: Filter) => { + if (object !== null) { + if (Array.isArray(object)) { + for (const arrayItem of object) { + convertFilterToObject(arrayItem, operatorKey, filter); + } + } else if (typeof object === 'object') { + for (const key of Object.keys(object)) { + // Match found, update object with filter field, operation and value into object + if (key === operatorKey) { + object = insertFilterFieldOpValue(filter, object, operatorKey); + } else { + convertFilterToObject(object[key], operatorKey, filter); + } + } + } + } + if (!operatorKey) { + // should be root level filter + object = insertFilterFieldOpValue(filter, object, operatorKey); + object = object[0]; + } + return object; +}; + +/** + * convertToObject takes input contained in the proto structure defined in resource_base proto + * and converts it into Object understandable by the underlying DB implementation in chassis-srv + * @param {*} input Original filter input object + * @param {*} obj converted filter objected passed recursively + * @param {*} currentOperator current 
operatro value passed recursively + */ +export const convertToObject = (input: any, obj?: any, currentOperator?: string) => { + // since toObject method is called recursively we are not adding the typing to input parameter + let filters; + if (input && !_.isEmpty(input.filters)) { + filters = input.filters; + } else { + filters = input; + } + // by default use 'and' operator if no operator is specified + if (Array.isArray(filters?.filters) && !filters.operator) { + filters.operator = 'and'; + } + obj ??= {}; + if (Array.isArray(filters)) { + for (const filterObj of filters) { + let operatorValue; + if (typeof filterObj.operator === 'string' || filterObj.operator instanceof String) { + operatorValue = filterObj.operator; + } else if (Number.isInteger(filterObj.operator)) { + operatorValue = filterOperatorMap.get(filterObj.operator); + } + // default to and operator + if (!operatorValue) { + operatorValue = 'and'; + } + const newOperator = `$${operatorValue}`; + if (newOperator && !currentOperator) { + // insert obj with new operator + Object.assign(obj, { [newOperator]: [] }); + } + convertToObject(filterObj, obj, newOperator); + } + } else if (filters.field && (filters.operation || filters.operation === 0) && filters.value !== undefined) { + // object contains field, operation and value, update it on obj using convertFilterToObject() + obj = convertFilterToObject(obj, currentOperator, filters); + } else if (Array.isArray(filters?.filters)) { + for (const filterObj of filters.filters) { + const operator = filters.operator ? filters.operator : 'and'; + convertToObject(filterObj, obj, operator); + } + } + return obj; +}; + +/** + * converts input filters to json object understandable by chassis-srv for AQL conversion + * @param input input filters object + * @returns json object understandable by chassis-srv for AQL conversion + */ +export const toObject = (input: ReadRequest) => { + const filters = input.filters ?? 
[]; + const result: Record[] = filters.map( + filter => { + const obj = filter.filters.map((sf) => convertToObject(sf, {})); + const operatorValue = filter?.operator ?? OperatorType.and; // defaults to `and` + return { + [`$${operatorValue}`]: obj + }; + } + ); + + return result.length === 1 ? result[0] : result; +}; + +export * from './core/index.js'; diff --git a/packages/resource-base-interface/test/cfg/config.json b/packages/resource-base-interface/test/cfg/config.json new file mode 100644 index 00000000..8897d2f6 --- /dev/null +++ b/packages/resource-base-interface/test/cfg/config.json @@ -0,0 +1,278 @@ +{ + "database": { + "testdb": { + "provider": "arango", + "host": "127.0.0.1", + "port": 8529, + "database": "resource-base-test", + "autoCreate": true, + "arangoSearch": [ + { + "collectionName": "resources", + "path": "test/views/resources_view.json" + } + ] + } + }, + "graph": { + "graphName": "testGraph", + "vertices": { + "persons": [ + { + "edgeName": "has", + "from": "id", + "to": "car_id", + "direction": "outbound", + "toVerticeName": "cars" + }, + { + "edgeName": "lives", + "from": "id", + "to": "state_id", + "direction": "outbound", + "toVerticeName": "states" + } + ], + "cars": [ + { + "edgeName": "belongs", + "from": "id", + "to": "place_id", + "direction": "outbound", + "toVerticeName": "places" + } + ], + "places": [ + { + "edgeName": "resides", + "from": "id", + "to": "state_id", + "direction": "outbound", + "toVerticeName": "states" + } + ], + "states": [ + {} + ] + }, + "edgeDefinitions": [ + { + "collection": "has", + "from": "persons", + "to": "cars" + }, + { + "collection": "belongs", + "from": "cars", + "to": "places" + }, + { + "collection": "resides", + "from": "places", + "to": "states" + }, + { + "collection": "lives", + "from": "persons", + "to": "states" + } + ] + }, + "client": { + "test": { + "address": "localhost:50151" + }, + "testBufferedService": { + "address": "localhost:50152" + }, + "graphsTestService": { + "address": 
"localhost:50151" + } + }, + "events": { + "testevents": { + "provider": "kafka", + "groupId": "restore-resource-base-interface-test", + "kafka": { + "clientId": "restore-resource-base-interface-test", + "brokers": [ + "localhost:29092" + ] + }, + "resourceCreated": { + "messageObject": "io.restorecommerce.resourcebase.Resource" + }, + "resourceModified": { + "messageObject": "io.restorecommerce.resourcebase.Resource" + }, + "resourceDeleted": { + "messageObject": "io.restorecommerce.resourcebase.Resource" + }, + "resourceDeletedAll": { + "messageObject": "io.restorecommerce.resourcebase.Resource" + }, + "testBufferedDataCreated": { + "messageObject": "test.TestBufferedData" + } + }, + "enableEvents": "true" + }, + "server": { + "logger": { + "console": { + "handleExceptions": false, + "level": "error", + "colorize": true, + "prettyPrint": true + } + }, + "services": { + "test": { + "read": { + "transport": [ + "pipeline" + ] + }, + "create": { + "transport": [ + "pipeline" + ] + }, + "delete": { + "transport": [ + "pipeline" + ] + }, + "update": { + "transport": [ + "pipeline" + ] + }, + "upsert": { + "transport": [ + "pipeline" + ] + } + }, + "testBufferedService": { + "read": { + "transport": [ + "pipeline" + ] + }, + "create": { + "transport": [ + "pipeline" + ] + }, + "delete": { + "transport": [ + "pipeline" + ] + } + }, + "graphsTestService": { + "traversal": { + "transport": [ + "pipeline" + ] + } + } + }, + "transports": [ + { + "name": "pipeline", + "provider": "grpc", + "addr": "localhost:50151" + } + ] + }, + "bufferedServer": { + "logger": { + "console": { + "handleExceptions": false, + "level": "silly", + "colorize": true, + "prettyPrint": true + } + }, + "services": { + "test": { + "read": { + "transport": [ + "pipeline" + ] + }, + "create": { + "transport": [ + "pipeline" + ] + }, + "delete": { + "transport": [ + "pipeline" + ] + }, + "update": { + "transport": [ + "pipeline" + ] + }, + "upsert": { + "transport": [ + "pipeline" + ] + } + }, + 
"testBufferedService": { + "read": { + "transport": [ + "pipeline" + ] + }, + "create": { + "transport": [ + "pipeline" + ] + }, + "delete": { + "transport": [ + "pipeline" + ] + } + }, + "graphsTestService": { + "traversal": { + "transport": [ + "pipeline" + ] + } + } + }, + "transports": [ + { + "name": "pipeline", + "provider": "grpc", + "addr": "localhost:50152" + } + ] + }, + "fieldHandlers": { + "fieldGenerators": {}, + "bufferFields": { + "testBufferedData": ["data"], + "resource": ["data"] + }, + "timeStampFields": [ + { + "fields": ["created", "meta.created", "meta.modified"], + "entities": ["resource"] + } + ], + "requiredFields": { + "resource": [ + "text" + ] + } + } +} \ No newline at end of file diff --git a/packages/resource-base-interface/test/crud.spec.ts b/packages/resource-base-interface/test/crud.spec.ts new file mode 100644 index 00000000..86a1d1ad --- /dev/null +++ b/packages/resource-base-interface/test/crud.spec.ts @@ -0,0 +1,878 @@ +import { ResourcesAPIBase, ServiceBase, toObject } from '../src/index.js'; +import * as chassis from '@restorecommerce/chassis-srv'; +import { Channel, createChannel, createClient } from '@restorecommerce/grpc-client'; +import { Events, registerProtoMeta, Topic } from '@restorecommerce/kafka-client'; +import { createServiceConfig } from '@restorecommerce/service-config'; +import * as should from 'should'; +import * as _ from 'lodash'; +import { + Filter_Operation, + Filter_ValueType, + ReadRequest, + protoMetadata as resourceProto, + Sort_SortOrder, +} from '@restorecommerce/rc-grpc-clients/dist/generated-server/io/restorecommerce/resource_base.js'; +import { + protoMetadata as testProto, + CRUDDefinition, + CRUDClient +} from '@restorecommerce/rc-grpc-clients/dist/generated-server/test/test.js'; +import { FilterOp_Operator } from '@restorecommerce/rc-grpc-clients/dist/generated-server/io/restorecommerce/filter.js'; +import { it, describe, beforeAll, afterAll, beforeEach } from 'vitest'; + +registerProtoMeta( + 
resourceProto, + testProto +); + +/* + * Note: To run this test, a running ArangoDB and Kafka instance is required. + * (Kafka is needed only if 'events:enableEvents' config is enabled) + */ + +/* global describe it before after beforeEach */ +describe('converting to filter to object', () => { + it('should convert proto filter to valid DB filter object', () => { + const protoFilter = ReadRequest.fromPartial({ + filters: [{ + filters: [ + { + field: 'device_id', + operation: Filter_Operation.eq, + value: '12345' + }, + { + field: 'overall_status', + operation: Filter_Operation.in, + value: '["BAD", "GOOD"]', + type: Filter_ValueType.ARRAY, + }, + { + field: 'device_active', + operation: Filter_Operation.eq, + value: 'true', + type: Filter_ValueType.BOOLEAN, + }, + { + filters: [{ + filters: [{ + field: 'firstname', + operation: Filter_Operation.eq, + value: 'test_first' + }, { + field: 'lastname', + operation: Filter_Operation.eq, + value: 'test_last' + }, { + field: 'middleName', + operation: Filter_Operation.eq, + value: 'test_middle' + }], + operator: FilterOp_Operator.and, + }], + } + ], // Default And case + operator: FilterOp_Operator.or, + }] + }); + /* eslint-disable */ + const expectedDBObject = { "$or": [{ "device_id": "12345" }, { "overall_status": { "$in": ["BAD", "GOOD"] } }, { "device_active": true }, { "$and": [{ "firstname": "test_first" }, { "lastname": "test_last" }, { "middleName": "test_middle" }] }] }; + const dbFilter = toObject(protoFilter); + should.exist(dbFilter); + dbFilter.should.deepEqual(expectedDBObject); + }); + + it('should convert nested proto filter to valid DB filter object', () => { + const protoFilter = ReadRequest.fromPartial({ + filters: [ + { + filters: [ + { + filters: [ + { + filters: [ + { + field: 'user_type', + operation: Filter_Operation.neq, + value: 'TECHNICAL_USER' + }, + { + field: 'first_name', + operation: Filter_Operation.iLike, + value: '%test%' + }, + { + field: 'last_name', + operation: Filter_Operation.iLike, 
+ value: '%test%' + } + ], + operator: FilterOp_Operator.and, + } + ] + }, + { + filters: [ + { + filters: [ + { + field: 'state', + operation: Filter_Operation.eq, + value: 'BW' + }, + { + field: 'city', + operation: Filter_Operation.eq, + value: 'Stuttgart' + }, + ], + operator: FilterOp_Operator.and, + } + ] + } + ], + operator: FilterOp_Operator.or, // Final Or operator + } + ] + }); + /* eslint-disable */ + const expectedDBObject = { "$or": [{ "$and": [{ "user_type": { "$not": { "$eq": "TECHNICAL_USER" } } }, { "first_name": { "$iLike": "%test%" } }, { "last_name": { "$iLike": "%test%" } }] }, { "$and": [{ "state": "BW" }, { "city": "Stuttgart" }] }] } + const dbFilter = toObject(protoFilter); + should.exist(dbFilter); + dbFilter.should.deepEqual(expectedDBObject); + }); + + it('should convert filters array to valid DB filter object', () => { + const protoFilter = ReadRequest.fromPartial({ + filters: [ + { + filters: [ + { + field: 'id', + operation: Filter_Operation.in, + value: 'test1', + } + ], + operator: FilterOp_Operator.and, + }, + { + filters: [ + { + field: 'id', + operation: Filter_Operation.eq, + value: 'test2', + } + ], + operator: FilterOp_Operator.or, + } + ] + }); + /* eslint-disable */ + const expectedDBObject = [{ "$and": [{ "id": { "$in": "test1" } }] }, { "$or": [{ "id": "test2" }] }] + const dbFilter = toObject(protoFilter); + should.exist(dbFilter); + dbFilter.should.deepEqual(expectedDBObject); + }); + +}); + +let meta = { + created: new Date(), + modified: new Date(), + created_by: 'Admin', + modified_by: 'Admin', + owners: [{ + id: 'urn:restorecommerce:acs:names:ownerIndicatoryEntity', + value: 'urn:restorecommerce:acs:model:user.User', + attributes: [{ + id: 'urn:restorecommerce:acs:names:ownerInstance', + value: 'Admin' + }] + }] +}; + + +describe('ServiceBase', () => { + let db: chassis.GraphDatabaseProvider; + let server: chassis.Server; + let bufferedServer: chassis.Server; + let events: Events; + let channel: Channel; + let 
testService: CRUDClient; + let testData: any; + let cfg; + + const today = new Date(); + const tomorrow = new Date(); + tomorrow.setDate(tomorrow.getDate() + 1); + beforeAll(async () => { + // Load test config from chassis service config + cfg = createServiceConfig(process.cwd() + '/test'); + + server = new chassis.Server(cfg.get('server')); + bufferedServer = new chassis.Server(cfg.get('bufferedServer')); + + events = new Events(cfg.get('events:testevents'), server.logger); + await events.start(); + const resourceName = 'resource'; + const testEvents: Topic = await events.topic('test'); + db = await chassis.database.get(cfg.get('database:testdb'), server.logger) as chassis.GraphDatabaseProvider; + db.registerCustomQuery!('testFilter', 'filter node.value < @customArguments.testParam', 'filter'); + + const bufferHandlerConfig: any = cfg.get('fieldHandlers:bufferFields'); + const entitiesNames = Object.keys(bufferHandlerConfig); + const requiredFieldsConfig: any = cfg.get('fieldHandlers:requiredFields'); + const timeStampFieldsConfigs: any = cfg.get('fieldHandlers:timeStampFields'); + let resourceFieldConfig: any = {}; + if (bufferHandlerConfig && entitiesNames.includes(resourceName)) { + resourceFieldConfig['bufferFields'] = bufferHandlerConfig[resourceName]; + } + if (requiredFieldsConfig && (resourceName in requiredFieldsConfig)) { + resourceFieldConfig['requiredFields'] = requiredFieldsConfig; + } + resourceFieldConfig['timeStampFields'] = []; + for (let timeStampFiledConfig of timeStampFieldsConfigs) { + if (timeStampFiledConfig.entities.includes(resourceName)) { + resourceFieldConfig['timeStampFields'].push(...timeStampFiledConfig.fields); + } + } + + const resourceAPI: ResourcesAPIBase = new ResourcesAPIBase(db, `${resourceName}s`, resourceFieldConfig); + let isEventsEnabled = cfg.get('events:enableEvents'); + if (isEventsEnabled === 'true') { + isEventsEnabled = true; + } else { // Undefined means events not enabled + isEventsEnabled = false; + } + const 
service = new ServiceBase('Resource', testEvents, + server.logger, resourceAPI, isEventsEnabled); + await server.bind('test', { + service: CRUDDefinition, + implementation: service as any + }); + + const bufferResourceName = 'testBufferedData'; + if (bufferHandlerConfig && entitiesNames.includes(bufferResourceName)) { + if (!resourceFieldConfig) { + resourceFieldConfig = {}; + } + resourceFieldConfig['bufferFields'] = bufferHandlerConfig[bufferResourceName]; + } + + // Create buffered service and bind it to gRPC server + const resourceBufferAPI: ResourcesAPIBase = new ResourcesAPIBase(db, `${bufferResourceName}s`, resourceFieldConfig); + const bufferService = new ServiceBase(bufferResourceName, testEvents, + bufferedServer.logger, resourceBufferAPI, isEventsEnabled); + await bufferedServer.bind('testBufferedService', { + service: CRUDDefinition, + implementation: bufferService as any + }); + + await server.start(); + await bufferedServer.start(); + + channel = createChannel(cfg.get('client:test').address); + testService = createClient({ + ...cfg.get('client:test'), + logger: server.logger + }, CRUDDefinition, channel); + }); + afterAll(async () => { + await channel.close(); + await server.stop(); + await bufferedServer.stop(); + await events.stop(); + }); + describe('endpoints', () => { + beforeEach(async () => { + db = await chassis.database.get(cfg.get('database:testdb'), server.logger) as chassis.GraphDatabaseProvider; + await db.truncate(); + testData = [ + { id: 'test_xy', meta, value: 1, text: 'first simple sentence for searching', active: true, created: today, status: 'GOOD' }, + { id: 'test_xyz', meta, value: 3, text: 'second test data', active: false, created: tomorrow, status: 'BAD' }, + { id: 'test_zy', meta, value: 12, text: 'third search data string', active: false, created: tomorrow, status: 'UNKNOWN' }]; + // await db.insert('resources', testData); + await testService.create({ items: testData, subject: { id: 'Admin' } }); + }); + describe('read', () 
=> { + it('should return all three elements with no arguments', async () => { + const result = await testService.read({}); + should.exist(result); + should.exist(result.items); + should.exist(result.total_count); + result.total_count!.should.be.equal(3); + result.items!.should.be.Array(); + result.items!.should.length(3); + for (let data of testData) { + delete data?.meta?.modified; + } + _.forEach(result.items, (item) => { + // delete modified field as it will be changed when creating + delete item!.payload!.meta!.modified; + testData.should.matchAny(item.payload); + }); + should.exist(result.operation_status); + result.operation_status!.code!.should.equal(200); + result.operation_status!.message!.should.equal('success'); + }); + it('should return two elements with offset 1', async () => { + const compareData = _.drop((await testService.read({})).items, 1); + const result = await testService.read({ + offset: 1, + }); + should.exist(result); + should.exist(result.items); + should.exist(result.total_count); + result.total_count!.should.be.equal(compareData.length); + result.items!.should.be.Array(); + result.items!.should.length(2); + _.sortBy(result.items, 'id').should.deepEqual(_.sortBy(compareData, 'id')); + result.operation_status!.code!.should.equal(200); + result.operation_status!.message!.should.equal('success'); + }); + it('should return two elements with limit 2', async () => { + const compareData = _.dropRight((await testService.read({})).items, 1); + const result = await testService.read({ + limit: 2, + }); + should.exist(result); + should.exist(result.items); + should.exist(result.total_count); + result.total_count!.should.be.equal(compareData.length); + result.items!.should.be.Array(); + result.items!.should.length(2); + _.sortBy(result.items, 'id').should.deepEqual(_.sortBy(compareData, 'id')); + result.operation_status!.code!.should.equal(200); + result.operation_status!.message!.should.equal('success'); + }); + it('should return elements sorted', 
async () => { + const result = await testService.read({ + sorts: [{ + field: 'value', + order: Sort_SortOrder.DESCENDING, + }], + }); + should.exist(result); + should.exist(result.items); + should.exist(result.total_count); + result.total_count!.should.be.equal(3); + result.items!.should.be.Array(); + result.items!.should.length(3); + const testDataDescending = testData.sort((a, b) => { + if (a.value > b.value) { + return -1; + } + if (a.value < b.value) { + return 1; + } + // a must be equal to b + return 0; + }); + // match the descending order + for (let i = 0; i < result.items!.length; i++) { + delete result.items![i].payload!.meta!.modified; + result.items![i].payload!.should.deepEqual(testDataDescending[i]); + } + result.operation_status!.code!.should.equal(200); + result.operation_status!.message!.should.equal('success'); + }); + it('should return only resources with value higher than 10', async () => { + const filters = [{ + filters: [{ + field: 'value', + operation: Filter_Operation.gt, + value: '10', + type: Filter_ValueType.NUMBER + }] + }]; + const result = await testService.read({ + filters + }); + should.exist(result); + should.exist(result.items); + should.exist(result.total_count); + result.total_count!.should.be.equal(1); + result.items!.should.be.Array(); + result.items!.should.length(1); + delete result.items![0].payload!.meta!.modified; + result.items![0].payload!.should.deepEqual(testData[2]); // testData[2] is object with value > 10 + result.operation_status!.code!.should.equal(200); + result.operation_status!.message!.should.equal('success'); + }); + it('should return only resources with string filter value equal to id', async () => { + const filters = [{ + filters: [{ + field: 'id', + operation: Filter_Operation.eq, + value: 'test_xy' + }] + }]; + const result = await testService.read({ + filters + }); + should.exist(result); + should.exist(result.items); + should.exist(result.total_count); + result.total_count!.should.be.equal(1); + 
result.items!.should.be.Array(); + result.items!.should.length(1); + delete result.items![0].payload!.meta!.modified; + result.items![0].payload!.should.deepEqual(testData[0]); // testData[9] is object with value 'test_xy' + result.operation_status!.code!.should.equal(200); + result.operation_status!.message!.should.equal('success'); + }); + it('should return only resources matching boolean filter', async () => { + const filters = [{ + filters: [{ + field: 'active', + operation: Filter_Operation.eq, + value: 'true', + type: Filter_ValueType.BOOLEAN + }] + }]; + const result = await testService.read({ + filters + }); + should.exist(result); + should.exist(result.items); + should.exist(result.total_count); + result.total_count!.should.be.equal(1); + result.items!.should.be.Array(); + result.items!.should.length(1); + delete result.items![0].payload!.meta!.modified; + result.items![0].payload!.should.deepEqual(testData[0]); + result.operation_status!.code!.should.equal(200); + result.operation_status!.message!.should.equal('success'); + }); + it('should return resources matching date filter', async () => { + const todayDatePlusOneMin = new Date(); + todayDatePlusOneMin.setSeconds(todayDatePlusOneMin.getSeconds() + 60); + // timeObject.setSeconds(timeObject.getSeconds() + 60); + const filters = [{ + filters: [{ + field: 'created', + operation: Filter_Operation.lt, + value: todayDatePlusOneMin.toString(), + type: Filter_ValueType.DATE, + }] + }]; + const result = await testService.read({ + filters + }); + should.exist(result); + should.exist(result.items); + should.exist(result.total_count); + result.total_count!.should.be.equal(1); + result.items!.should.be.Array(); + result.items!.should.length(1); + const resultPayload = result.items!.map(item => item.payload); + _.sortBy(resultPayload[0]!.id, 'id').should.deepEqual(_.sortBy(_.filter(testData, (data) => { + return data.created <= today.getTime(); + })[0].id, 'id')); + result.operation_status!.code!.should.equal(200); 
+ result.operation_status!.message!.should.equal('success'); + }); + it('should return resources matching array filter', async () => { + const filters = [{ + filters: [{ + field: 'status', + operation: Filter_Operation.in, + value: '["BAD", "UNKNOWN"]', + type: Filter_ValueType.ARRAY, + }] + }]; + const result = await testService.read({ + filters + }); + should.exist(result); + should.exist(result.items); + should.exist(result.total_count); + result.total_count!.should.be.equal(2); + result.items!.should.be.Array(); + result.items!.should.length(2); + const resultPayload = result.items!.map(item => { + delete item.payload!.meta?.modified; + return item.payload; + }); + _.sortBy(resultPayload, 'id').should.deepEqual(_.sortBy(_.filter(testData, (data) => { + // data.created = new Date(data.created); + delete data.meta!.modified; + return (data.status === "BAD" || data.status === "UNKNOWN"); + }), 'id')); + result.operation_status!.code!.should.equal(200); + result.operation_status!.message!.should.equal('success'); + }); + it('should return only resources with not equal filter', async () => { + const filters = [{ + filters: [{ + field: 'id', + operation: Filter_Operation.neq, + value: 'test_xy', + }] + }]; + const result = await testService.read({ + filters + }); + should.exist(result); + should.exist(result.items); + should.exist(result.total_count); + result.total_count!.should.be.equal(2); + result.items!.should.be.Array(); + result.items!.should.length(2); + // delete modified property + const resultPayload = result.items!.map(item => { + delete item.payload!.meta?.modified; + return item.payload; + }); + _.sortBy(resultPayload, 'id').should.deepEqual(_.sortBy(_.filter(testData, (data) => { + delete data.meta!.modified; + return data.id != 'test_xy'; + }), 'id')); + result.operation_status!.code!.should.equal(200); + result.operation_status!.message!.should.equal('success'); + }, 4000); + it('should return elements only with field value', async () => { + const 
result = await testService.read({ + fields: [{ + name: 'value', + include: true, + }], + }); + should.exist(result); + should.exist(result.items); + should.exist(result.total_count); + result.total_count!.should.be.equal(3); + result.items!.should.be.Array(); + result.items!.should.length(3); + const testDataReduced = [ + { value: testData[0].value }, + { value: testData[1].value }, + { value: testData[2].value }, + ]; + const resultPayload = result.items!.map(item => item.payload); + _.sortBy(resultPayload, 'value').should.deepEqual(_.sortBy(testDataReduced, 'value')); + result.operation_status!.code!.should.equal(200); + result.operation_status!.message!.should.equal('success'); + }); + it('should apply a custom filter', async () => { + const result = await testService.read({ + fields: [{ + name: 'value', + include: true, + }], + custom_queries: ['testFilter'], + custom_arguments: { + value: Buffer.from(JSON.stringify({ testParam: 12 })) + } + }); + should.exist(result); + should.exist(result.items); + should.exist(result.total_count); + + result.total_count!.should.be.equal(2); + result.items!.should.be.Array(); + result.items!.should.length(2); + + const testDataReduced = [ + { value: testData[0].value }, + { value: testData[1].value }, + ]; + const resultPayload = result.items!.map(item => { + delete item.payload!.meta?.modified; + return item.payload; + }); + _.sortBy(resultPayload, 'value').should.deepEqual(_.sortBy(testDataReduced, 'value')); + result.operation_status!.code!.should.equal(200); + result.operation_status!.message!.should.equal('success'); + }); + it('fulltext search - should return only matching documents as per search string (default case insensitive)', async () => { + await new Promise((resolve, reject) => { + setTimeout(resolve, 2000); + }); + const result = await testService.read({ + search: { + search: 'EaRc' // will match search text from above `text` data and return 2 documents + } + }); + result.items!.length.should.equal(2); + 
result.items![0].payload!.id!.should.equal('test_xy'); + result.items![0].payload!.text!.should.equal('first simple sentence for searching'); + result.items![1].payload!.id!.should.equal('test_zy'); + result.items![1].payload!.text!.should.equal('third search data string'); + }, 5000); + + it('fulltext search - should return only matching documents as per search string (default case insensitive)', async () => { + await new Promise((resolve, reject) => { + setTimeout(resolve, 2000); + }); + const result = await testService.read({ + search: { + search: 'data' // will match search text from above `text` data and return 2 documents + } + }); + result.items!.length.should.equal(2); + result.items![0].payload!.id!.should.equal('test_xyz'); + result.items![0].payload!.text!.should.equal('second test data'); + result.items![1].payload!.id!.should.equal('test_zy'); + result.items![1].payload!.text!.should.equal('third search data string'); + }, 5000); + + it('fulltext search - should not return any matching documents as per search string with case sensitive search', async () => { + await new Promise((resolve, reject) => { + setTimeout(resolve, 2000); + }); + const result = await testService.read({ + search: { + search: 'DATA', // will not match search text from above `text` data and should not return any documents + case_sensitive: true + } + }); + should.not.exist(result.items); + }, 5000); + }); + describe('create', () => { + it('should create new documents and validate duplicate element error', async () => { + const meta = { + acl: [], + created_by: 'Admin', + modified_by: 'Admin', + owners: [{ + id: 'urn:restorecommerce:acs:names:ownerIndicatoryEntity', + value: 'urn:restorecommerce:acs:model:user.User', + attributes: [{ + attribute: [], + id: 'urn:restorecommerce:acs:names:ownerInstance', + value: 'Admin' + }] + }] + }; + const newTestDataFirst = { + id: 'test_newdata', + value: -10, + text: 'new data', + meta + }; + const newTestDataSecond = { + id: 'test_newdata2', + 
value: -10, + text: 'new second data', + meta + }; + const testDuplicate = { + id: 'test_newdata2', + value: -10, + text: 'new second data', + meta + }; + const newTestData = [newTestDataFirst, newTestDataSecond, testDuplicate]; + const result = await testService.create({ items: newTestData, subject: { id: 'Admin' } }); + should.exist(result); + should.exist(result.items); + result.items!.should.be.length(3); + result.items!.should.matchEach((e) => { + if (e.payload) { // since there is one element with payload undefined for duplicate element with error status + return e.payload!.value === -10 && e.payload!.text.length > 0; + } + }); + // validate overall status + should.exist(result.operation_status); + result.operation_status!.code!.should.equal(207); + // validate error status for duplicate element + result.items![2].status!.message!.should.equal( + `unique constraint violated - in index primary of type primary over '_key'; conflicting key: test_newdata2` + ); + result.items![2].status!.code!.should.equal(409); + const allTestData = await testService.read({}); + should.exist(allTestData); + should.exist(allTestData.operation_status); + // total 5 items should exist (3 from beginning, 2 from this test case) + allTestData.items!.length.should.equal(5); + + const compareData = _.concat(testData, _.map(result.items, (item) => item.payload)); + // delete modified property from meta data + for (let data of compareData) { + delete data?.meta?.modified; + } + _.forEach(allTestData.items, (e) => { + delete e.payload?.meta?.modified; + compareData.should.matchAny(e.payload!); + }); + }); + }); + describe('delete', () => { + it('should delete collection when requested', async () => { + const result = await testService.delete({ collection: true }); + should.exist(result); + should.exist(result.operation_status); + result.operation_status!.code!.should.equal(200); + result.operation_status!.message!.should.equal('success'); + + const allTestData = await testService.read({}); 
+ should.exist(allTestData); + should.exist(allTestData.operation_status); + should.not.exist(allTestData.items); + allTestData.operation_status!.code!.should.equal(200); + allTestData.operation_status!.message!.should.equal('success'); + }); + it('should delete specified documents and return error if document does not exist', async () => { + const result = await testService.delete({ ids: [testData[1].id, 'invalidID'] }); + should.exist(result); + should.exist(result.status); + // success for 1st id and failure message for second invalid id + result.status![0].code!.should.equal(200); + result.status![0].message!.should.equal('success'); + result.status![1].code!.should.equal(404); + result.status![1].message!.should.equal('document not found'); + should.exist(result.operation_status); + result.operation_status!.code!.should.equal(207); + + const allTestData = await testService.read({}); + should.exist(allTestData); + should.exist(allTestData.operation_status); + should.exist(allTestData); + should.exist(allTestData.items); + allTestData.items!.should.length(2); + allTestData.operation_status!.code!.should.equal(200); + allTestData.operation_status!.message!.should.equal('success'); + const resultPayload = allTestData.items!.map(item => { + delete item.payload!.meta?.modified; + return item.payload; + }); + // delete modified property for testData[0] and testData[2] + delete testData[0]?.meta?.modified; + delete testData[2]?.meta?.modified; + _.sortBy(resultPayload, 'id') + .should.deepEqual(_.sortBy([testData[0], testData[2]], 'id')); + }); + }); + describe('update', () => { + it('should update all specified documents and validate status message', async () => { + const patch = _.map(testData, (data) => { + data.value = 100; + data.text = 'test-patch'; + return data; + }); + const result = await testService.update({ items: patch, subject: { id: 'Admin' } }); + should.exist(result); + should.exist(result.operation_status); + should.exist(result.items); + 
result.items!.should.matchEach((e) => { + return e.payload!.value === 100 && e.payload!.text.length === 10; + }); + result.operation_status!.code!.should.equal(200); + result.operation_status!.message!.should.equal('success'); + + const allTestData = await testService.read({}); + should.exist(allTestData); + should.exist(allTestData.items); + should.exist(allTestData.operation_status); + allTestData.items!.length.should.equal(3); + result.items!.should.matchEach((e) => { + return e.payload!.value === 100 && e.payload!.text.length === 10; + }); + }); + it('should return an error when trying to update invalid document', async () => { + const patch = [{ + id: 'invalidDocument', + value: 2, + text: 'new value' + }]; + const result = await testService.update({ items: patch, subject: { id: 'Admin' } }); + result.items!.should.length(1); + should.exist(result.operation_status); + // validate status of item + result.items![0].status!.code!.should.equal(404); + result.items![0].status!.message!.should.equal('document not found'); + // overall status + result.operation_status!.code!.should.equal(207); + }); + }); + describe('upsert', () => { + it('should create or update specified documents', async () => { + const now = new Date(); + const newID = crypto.randomUUID(); + const replace = [{ + id: 'test_xy', + value: 0, + text: 'updated', + }, { + id: 'test_xyz', + value: 0, + text: 'updated', + }, { + id: newID, + value: 0, + text: 'created', + }]; + const result = await testService.upsert({ items: replace, subject: { id: 'Admin' } }); + should.exist(result); + result.items!.length.should.equal(3); + result.items![0].payload!.id!.should.equal('test_xy'); + should.exist(result.items![0].payload!.meta!.modified); // since it was updated it should have a modified timestamp. 
+ result.items![0].payload!.meta!.modified!.getTime().should.be.greaterThan(now.getTime()); + + should.exist(result.items![2].payload!.meta!.created); + result.items![2].payload!.id!.should.equal(newID); + result.items![2].payload!.meta!.created!.getTime().should.be.greaterThan(now.getTime()); + should.exist(result.operation_status); + should.exist(result.items); + result.items!.should.matchEach((e) => { + return e.payload!.value === 0; + }); + // overall status + result.operation_status!.code!.should.equal(200); + result.operation_status!.message!.should.equal('success'); + + + }); + }); + // Test to check required field + describe('check required fileds', () => { + it('should return an error when trying to insert with missing requried fields', async () => { + let result = await testService.delete({ collection: true }); + should.exist(result); + should.exist(result.operation_status); + result.operation_status!.code!.should.equal(200); + const objectMissingField = [ + { id: 'test_xy', value: 1, meta }, + { id: 'test_xyz', value: 3, meta }, + { id: 'test_zy', value: 12, meta }]; + const result2 = await testService.create({ items: objectMissingField, subject: { id: 'Admin' } }); + should.exist(result2); + should.exist(result2.operation_status); + should.exist(result2.items); + result2.items!.should.length(3); + for (let item of result2.items!) 
{ + item.status!.code!.should.equal(400); + item.status!.message!.should.startWith('Field text is necessary for resource in document'); + } + }); + }); + // Test to check buffered fields + describe('check buffered fileds', () => { + it('should decode the buffered field before storing in DB', + async () => { + // delete existing data and create new bufferdata message + await testService.delete({ collection: true }); + const bufData = { + type_url: '', + value: Buffer.from(JSON.stringify({ testkey: 'testValue' })) + }; + const bufferObjects = [ + { value: 1, data: bufData, meta, text: 'test1' }, + { value: 2, data: bufData, meta, text: 'test2' }]; + let resp = await testService.create({ items: bufferObjects, subject: { id: 'Admin' } }); + // Read directly from DB and compare the JSON data + // because normal read() operation again encodes and sends the data back. + // This way, we check if the data was actually encoded by reading it from the DB. + const result = await db.find('resources'); + should.exist(result); + should.exist(result[0]); + should.exist(result[0].data.testkey); + result[0].data.testkey.should.equal('testValue'); + // delete the collection + await db.truncate('resources'); + }); + }); + }); +}); diff --git a/packages/resource-base-interface/test/graph_traversal.spec.ts b/packages/resource-base-interface/test/graph_traversal.spec.ts new file mode 100644 index 00000000..fb3ee97a --- /dev/null +++ b/packages/resource-base-interface/test/graph_traversal.spec.ts @@ -0,0 +1,795 @@ +import { GraphResourcesServiceBase } from '../src/index.js'; +import { ResourcesAPIBase } from '../src/index.js'; +import { ServiceBase } from '../src/index.js'; +import * as chassis from '@restorecommerce/chassis-srv'; +import { createClient, createChannel, Channel } from '@restorecommerce/grpc-client'; +import { registerProtoMeta } from '@restorecommerce/kafka-client'; +import { Database } from 'arangojs'; +import { createServiceConfig } from '@restorecommerce/service-config'; 
+import { createLogger } from '@restorecommerce/logger'; +import * as should from 'should'; +import * as _ from 'lodash'; +import { + GraphServiceDefinition, + GraphServiceClient, + TraversalRequest, + Filter_Operation as FilterOperation, + Filters_Operator as OperatorType, + Options_Direction as Direction, + protoMetadata, + TraversalResponse +} from '@restorecommerce/rc-grpc-clients/dist/generated-server/io/restorecommerce/graph.js'; +import { + Sort_SortOrder +} from '@restorecommerce/rc-grpc-clients/dist/generated-server/io/restorecommerce/resource_base.js'; +import { it, describe, beforeAll, afterAll } from 'vitest'; + +registerProtoMeta(protoMetadata); + +const database = chassis.database; +let cfg = createServiceConfig(process.cwd() + '/test'); +let server = new chassis.Server(cfg.get('server')); +/* + * Note: To run this test, a running ArangoDB is required. + */ + +/* global describe it before after beforeEach */ + +const fetchAndEquals = async (result: AsyncIterable, expectedVertices: any[], pathCount = 0) => { + let traversalResponse = { data: new Array(), paths: new Array() }; + for await (const partResp of result) { + if ((partResp && partResp.data && partResp.data.value)) { + traversalResponse.data.push(...JSON.parse(partResp.data.value.toString())); + } + if ((partResp && partResp.paths && !_.isEmpty(partResp.paths.value))) { + traversalResponse.paths.push(...JSON.parse(partResp.paths!.value!.toString())); + } + } + + let finalVertices: any = []; + should.exist(traversalResponse.paths); + should.exist(traversalResponse.data); + traversalResponse.paths.should.have.size(pathCount); + traversalResponse.data.should.have.size(expectedVertices.length); + for (let eachVertice of traversalResponse.data) { + finalVertices.push(_.omit(eachVertice, ['_id', 'meta'])); + } + finalVertices = + _.sortBy(finalVertices, [(o) => { return o.id; }]); + finalVertices.should.deepEqual(expectedVertices); +}; + +const testProvider = (providerCfg) => { + 
describe('GraphServiceBase', () => { + let db: any; + let channel: Channel; + let testService: GraphServiceClient; + let testResourceBaseService: GraphServiceClient; + let graphCfg; + let resourcesList; + beforeAll(async () => { + db = await providerCfg.init(); + // graph Service + const graphAPIService = new GraphResourcesServiceBase(db, + cfg.get('fieldHandlers:bufferFields'), createLogger(cfg.get('server:logger'))); + let z: chassis.grpc.BindConfig = { + service: GraphServiceDefinition, + implementation: graphAPIService as any + }; + await server.bind('graphsTestService', z); + + await server.start(); + + channel = createChannel(cfg.get('client:graphsTestService').address); + testService = createClient({ + ...cfg.get('client:graphsTestService'), + logger: server.logger + }, GraphServiceDefinition, channel); + + // Start resource base server for the graph services + graphCfg = cfg.get('graph'); + resourcesList = Object.keys(graphCfg.vertices); + + channel = createChannel(cfg.get('client:test').address); + testResourceBaseService = createClient({ + ...cfg.get('client:test'), + logger: server.logger + }, GraphServiceDefinition, channel); + }); + afterAll(async () => { + // drop DB + const dbHost: string = cfg.get('database:testdb:host'); + const dbPort: string = cfg.get('database:testdb:port'); + const dbName: string = cfg.get('database:testdb:database'); + const db = new Database('http://' + dbHost + ':' + dbPort); + // await db.dropDatabase(dbName); + await channel.close(); + await server.stop(); + }); + + describe('Graphs Collection API', () => { + // STATE <-- lives PERSON has --> CAR belongsto --> PLACE resides --> STATE + let result_1: any, result_2: any, result_3: any, result_4: any; + let service_1: any, service_2: any, service_3: any, service_4: any; + let meta: any; + it('should create a vertex collection and insert data into it', async () => { + let meta = { + owner: [{ owner_entity: 'urn:restorecommerce:acs:model:User', owner_id: 'Admin' }] + }; + const 
personsVertices = [ + { name: 'Alice', id: 'a', car_id: 'c', state_id: 'i', meta }, + { name: 'Bob', id: 'b', car_id: 'd', state_id: 'j', meta } + ]; + const carsVertices = [ + { car: 'bmw', id: 'c', place_id: 'e', meta }, + { car: 'vw', id: 'd', place_id: 'f', meta } + ]; + const placesVertices = [ + { place: 'Munich', id: 'e', state_id: 'g', meta }, + { place: 'wolfsburg', id: 'f', state_id: 'h', meta } + ]; + const statesVertices = [ + { state: 'Bayern', id: 'g', meta }, + { state: 'Saxony', id: 'h', meta }, + { state: 'BW', id: 'i', meta }, + { state: 'Hessen', id: 'j', meta }, + ]; + const graphName = cfg.get('graph:graphName'); + const personCollection = 'persons'; + const carCollection = 'cars'; + const placeCollection = 'places'; + const stateCollection = 'states'; + + const resourceAPI4: ResourcesAPIBase = new ResourcesAPIBase(db, + stateCollection, undefined, graphCfg.vertices[stateCollection], graphName); + service_4 = new ServiceBase(stateCollection, undefined, + server.logger, resourceAPI4, false); + result_4 = await service_4.create({ items: statesVertices }); + + const resourceAPI3: ResourcesAPIBase = new ResourcesAPIBase(db, + placeCollection, undefined, graphCfg.vertices[placeCollection], graphName); + service_3 = new ServiceBase(placeCollection, undefined, + server.logger, resourceAPI3, false); + result_3 = await service_3.create({ items: placesVertices }); + + const resourceAPI2: ResourcesAPIBase = new ResourcesAPIBase(db, + carCollection, undefined, graphCfg.vertices[carCollection], graphName); + service_2 = new ServiceBase(carCollection, undefined, + server.logger, resourceAPI2, false); + result_2 = await service_2.create({ items: carsVertices }); + + const resourceAPI1: ResourcesAPIBase = new ResourcesAPIBase(db, + personCollection, undefined, graphCfg.vertices[personCollection], graphName); + service_1 = new ServiceBase(personCollection, undefined, + server.logger, resourceAPI1, false); + result_1 = await service_1.create({ items: 
personsVertices }); + }); + // test error handling + it('should throw an error for graph traversal for missing collection name / start_vertex', async () => { + // missing collection name in vertices + let result = testService.traversal({ vertices: { start_vertex_ids: ['a'] } }); + for await (const partResp of result) { + partResp.operation_status!.code!.should.equal(500); + partResp.operation_status!.message!.should.equal('missing collection name for vertex id a'); + } + + result = testService.traversal({ vertices: { collection_name: 'person' } }); + for await (const partResp of result) { + partResp.operation_status!.code!.should.equal(500); + partResp.operation_status!.message!.should.equal('missing vertex id for collection_name person'); + } + + // empty collection name for collections + result = testService.traversal({ collection: { collection_name: '' } }); + for await (const partResp of result) { + partResp.operation_status!.code!.should.equal(500); + partResp.operation_status!.message!.should.equal('One of the Vertices or Collection should be defined'); + } + }); + + // traversal without path flag + it('should traverse the graph and return only vertices for Person A', async () => { + const traversalRequest = TraversalRequest.fromPartial({ + vertices: { + collection_name: 'persons', + start_vertex_ids: ['a'] + }, + opts: { direction: Direction.OUTBOUND }, + path: false + }); + const expectedVertices = [ + { name: 'Alice', id: 'a', car_id: 'c', state_id: 'i' }, + { car: 'bmw', id: 'c', place_id: 'e' }, + { place: 'Munich', id: 'e', state_id: 'g' }, + { state: 'Bayern', id: 'g' }, + { state: 'BW', id: 'i' } + ]; + // traverse graph + await fetchAndEquals(testService.traversal(traversalRequest), expectedVertices); + }); + + // traversal with path flag + it('should traverse the graph and return both vertices and paths when paths flag is set to true', async () => { + const traversalRequest = TraversalRequest.fromPartial({ + vertices: { + collection_name: 'persons', 
+ start_vertex_ids: ['a'] + }, + opts: { direction: Direction.OUTBOUND }, + path: true + }); + const expectedVertices = [ + { name: 'Alice', id: 'a', car_id: 'c', state_id: 'i' }, + { car: 'bmw', id: 'c', place_id: 'e' }, + { place: 'Munich', id: 'e', state_id: 'g' }, + { state: 'Bayern', id: 'g' }, + { state: 'BW', id: 'i' } + ]; + // traverse graph + await fetchAndEquals(testService.traversal(traversalRequest), expectedVertices, 4); + }); + + // include vertices + it('should traverse the graph with included vertices options and return only the included vertices', async () => { + const traversalRequest = TraversalRequest.fromPartial({ + vertices: { + collection_name: 'persons', + start_vertex_ids: ['a'] + }, + opts: { direction: Direction.OUTBOUND, include_vertexs: ['cars'] }, + path: true + }); + const expectedVertices = [ + { name: 'Alice', id: 'a', car_id: 'c', state_id: 'i' }, + { car: 'bmw', id: 'c', place_id: 'e' } + ]; + // traverse graph + await fetchAndEquals(testService.traversal(traversalRequest), expectedVertices, 1); + }); + + // exclude vertices + it('should traverse the graph with excluded vertices options and return only traversed data with excluded vertices', async () => { + const traversalRequest = TraversalRequest.fromPartial({ + vertices: { + collection_name: 'persons', + start_vertex_ids: ['a'] + }, + opts: { direction: Direction.OUTBOUND, exclude_vertexs: ['cars'] }, + path: true + }); + const expectedVertices = [ + { name: 'Alice', id: 'a', car_id: 'c', state_id: 'i' }, + { place: 'Munich', id: 'e', state_id: 'g' }, + { state: 'Bayern', id: 'g' }, + { state: 'BW', id: 'i' } + ]; + // traverse graph + await fetchAndEquals(testService.traversal(traversalRequest), expectedVertices, 3); + }); + + // include edges + it('should traverse the graph with included edges options and return vertices from included edges', async () => { + const traversalRequest = TraversalRequest.fromPartial({ + vertices: { + collection_name: 'persons', + 
start_vertex_ids: ['a'] + }, + opts: { direction: Direction.OUTBOUND, include_edges: ['has'] }, + path: true + }); + const expectedVertices = [ + { name: 'Alice', id: 'a', car_id: 'c', state_id: 'i' }, + { car: 'bmw', id: 'c', place_id: 'e' } + ]; + // traverse graph + await fetchAndEquals(testService.traversal(traversalRequest), expectedVertices, 1); + }); + + // exclude edges + it('should traverse the graph with exclude edges options and return vertices from excluded edges', async () => { + const traversalRequest = TraversalRequest.fromPartial({ + vertices: { + collection_name: 'persons', + start_vertex_ids: ['a'] + }, + opts: { direction: Direction.OUTBOUND, exclude_edges: ['belongs'] }, + path: true + }); + const expectedVertices = [ + { name: 'Alice', id: 'a', car_id: 'c', state_id: 'i' }, + { car: 'bmw', id: 'c', place_id: 'e' }, + { state: 'Bayern', id: 'g' }, + { state: 'BW', id: 'i' } + ]; + // traverse graph + await fetchAndEquals(testService.traversal(traversalRequest), expectedVertices, 3); + }); + + // exclude one edge and include another edge of same entity + it('for 2 entities should exclude one entity edge and include another entity edge', async () => { + const traversalRequest = TraversalRequest.fromPartial({ + vertices: { + collection_name: 'persons', + start_vertex_ids: ['a'] + }, + opts: { direction: Direction.OUTBOUND, exclude_edges: ['resides'], include_edges: ['lives'] }, + path: true + }); + const expectedVertices = [ + { name: 'Alice', id: 'a', car_id: 'c', state_id: 'i' }, + { state: 'BW', id: 'i' } + ]; + // traverse graph + await fetchAndEquals(testService.traversal(traversalRequest), expectedVertices, 1); + }); + + // collection traversal + it('should traverse the entire collection and return data from all traversed entities', async () => { + const traversalRequest = TraversalRequest.fromPartial({ + collection: { + collection_name: 'persons' + }, + opts: { direction: Direction.OUTBOUND }, + path: true + }); + const expectedVertices = [ 
+ { name: 'Alice', id: 'a', car_id: 'c', state_id: 'i' }, + { name: 'Bob', id: 'b', car_id: 'd', state_id: 'j' }, + { car: 'bmw', id: 'c', place_id: 'e' }, + { car: 'vw', id: 'd', place_id: 'f' }, + { place: 'Munich', id: 'e', state_id: 'g' }, + { place: 'wolfsburg', id: 'f', state_id: 'h' }, + { state: 'Bayern', id: 'g' }, + { state: 'Saxony', id: 'h' }, + { state: 'BW', id: 'i' }, + { state: 'Hessen', id: 'j' } + ]; + + // traverse graph + await fetchAndEquals(testService.traversal(traversalRequest), expectedVertices, 8); + }); + + // Filter tests for collection traversal + it('with filters should traverse the collection and return data with filtering applied on respective entities', async () => { + const traversalRequest = TraversalRequest.fromPartial({ + collection: { + collection_name: 'persons' + }, + opts: { direction: Direction.OUTBOUND }, + filters: [{ + filters: [{ field: 'car', operation: FilterOperation.eq, value: 'bmw' }], + entity: 'cars' + }, { + filters: [{ field: 'place', operation: FilterOperation.eq, value: 'Munich' }], + operator: OperatorType.or, + entity: 'places' + }], + path: true + }); + const expectedVertices = [{ name: 'Alice', id: 'a', car_id: 'c', state_id: 'i' }, + { name: 'Bob', id: 'b', car_id: 'd', state_id: 'j' }, + { car: 'bmw', id: 'c', place_id: 'e' }, + { place: 'Munich', id: 'e', state_id: 'g' }, + { state: 'Bayern', id: 'g' }, + { state: 'Saxony', id: 'h' }, + { state: 'BW', id: 'i' }, + { state: 'Hessen', id: 'j' }]; + + // traverse graph + await fetchAndEquals(testService.traversal(traversalRequest), expectedVertices, 6); + }); + + // filters with include vertices + it('should traverse the graph with filters and included vertices options and return only the filtered and included vertices', async () => { + const traversalRequest = TraversalRequest.fromPartial({ + collection: { + collection_name: 'persons' + }, + opts: { direction: Direction.OUTBOUND, include_vertexs: ['cars'] }, + filters: [{ + filters: [{ field: 'car', 
operation: FilterOperation.eq, value: 'bmw' }, { field: 'car', operation: FilterOperation.eq, value: 'vw' }], + operator: OperatorType.or, + entity: 'cars' + }], + path: true + }); + const expectedVertices = [ + { name: 'Alice', id: 'a', car_id: 'c', state_id: 'i' }, + { name: 'Bob', id: 'b', car_id: 'd', state_id: 'j' }, + { car: 'bmw', id: 'c', place_id: 'e' }, + { car: 'vw', id: 'd', place_id: 'f' } + ]; + + // traverse graph + await fetchAndEquals(testService.traversal(traversalRequest), expectedVertices, 2); + }); + + // filter with exclude vertices + it('should traverse the graph with filters and excluded vertices options and return only the filtered and excluded vertices', async () => { + const traversalRequest = TraversalRequest.fromPartial({ + collection: { + collection_name: 'persons' + }, + opts: { direction: Direction.OUTBOUND, exclude_vertexs: ['cars'] }, + filters: [{ + filters: [{ field: 'state', operation: FilterOperation.eq, value: 'BW' }, { field: 'state', operation: FilterOperation.eq, value: 'Hessen' }], + operator: OperatorType.or, // Default is AND operation + entity: 'state' + }], + path: true + }); + const expectedVertices = [ + { name: 'Alice', id: 'a', car_id: 'c', state_id: 'i' }, + { name: 'Bob', id: 'b', car_id: 'd', state_id: 'j' }, + { place: 'Munich', id: 'e', state_id: 'g' }, + { place: 'wolfsburg', id: 'f', state_id: 'h' }, + { state: 'Bayern', id: 'g' }, + { state: 'Saxony', id: 'h' }, + { state: 'BW', id: 'i' }, + { state: 'Hessen', id: 'j' }]; + + // traverse graph + await fetchAndEquals(testService.traversal(traversalRequest), expectedVertices, 6); + }); + + // filter with exclude edges + it('for 2 entities should exclude one entity edge and include another entity edge with filtering enabled on second edge entity', async () => { + const traversalRequest = TraversalRequest.fromPartial({ + collection: { + collection_name: 'persons' + }, + opts: { direction: Direction.OUTBOUND, exclude_edges: ['resides'] }, + filters: [{ + 
filters: [{ field: 'state', operation: FilterOperation.eq, value: 'BW' }, { field: 'state', operation: FilterOperation.eq, value: 'Hessen' }], + operator: OperatorType.or, // Default is AND operation + edge: 'lives' + }], + path: true + }); + const expectedVertices = [ + { name: 'Alice', id: 'a', car_id: 'c', state_id: 'i' }, + { name: 'Bob', id: 'b', car_id: 'd', state_id: 'j' }, + { car: 'bmw', id: 'c', place_id: 'e' }, + { car: 'vw', id: 'd', place_id: 'f' }, + { place: 'Munich', id: 'e', state_id: 'g' }, + { place: 'wolfsburg', id: 'f', state_id: 'h' }, + { state: 'BW', id: 'i' }, + { state: 'Hessen', id: 'j' }]; + + // traverse graph + await fetchAndEquals(testService.traversal(traversalRequest), expectedVertices, 6); + }); + + // filter with include edges + it('should traverse the graph with filters and included edges and return only the filtered and included edge vertices data', async () => { + const traversalRequest = TraversalRequest.fromPartial({ + collection: { + collection_name: 'persons' + }, + opts: { direction: Direction.OUTBOUND, include_edges: ['has', 'lives'] }, + filters: [{ + filters: [{ field: 'state', operation: FilterOperation.eq, value: 'BW' }, { field: 'state', operation: FilterOperation.eq, value: 'Hessen' }], + operator: OperatorType.or, // Default is AND operation + edge: 'lives' + }], + path: true + }); + const expectedVertices = [ + { name: 'Alice', id: 'a', car_id: 'c', state_id: 'i' }, + { name: 'Bob', id: 'b', car_id: 'd', state_id: 'j' }, + { car: 'bmw', id: 'c', place_id: 'e' }, + { car: 'vw', id: 'd', place_id: 'f' }, + { state: 'BW', id: 'i' }, + { state: 'Hessen', id: 'j' }]; + + // traverse graph + await fetchAndEquals(testService.traversal(traversalRequest), expectedVertices, 4); + }); + + // pagination - with limit should traverse along only the limit entities + it('pagination - should traverse the graph through only first entity when limit filter is specified for root entity', async () => { + const traversalRequest = 
TraversalRequest.fromPartial({ + collection: { + collection_name: 'persons', + limit: 1 + }, + opts: { direction: Direction.OUTBOUND }, + path: true + }); + const expectedVertices = [ + { name: 'Alice', id: 'a', car_id: 'c', state_id: 'i' }, + { car: 'bmw', id: 'c', place_id: 'e' }, + { place: 'Munich', id: 'e', state_id: 'g' }, + { state: 'Bayern', id: 'g' }, + { state: 'BW', id: 'i' }]; + + // traverse graph + await fetchAndEquals(testService.traversal(traversalRequest), expectedVertices, 4); + }); + + // pagination with both limit and offset + it('pagination - should traverse the graph through only from second entity when limit and offset filter is specified for root entity', async () => { + const traversalRequest = TraversalRequest.fromPartial({ + collection: { + collection_name: 'persons', + limit: 1, + offset: 1 + }, + opts: { direction: Direction.OUTBOUND }, + path: true + }); + const expectedVertices = [ + { name: 'Bob', id: 'b', car_id: 'd', state_id: 'j' }, + { car: 'vw', id: 'd', place_id: 'f' }, + { place: 'wolfsburg', id: 'f', state_id: 'h' }, + { state: 'Saxony', id: 'h' }, + { state: 'Hessen', id: 'j' }]; + + // traverse graph + await fetchAndEquals(testService.traversal(traversalRequest), expectedVertices, 4); + }); + + // traversal through list of vertices + it('array start vertices - should traverse the graph through list of specified start vertices', async () => { + const traversalRequest = TraversalRequest.fromPartial({ + vertices: { + collection_name: 'persons', + start_vertex_ids: ['a', 'b'] + }, + opts: { direction: Direction.OUTBOUND }, + path: true + }); + const expectedVertices = [ + { name: 'Alice', id: 'a', car_id: 'c', state_id: 'i' }, + { name: 'Bob', id: 'b', car_id: 'd', state_id: 'j' }, + { car: 'bmw', id: 'c', place_id: 'e' }, + { car: 'vw', id: 'd', place_id: 'f' }, + { place: 'Munich', id: 'e', state_id: 'g' }, + { place: 'wolfsburg', id: 'f', state_id: 'h' }, + { state: 'Bayern', id: 'g' }, + { state: 'Saxony', id: 'h' }, + { 
state: 'BW', id: 'i' }, + { state: 'Hessen', id: 'j' }]; + + // traverse graph + await fetchAndEquals(testService.traversal(traversalRequest), expectedVertices, 8); + }); + + // traversal from Car entity with specified vertices + it('car entity - should traverse the graph from Car vertice and return list of traversed entities from Car entity', async () => { + const traversalRequest = TraversalRequest.fromPartial({ + vertices: { + collection_name: 'cars', + start_vertex_ids: ['c'] + }, + opts: { direction: Direction.OUTBOUND }, + path: true + }); + const expectedVertices = [ + { car: 'bmw', id: 'c', place_id: 'e' }, + { place: 'Munich', id: 'e', state_id: 'g' }, + { state: 'Bayern', id: 'g' }]; + + // traverse graph + await fetchAndEquals(testService.traversal(traversalRequest), expectedVertices, 2); + }); + + // collection traversal from car entity + it('car entity - should traverse the graph from Car Collection and return all list of traversed entities from Car entity', async () => { + const traversalRequest = TraversalRequest.fromPartial({ + collection: { + collection_name: 'cars' + }, + opts: { direction: Direction.OUTBOUND }, + path: true + }); + const expectedVertices = [ + { car: 'bmw', id: 'c', place_id: 'e' }, + { car: 'vw', id: 'd', place_id: 'f' }, + { place: 'Munich', id: 'e', state_id: 'g' }, + { place: 'wolfsburg', id: 'f', state_id: 'h' }, + { state: 'Bayern', id: 'g' }, + { state: 'Saxony', id: 'h' }]; + + // traverse graph + await fetchAndEquals(testService.traversal(traversalRequest), expectedVertices, 4); + }); + + // traversal from Place entity with inbound vertices + it('inbound traversal - should traverse the graph from Place vertice in inbound direction and return list of traversed entities from Place entity', async () => { + const traversalRequest = TraversalRequest.fromPartial({ + vertices: { + collection_name: 'places', + start_vertex_ids: ['e'] + }, + opts: { direction: Direction.INBOUND }, + path: true + }); + const expectedVertices = [ + 
{ name: 'Alice', id: 'a', car_id: 'c', state_id: 'i' }, + { car: 'bmw', id: 'c', place_id: 'e' }, + { place: 'Munich', id: 'e', state_id: 'g' }]; + + // traverse graph + await fetchAndEquals(testService.traversal(traversalRequest), expectedVertices, 2); + }); + + // traversal from Place Collection with inbound vertices + it('inbound traversal - should traverse the graph from Place collection in inbound direction and return list of all traversed entities from Place entity', async () => { + const traversalRequest = TraversalRequest.fromPartial({ + collection: { + collection_name: 'places' + }, + opts: { direction: Direction.INBOUND }, + path: true + }); + const expectedVertices = [ + { name: 'Alice', id: 'a', car_id: 'c', state_id: 'i' }, + { name: 'Bob', id: 'b', car_id: 'd', state_id: 'j' }, + { car: 'bmw', id: 'c', place_id: 'e' }, + { car: 'vw', id: 'd', place_id: 'f' }, + { place: 'Munich', id: 'e', state_id: 'g' }, + { place: 'wolfsburg', id: 'f', state_id: 'h' }]; + + // traverse graph + await fetchAndEquals(testService.traversal(traversalRequest), expectedVertices, 4); + }); + + // sort root collection in DESC order + it('should sort the root collection in descending order and return data from all traversed entities', async () => { + const traversalRequest = TraversalRequest.fromPartial({ + collection: { + collection_name: 'persons', + sorts: [{ field: 'name', order: Sort_SortOrder.DESCENDING }] + }, + opts: { direction: Direction.OUTBOUND }, + path: true + }); + // traverse graph + let result = testService.traversal(traversalRequest); + + let traversalResponse = { data: new Array(), paths: new Array() }; + for await (const partResp of result) { + if ((partResp && partResp.data && partResp.data.value)) { + traversalResponse.data.push(...JSON.parse(partResp.data.value.toString())); + } + if ((partResp && partResp.paths && partResp.paths.value)) { + traversalResponse.paths.push(...JSON.parse(partResp.paths.value.toString())); + } + } + + 
should.exist(traversalResponse.paths); + should.exist(traversalResponse.data); + // Descending order for persons entity + traversalResponse.data[0].name.should.equal('Bob'); + traversalResponse.data[1].name.should.equal('Alice'); + }); + + // update the edge data for car_id for persons + // then do a traversal request for Alice Person and for Bob person + // separately and verify the cars are interchanged + it('should validate update person vertices with updated car id', async () => { + // update meta as well + const meta = { + owner: [{ owner_entity: 'urn:restorecommerce:acs:model:User', owner_id: 'NewAdmin' }] + }; + const updatedPersonsVertices = [ + { name: 'Alice', id: 'a', car_id: 'd', state_id: 'i', meta }, + { name: 'Bob', id: 'b', car_id: 'c', state_id: 'j', meta } + ]; + result_1 = await service_1.update({ items: updatedPersonsVertices }); + + // Alice traversal request + const traversalRequest = TraversalRequest.fromPartial({ + vertices: { + collection_name: 'persons', + start_vertex_ids: ['a'] + }, + opts: { direction: Direction.OUTBOUND, include_vertexs: ['cars'] }, + path: true + }); + const expectedVertices = [ + { name: 'Alice', id: 'a', car_id: 'd', state_id: 'i' }, + { car: 'vw', id: 'd', place_id: 'f' } + ]; + + // traverse graph + await fetchAndEquals(testService.traversal(traversalRequest), expectedVertices, 1); + + // Bob traversal request + const traversalRequest1 = TraversalRequest.fromPartial({ + vertices: { + collection_name: 'persons', + start_vertex_ids: ['b'] + }, + opts: { direction: Direction.OUTBOUND, include_vertexs: ['cars'] }, + path: true + }); + const expectedVertices1 = [ + { name: 'Bob', id: 'b', car_id: 'c', state_id: 'j' }, + { car: 'bmw', id: 'c', place_id: 'e' }]; + + // traverse graph + await fetchAndEquals(testService.traversal(traversalRequest1), expectedVertices1, 1); + }); + + // do an upsert api for Person with interchanging the car_id again and then inserting a new person + // Read Alice again and verify it has old 
car + // Read new Person and only person should exist + it('should validate upsert person vertices with updated car id and inserting new person vertice', async () => { + // update meta as well + const meta = { + owner: [{ owner_entity: 'urn:restorecommerce:acs:model:User', owner_id: 'Admin' }] + }; + const upsertedPersonsVertices = [ + { name: 'Alice', id: 'a', car_id: 'c', state_id: 'i', meta }, + { name: 'Bob', id: 'b', car_id: 'd', state_id: 'j', meta }, + { name: 'NewPerson', id: 'newPersonID', car_id: 'c', state_id: 'i', meta } + ]; + result_1 = await service_1.upsert({ items: upsertedPersonsVertices }); + + // Alice traversal request to verify car_id is reverted + const traversalRequest = TraversalRequest.fromPartial({ + vertices: { + collection_name: 'persons', + start_vertex_ids: ['a'] + }, + opts: { direction: Direction.OUTBOUND, include_vertexs: ['cars'] }, + path: true + }); + const expectedVertices = [ + { name: 'Alice', id: 'a', car_id: 'c', state_id: 'i' }, + { car: 'bmw', id: 'c', place_id: 'e' }]; + + // traverse graph + await fetchAndEquals(testService.traversal(traversalRequest), expectedVertices, 1); + + // NewPerson traversal request to verify connected edges + const traversalRequest1 = TraversalRequest.fromPartial({ + vertices: { + collection_name: 'persons', + start_vertex_ids: ['newPersonID'] + }, + opts: { direction: Direction.OUTBOUND }, + path: true + }); + const expectedVertices1 = [ + { car: 'bmw', id: 'c', place_id: 'e' }, + { place: 'Munich', id: 'e', state_id: 'g' }, + { state: 'Bayern', id: 'g' }, + { state: 'BW', id: 'i' }, + { name: 'NewPerson', id: 'newPersonID', car_id: 'c', state_id: 'i' }]; + + // traverse graph + await fetchAndEquals(testService.traversal(traversalRequest1), expectedVertices1, 4); + }); + + it('delete vertices, should delete the edges associated as well', + async () => { + // Deleting the ids of vertexCollection 'cars' should remove + // both 'person_has_car' and 'car_has_org' both edges + await 
service_2.delete({ collection: 'cars' }); + // await service_2.delete({ ids: ['c', 'd'] }); + }); + }); + }); +}; + +const providers = [ + { + name: 'arango', + init: async (): Promise => { + return database.get( + cfg.get('database:testdb'), + server.logger, + 'testGraph', + cfg.get('graph:edgeDefinitions') + ); + } + } +]; +providers.forEach((providerCfg) => { + describe(`with database provider ${providerCfg.name}`, () => { + testProvider(providerCfg); + }); +}); diff --git a/packages/resource-base-interface/test/views/resources_view.json b/packages/resource-base-interface/test/views/resources_view.json new file mode 100644 index 00000000..1a9326f3 --- /dev/null +++ b/packages/resource-base-interface/test/views/resources_view.json @@ -0,0 +1,73 @@ +{ + "view": { + "collectionName": "resources", + "viewName": "resources_view", + "similarityThreshold": 0.1, + "options": { + "type": "arangosearch", + "links": { + "resources": { + "includeAllFields": true, + "fields": { + "text": { + "analyzers": [ + "trigram", + "trigram_norm" + ] + } + } + } + } + } + }, + "analyzers": [ + "trigram", + "trigram_norm" + ], + "analyzerOptions": [ + { + "trigram": { + "type": "ngram", + "properties": { + "min": 3, + "max": 3, + "preserveOriginal": true + }, + "features": [ + "frequency", + "position", + "norm" + ] + } + }, + { + "trigram_norm": { + "type": "pipeline", + "properties": { + "pipeline": [ + { + "type": "norm", + "properties": { + "locale": "de.utf-8", + "case": "lower" + } + }, + { + "type": "ngram", + "properties": { + "min": 3, + "max": 3, + "preserveOriginal": true + } + } + ] + }, + "features": [ + "frequency", + "position", + "norm" + ] + } + } + ] + } \ No newline at end of file diff --git a/packages/resource-base-interface/tsconfig.json b/packages/resource-base-interface/tsconfig.json new file mode 100644 index 00000000..67b1780c --- /dev/null +++ b/packages/resource-base-interface/tsconfig.json @@ -0,0 +1,18 @@ +{ + "extends": 
"../../node_modules/@restorecommerce/dev/tsconfig.json", + "include": [ + "./src/core/*.ts", + "./src/experimental/*.ts", + "./src/*.ts", + ], + "exclude": [ + "node_modules", + "lib", + "doc" + ], + "compilerOptions": { + "outDir": "lib", + "emitDecoratorMetadata": true, + "experimentalDecorators": true + }, +} \ No newline at end of file diff --git a/packages/resource-base-interface/tsconfig.test.json b/packages/resource-base-interface/tsconfig.test.json new file mode 100644 index 00000000..b21d56e5 --- /dev/null +++ b/packages/resource-base-interface/tsconfig.test.json @@ -0,0 +1,9 @@ +{ + "extends": "./tsconfig.json", + "compilerOptions": { + "outDir": "test" + }, + "include": [ + "./test/**/*.ts" + ] +} diff --git a/packages/resource-base-interface/vitest.config.ts b/packages/resource-base-interface/vitest.config.ts new file mode 100644 index 00000000..e1a270bc --- /dev/null +++ b/packages/resource-base-interface/vitest.config.ts @@ -0,0 +1,2 @@ +import config from '../../node_modules/@restorecommerce/dev/vitest.config.js'; +export default config;