diff --git a/CHANGELOG.md b/CHANGELOG.md index 578e0e5b84..a787d4cba6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,21 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +## 0.28.0 + +### Breaking Changes + +#### Cloud config + +- `deploymentId` and `encryptedDeploymentId` removed from **uploadAdapter** options + +### Added + +#### resolve runtime + +- `clientIp` in request object + + ## 0.27.0 ### Breaking Changes diff --git a/docs/README.md b/docs/README.md index b566904710..e89b306b09 100644 --- a/docs/README.md +++ b/docs/README.md @@ -51,8 +51,8 @@ title: reSolve Documentation - [Advanced Techniques](advanced-techniques.md) - [Splitting Code Into Chunks](advanced-techniques.md#splitting-code-into-chunks) - - [Server-Side Rendering](advanced-techniques.md#server-side-rendering) - [Adapters](advanced-techniques.md#adapters) + - [Custom Read Models](advanced-techniques.md#custom-read-models) - [Modules](advanced-techniques.md#modules) - [Authentication and Authorization](authentication-and-authorization.md) diff --git a/docs/advanced-techniques.md b/docs/advanced-techniques.md index c46a29902e..cf50bc882c 100644 --- a/docs/advanced-techniques.md +++ b/docs/advanced-techniques.md @@ -37,6 +37,147 @@ Resolve comes with a set of adapters covering popular DBMS choices. You can also Note that reSolve does not force you to use adapters. For example, you may need to implement a Read Model on top of some arbitrary system, such as a full-text-search engine, OLAP or a particular SQL database. In such case, you can just work with that system in the code of the projection function and query resolver, without writing a new Read Model adapter. +## Custom Read Model Connectors + +You can implement a custom Read Model connector to define how a Read Model's data is stored. 
A connector implements the following functions: + +- **connect** - Initializes a connection to a storage. +- **disconnect** - Closes the storage connection. +- **drop** - Removes the Read Model's data from storage. +- **dispose** - Forcefully disposes all unmanaged resources used by Read Models served by this connector. + +The code sample below demonstrates how to implement a connector that provides a file-based storage for Read Models. + +##### common/read-models/custom-read-model-connector.js: + + + +[mdis]:# (../tests/custom-readmodel-sample/connector.js) +```js +import fs from 'fs' + +const safeUnlinkSync = filename => { + if (fs.existsSync(filename)) { + fs.unlinkSync(filename) + } +} + +export default options => { + const prefix = String(options.prefix) + const readModels = new Set() + const connect = async readModelName => { + fs.writeFileSync(`${prefix}${readModelName}.lock`, true, { flag: 'wx' }) + readModels.add(readModelName) + const store = { + get() { + return JSON.parse(String(fs.readFileSync(`${prefix}${readModelName}`))) + }, + set(value) { + fs.writeFileSync(`${prefix}${readModelName}`, JSON.stringify(value)) + } + } + return store + } + const disconnect = async (store, readModelName) => { + safeUnlinkSync(`${prefix}${readModelName}.lock`) + readModels.delete(readModelName) + } + const drop = async (store, readModelName) => { + safeUnlinkSync(`${prefix}${readModelName}.lock`) + safeUnlinkSync(`${prefix}${readModelName}`) + } + const dispose = async () => { + for (const readModelName of readModels) { + safeUnlinkSync(`${prefix}${readModelName}.lock`) + } + readModels.clear() + } + return { + connect, + disconnect, + drop, + dispose + } +} +``` + + + +A connector is defined as a function that receives an `options` argument. This argument contains a custom set of options that you can specify in the connector's configuration. + +Register the connector in the application's configuration file. 
+ +##### config.app.js: + +```js +readModelConnectors: { + customReadModelConnector: { + module: 'common/read-models/custom-read-model-connector.js', + options: { + prefix: path.join(__dirname, 'data') + path.sep // Path to a folder that contains custom Read Model store files + } + } +} +``` + +Now you can assign the custom connector to a Read Model by name as shown below. + +##### config.app.js: + +```js + readModels: [ + { + name: 'CustomReadModel', + projection: 'common/read-models/custom-read-model.projection.js', + resolvers: 'common/read-models/custom-read-model.resolvers.js', + connectorName: 'customReadModelConnector' + } + ... + ] +``` + +The code sample below demonstrates how you can use the custom store's API in the Read Model's code. + +##### common/read-models/custom-read-model.projection.js: + + + +[mdis]:# (../tests/custom-readmodel-sample/projection.js) +```js +const projection = { + Init: async store => { + await store.set(0) + }, + INCREMENT: async (store, event) => { + await store.set((await store.get()) + event.payload) + }, + DECREMENT: async (store, event) => { + await store.set((await store.get()) - event.payload) + } +} + +export default projection +``` + + + +##### common/read-models/custom-read-model.resolvers.js: + + + +[mdis]:# (../tests/custom-readmodel-sample/resolvers.js) +```js +const resolvers = { + read: async store => { + return await store.get() + } +} + +export default resolvers +``` + + + ## Modules In reSolve, a module encapsulates a fragment of functionality that can be included by an application. A module can include any structural parts of a reSolve application in any combination. diff --git a/docs/api-reference.md b/docs/api-reference.md index 188f2018a9..94b199005d 100644 --- a/docs/api-reference.md +++ b/docs/api-reference.md @@ -1840,11 +1840,11 @@ Multiple middleware functions are run in the order they are specified in the opt This section lists request middleware included into the @resolve-js/client package. 
The following middleware is available: -| Name | Description | -| ------------------- | --------------------------------------------------------- | -| [parseResponse]() | Deserializes the response data if it contains valid JSON. | -| [retryOnError]() | Retries the request if the server responds with an error. | -| [waitForResponse]() | Validates the response and retries if validation fails. | +| Name | Description | +| ----------------------------------- | --------------------------------------------------------- | +| [parseResponse](#parseresponse) | Deserializes the response data if it contains valid JSON. | +| [retryOnError](#retryonerror) | Retries the request if the server responds with an error. | +| [waitForResponse](#waitforresponse) | Validates the response and retries if validation fails. | ##### parseResponse diff --git a/docs/application-configuration.md b/docs/application-configuration.md index 6840ecfacb..5cb1e4c1fd 100644 --- a/docs/application-configuration.md +++ b/docs/application-configuration.md @@ -10,7 +10,7 @@ This document describes configuration options available for a reSolve applicatio In a new reSolve application, configuration settings are split across the following files for different run targets: - **config.app.js** - Contains general app configuration settings. In this file, you should register the application's aggregates, Read Models and View Models. -- **config.cloud.js** - Contains configuration settings that target the [reSolve Cloud](cloud-overview.md) environment. +- **config.cloud.js** - Contains configuration settings that target the reSolve Cloud environment. - **config.dev.js** - Contains configuration settings that target the development server. - **config.prod.js** - Contains configuration settings that target the production server. - **config.test_functional.js** - Contains configuration settings that target the test environment. 
diff --git a/docs/read-side.md b/docs/read-side.md index 82ebb05230..26f2e45cb8 100644 --- a/docs/read-side.md +++ b/docs/read-side.md @@ -99,147 +99,6 @@ const appConfig = { In the configuration object, specify the View Model's name and the path to the file containing projection definition. You can also specify the View Model snapshot storage adapter. Use the **serializeState** and **deserializeState** options to specify paths to a View Model's serializer and deserializer functions. Specify the **resolver** option to add a [View Model resolver](#view-model-resolver) to the View Model. -### Custom Read Models - -To create a custom Read Model, you need to manually implement a Read Model connector. A connector defines functions that manage a custom Read Model's store. The following functions can be defined: - -- **connect** - Initializes a connection to a storage. -- **disconnect** - Closes the storage connection. -- **drop** - Removes the Read Model's data from storage. -- **dispose** - Forcefully disposes all unmanaged resources used by Read Models served by this connector. - -The code sample below demonstrates how to implement a connector that provides a file-based storage for Read Models. 
- -##### common/read-models/custom-read-model-connector.js: - - - -[mdis]:# (../tests/custom-readmodel-sample/connector.js) -```js -import fs from 'fs' - -const safeUnlinkSync = filename => { - if (fs.existsSync(filename)) { - fs.unlinkSync(filename) - } -} - -export default options => { - const prefix = String(options.prefix) - const readModels = new Set() - const connect = async readModelName => { - fs.writeFileSync(`${prefix}${readModelName}.lock`, true, { flag: 'wx' }) - readModels.add(readModelName) - const store = { - get() { - return JSON.parse(String(fs.readFileSync(`${prefix}${readModelName}`))) - }, - set(value) { - fs.writeFileSync(`${prefix}${readModelName}`, JSON.stringify(value)) - } - } - return store - } - const disconnect = async (store, readModelName) => { - safeUnlinkSync(`${prefix}${readModelName}.lock`) - readModels.delete(readModelName) - } - const drop = async (store, readModelName) => { - safeUnlinkSync(`${prefix}${readModelName}.lock`) - safeUnlinkSync(`${prefix}${readModelName}`) - } - const dispose = async () => { - for (const readModelName of readModels) { - safeUnlinkSync(`${prefix}${readModelName}.lock`) - } - readModels.clear() - } - return { - connect, - disconnect, - drop, - dispose - } -} -``` - - - -A connector is defined as a function that receives an `options` argument. This argument contains a custom set of options that you can specify in the connector's configuration. - -Register the connector in the application's configuration file. - -##### config.app.js: - -```js -readModelConnectors: { - customReadModelConnector: { - module: 'common/read-models/custom-read-model-connector.js', - options: { - prefix: path.join(__dirname, 'data') + path.sep // Path to a folder that contains custom Read Model store files - } - } -} -``` - -Now you can assign the custom connector to a Read Model by name as shown below. 
- -##### config.app.js: - -```js - readModels: [ - { - name: 'CustomReadModel', - projection: 'common/read-models/custom-read-model.projection.js', - resolvers: 'common/read-models/custom-read-model.resolvers.js', - connectorName: 'customReadModelConnector' - } - ... - ] -``` - -The code sample below demonstrates how you can use the custom store's API in the Read Model's code. - -##### common/read-models/custom-read-model.projection.js: - - - -[mdis]:# (../tests/custom-readmodel-sample/projection.js) -```js -const projection = { - Init: async store => { - await store.set(0) - }, - INCREMENT: async (store, event) => { - await store.set((await store.get()) + event.payload) - }, - DECREMENT: async (store, event) => { - await store.set((await store.get()) - event.payload) - } -} - -export default projection -``` - - - -##### common/read-models/custom-read-model.resolvers.js: - - - -[mdis]:# (../tests/custom-readmodel-sample/resolvers.js) -```js -const resolvers = { - read: async store => { - return await store.get() - } -} - -export default resolvers -``` - - - ## Initialize a Read Model Each Read Model projection object should define an **Init** function that initializes the Read Model storage. @@ -324,11 +183,15 @@ Refer to the [Query a Read Model](#query-a-read-model) section for information o ## View Model Specifics -**View Models** are a special kind of Read Models. They are queried based on aggregate ID and and can automatically provide updates to Redux state on the client. View Models are defined in a special isomorphic format so their code can also be used on the client side to provide reducer logic. +**View Models** are ephemeral Read Models that are queried based on aggregate ID. They have the following properties: + +- View Models are rebuilt on every request. They do not store persistent state and do not use the Read Model store. +- View Models are queried based on aggregate ID and can maintain a WebSocket connection to push data updates to the client. 
+- View Model projections are defined in a format that is isomorphic with Redux reducers so their code can also be used on the client side to define reducer logic. Use View Models in the following scenarios: -- To create aggregate-centric views. Such views request relatively small portions of data based on aggregate IDs. +- To create aggregate-centric views that request relatively small portions of data based on aggregate IDs. - To create reactive components, whose state is kept up-to date on the client. A View Model's projection function receives a state and an event object, and returns an updated state. A projection function runs for every event with the specified aggregate ID from the beginning of the history on every request so it is important to keep View Models small. You can also store snapshots of the View Model state to optimize system resource consumption. @@ -356,8 +219,6 @@ The code sample below demonstrates a View Model projection function: Refer to the [Query a View Model](#query-a-view-model) section, for information on how to query a View Model. -Note that a View Model does not use the Read Model store. - ## View Model Resolver A View Model's **resolver** allows you to restrict a user's access to the View Model's data. A resolver function receives the following parameters: @@ -370,9 +231,9 @@ In the resolver's code, you can use arbitrary logic to check a user's access per The resolver function should return a built View Model data object and a meta object that contains the following data: -- A cursor returned by the `buildViewModel` function; -- A list of event types; -- A list of aggregate IDs. +- The data cursor used to traverse the events included into the query result set. The initial cursor is returned by the `buildViewModel` function; +- A list of event types available to the client; +- A list of aggregate IDs available to the client. 
The code sample below demonstrates a View Model resolver implementation: diff --git a/examples/cli-uploader/config.cloud.js b/examples/cli-uploader/config.cloud.js index cbc5ddff12..06d3c8690e 100644 --- a/examples/cli-uploader/config.cloud.js +++ b/examples/cli-uploader/config.cloud.js @@ -44,10 +44,6 @@ export default { }, uploadAdapter: { options: { - encryptedDeploymentId: declareRuntimeEnv( - 'RESOLVE_ENCRYPTED_DEPLOYMENT_ID' - ), - deploymentId: declareRuntimeEnv('RESOLVE_DEPLOYMENT_ID'), CDN: declareRuntimeEnv('RESOLVE_UPLOADER_URL'), uploaderArn: declareRuntimeEnv('RESOLVE_UPLOADER_LAMBDA_ARN'), }, diff --git a/examples/image-gallery/config.cloud.js b/examples/image-gallery/config.cloud.js index 028960ad0e..b0953cab68 100644 --- a/examples/image-gallery/config.cloud.js +++ b/examples/image-gallery/config.cloud.js @@ -33,10 +33,6 @@ export default { }, uploadAdapter: { options: { - encryptedDeploymentId: declareRuntimeEnv( - 'RESOLVE_ENCRYPTED_DEPLOYMENT_ID' - ), - deploymentId: declareRuntimeEnv('RESOLVE_DEPLOYMENT_ID'), CDN: declareRuntimeEnv('RESOLVE_UPLOADER_URL'), uploaderArn: declareRuntimeEnv('RESOLVE_UPLOADER_LAMBDA_ARN'), }, diff --git a/examples/personal-data/config.cloud.js b/examples/personal-data/config.cloud.js index a5b9724e56..925f7d29c7 100644 --- a/examples/personal-data/config.cloud.js +++ b/examples/personal-data/config.cloud.js @@ -31,10 +31,6 @@ const cloudConfig = { }, uploadAdapter: { options: { - encryptedDeploymentId: declareRuntimeEnv( - 'RESOLVE_ENCRYPTED_DEPLOYMENT_ID' - ), - deploymentId: declareRuntimeEnv('RESOLVE_DEPLOYMENT_ID'), CDN: declareRuntimeEnv('RESOLVE_UPLOADER_URL'), uploaderArn: declareRuntimeEnv('RESOLVE_UPLOADER_LAMBDA_ARN'), }, diff --git a/functional-tests/app/client/hooks/components/FileUploader.js b/functional-tests/app/client/hooks/components/FileUploader.js new file mode 100644 index 0000000000..4586b15104 --- /dev/null +++ b/functional-tests/app/client/hooks/components/FileUploader.js @@ -0,0 +1,162 @@ 
+import React, { useState, useCallback, useContext } from 'react' +import { Form, Input, Button, FormGroup, CustomInput } from 'reactstrap' +import FileUploadProgress from 'react-fileupload-progress' +import { + getCDNBasedUrl, + getFormUpload, + getToken, +} from '@resolve-js/module-uploader' + +import UploaderContext from '../context' +import { v4 as uuid } from 'uuid' + +const DIRECTORY = 'images' + +const FileUploader = () => { + const [state, setState] = useState({ + form: { + fields: {}, + url: '', + }, + uploadId: null, + token: '', + mimeType: '', + fileName: '', + picked: false, + loaded: null, + }) + + const { + form: { url, fields }, + uploadId, + token, + mimeType, + loaded, + picked, + } = state + + const uploaderContext = useContext(UploaderContext) + const { CDNUrl } = uploaderContext + + const handleGetUrl = useCallback(() => { + getFormUpload({ dir: DIRECTORY }).then((result) => { + const { form, uploadId } = result + getToken({ dir: DIRECTORY }).then((token) => + setState({ + ...state, + token, + form, + uploadId, + aggregateId: uuid(), + loaded: false, + }) + ) + }) + }, [state]) + + const handlePickFile = () => { + setState({ ...state, picked: true }) + } + + const inputRef = React.createRef() + + const uploadFormRender = (onSubmitHandler) => ( +
+ {Object.keys(fields).map((key, index) => ( + + ))} + + + + + +
+ ) + + const formGetter = () => { + const form = new FormData(document.querySelector('#uploadForm')) + return form + } + + const onLoad = useCallback(() => { + setState({ + ...state, + loaded: true, + uploadId, + }) + }, [uploadId]) + + return ( +
+
+ {uploadId == null && ( + + + + )} +
+ {uploadId != null && ( +
+ +
+ )} + {loaded && uploadId != null && ( + + )} +
+ ) +} + +export { FileUploader } diff --git a/functional-tests/app/client/hooks/context.js b/functional-tests/app/client/hooks/context.js new file mode 100644 index 0000000000..c82fb574d7 --- /dev/null +++ b/functional-tests/app/client/hooks/context.js @@ -0,0 +1,7 @@ +import React from 'react' + +const UploaderContext = React.createContext({ + CDNUrl: undefined, +}) + +export default UploaderContext diff --git a/functional-tests/app/client/hooks/routes.js b/functional-tests/app/client/hooks/routes.js index d3cf915784..a219ff0b89 100644 --- a/functional-tests/app/client/hooks/routes.js +++ b/functional-tests/app/client/hooks/routes.js @@ -2,6 +2,7 @@ import { App } from './components/App' import { Counter } from './components/Counter' import { UseRequestMiddleware } from './components/UseRequestMiddleware' import { SecretsManager } from './components/SecretsManager' +import { FileUploader } from './components/FileUploader' export default [ { @@ -21,6 +22,11 @@ export default [ component: SecretsManager, exact: true, }, + { + path: '/file-uploader', + component: FileUploader, + exact: true, + }, ], }, ] diff --git a/functional-tests/app/client/index.js b/functional-tests/app/client/index.js index ec7ab297a9..a731a30068 100644 --- a/functional-tests/app/client/index.js +++ b/functional-tests/app/client/index.js @@ -3,8 +3,10 @@ import { render } from 'react-dom' import { ResolveProvider } from '@resolve-js/react-hooks' import { Router } from 'react-router' import { createBrowserHistory } from 'history' + import Routes from './hooks/components/Routes' import routes from './hooks/routes' +import UploaderContext from './hooks/context' const entryPoint = (resolveContext) => { const history = createBrowserHistory({ basename: resolveContext.rootPath }) @@ -22,9 +24,11 @@ const entryPoint = (resolveContext) => { render( - - - + + + + + , appContainer ) diff --git a/functional-tests/app/config.cloud.js b/functional-tests/app/config.cloud.js index 561eb93394..b705141db2 
100644 --- a/functional-tests/app/config.cloud.js +++ b/functional-tests/app/config.cloud.js @@ -31,4 +31,13 @@ export default { }, }, }, + uploadAdapter: { + options: { + encryptedUserId: declareRuntimeEnv('RESOLVE_ENCRYPTED_USER_ID'), + userId: declareRuntimeEnv('RESOLVE_USER_ID'), + CDN: declareRuntimeEnv('RESOLVE_UPLOADER_URL'), + uploaderArn: declareRuntimeEnv('RESOLVE_UPLOADER_LAMBDA_ARN'), + scope: 'functional-tests', + }, + }, } diff --git a/functional-tests/app/config.dev.js b/functional-tests/app/config.dev.js index 4c25bb5624..9665caf4db 100644 --- a/functional-tests/app/config.dev.js +++ b/functional-tests/app/config.dev.js @@ -54,6 +54,13 @@ const devConfig = { name: 'jwt', maxAge: 31536000000, }, + uploadAdapter: { + options: { + directory: 'data', + bucket: 'files', + secretKey: 'key', + }, + }, } export default devConfig diff --git a/functional-tests/app/package.json b/functional-tests/app/package.json index f4fb246d8c..67a10d3a6c 100644 --- a/functional-tests/app/package.json +++ b/functional-tests/app/package.json @@ -17,6 +17,7 @@ "@resolve-js/eventstore-lite": "0.28.3", "@resolve-js/eventstore-postgresql-serverless": "0.28.3", "@resolve-js/module-admin": "0.28.3", + "@resolve-js/module-uploader": "0.28.3", "@resolve-js/react-hooks": "0.28.3", "@resolve-js/readmodel-lite": "0.28.3", "@resolve-js/readmodel-postgresql-serverless": "0.28.3", @@ -36,10 +37,12 @@ "react-router": "5.1.2", "react-router-config": "5.1.1", "react-router-dom": "5.1.2", + "reactstrap": "8.9.0", "redux": "4.0.5", "redux-devtools-extension": "2.13.7", "redux-saga": "1.0.5", - "uuid": "8.3.1" + "uuid": "8.3.1", + "react-fileupload-progress": "0.5.0" }, "devDependencies": { "@babel/core": "7.9.6", diff --git a/functional-tests/app/run.js b/functional-tests/app/run.js index 6d1d7f5497..1f9e0707f3 100644 --- a/functional-tests/app/run.js +++ b/functional-tests/app/run.js @@ -7,6 +7,7 @@ import { reset, } from '@resolve-js/scripts' import resolveModuleAdmin from 
'@resolve-js/module-admin' +import resolveModuleUploader from '@resolve-js/module-uploader' import appConfig from './config.app' import cloudConfig from './config.cloud' @@ -16,7 +17,13 @@ const launchMode = process.argv[2] void (async () => { try { - const baseConfig = merge(defaultResolveConfig, appConfig) + const moduleUploader = resolveModuleUploader({ + publicDirs: ['images'], + expireTime: 604800, + jwtSecret: 'SECRETJWT', + }) + + const baseConfig = merge(defaultResolveConfig, appConfig, moduleUploader) switch (launchMode) { case 'dev': { diff --git a/functional-tests/testcafe/react-hooks/file-uploader.test.ts b/functional-tests/testcafe/react-hooks/file-uploader.test.ts new file mode 100644 index 0000000000..58e73e10b4 --- /dev/null +++ b/functional-tests/testcafe/react-hooks/file-uploader.test.ts @@ -0,0 +1,29 @@ +import { Selector } from 'testcafe' +import * as fetch from 'isomorphic-fetch' + +import { getTargetURL } from '../../utils/utils' + +const targetUrl = `${getTargetURL()}/file-uploader` + +fixture`React Hooks: file uploader`.beforeEach(async (t) => { + await t.setNativeDialogHandler(() => true) + await t.navigateTo(targetUrl) +}) + +test('upload file', async (t) => { + const button = Selector('button').withText('Upload image') + + await t.click(button) + + const uploadButton = Selector('button').withText('Upload') + + await t + .setFilesToUpload('#fileUpload', '../../utils/test-file.png') + .click(uploadButton) + + const fileLink = await Selector('#link').getAttribute('href') + + const res = await fetch(fileLink) + + await t.expect(res.status).eql(200) +}) diff --git a/functional-tests/utils/test-file.png b/functional-tests/utils/test-file.png new file mode 100644 index 0000000000..13127c53e7 Binary files /dev/null and b/functional-tests/utils/test-file.png differ diff --git a/packages/runtime/adapters/eventstore-adapters/eventstore-base/src/concurrent-error.ts 
b/packages/runtime/adapters/eventstore-adapters/eventstore-base/src/concurrent-error.ts index ed42372051..68dcf0b5ae 100644 --- a/packages/runtime/adapters/eventstore-adapters/eventstore-base/src/concurrent-error.ts +++ b/packages/runtime/adapters/eventstore-adapters/eventstore-base/src/concurrent-error.ts @@ -1,7 +1,7 @@ const ConcurrentError = (function (this: Error, aggregateId: string): void { Error.call(this) this.name = 'ConcurrentError' - this.message = `Can not save the event because aggregate '${aggregateId}' is not actual at the moment. Please retry later.` + this.message = `Cannot save the event because the aggregate '${aggregateId}' is currently out of date. Please retry later.` if (Error.captureStackTrace) { Error.captureStackTrace(this, ConcurrentError) diff --git a/packages/runtime/adapters/eventstore-adapters/eventstore-base/src/constants.ts b/packages/runtime/adapters/eventstore-adapters/eventstore-base/src/constants.ts index 3e253bf8f1..879e1234a6 100644 --- a/packages/runtime/adapters/eventstore-adapters/eventstore-base/src/constants.ts +++ b/packages/runtime/adapters/eventstore-adapters/eventstore-base/src/constants.ts @@ -1,8 +1,8 @@ -export const MAINTENANCE_MODE_AUTO = Symbol() -export const MAINTENANCE_MODE_MANUAL = Symbol() +export const MAINTENANCE_MODE_AUTO = Symbol.for('MAINTENANCE_MODE_AUTO') +export const MAINTENANCE_MODE_MANUAL = Symbol.for('MAINTENANCE_MODE_MANUAL') -export const PARTIAL_EVENT_FLAG = Symbol() -export const PARTIAL_SECRET_FLAG = Symbol() +export const PARTIAL_EVENT_FLAG = Symbol.for('PARTIAL_EVENT_FLAG') +export const PARTIAL_SECRET_FLAG = Symbol.for('PARTIAL_SECRET_FLAG') export const BUFFER_SIZE = 512 * 1024 export const BATCH_SIZE = 200 diff --git a/packages/runtime/adapters/eventstore-adapters/eventstore-base/src/snapshot-trigger.ts b/packages/runtime/adapters/eventstore-adapters/eventstore-base/src/snapshot-trigger.ts index cd95d323b7..c1adf5d765 100644 --- 
a/packages/runtime/adapters/eventstore-adapters/eventstore-base/src/snapshot-trigger.ts +++ b/packages/runtime/adapters/eventstore-adapters/eventstore-base/src/snapshot-trigger.ts @@ -16,12 +16,12 @@ const snapshotTrigger = async < log.verbose(`bucketSize: ${bucketSize}`) if (snapshotKey == null || snapshotKey.constructor !== String) { - const error = new Error('Snapshot key must be string') + const error = new Error('Snapshot key must be a string') log.error(error.message) throw error } if (content == null || content.constructor !== String) { - const error = new Error('Snapshot content must be string') + const error = new Error('Snapshot content must be a string') log.error(error.message) throw error } diff --git a/packages/runtime/adapters/eventstore-adapters/eventstore-base/src/types.ts b/packages/runtime/adapters/eventstore-adapters/eventstore-base/src/types.ts index 4d05ba8f94..e6059a2b17 100644 --- a/packages/runtime/adapters/eventstore-adapters/eventstore-base/src/types.ts +++ b/packages/runtime/adapters/eventstore-adapters/eventstore-base/src/types.ts @@ -118,7 +118,7 @@ export const EventFilterSchema = new t.Type( return t.failure( u, c, - `Event filter start time can't be larger than finishTime` + `Event filter start time cannot be later than finishTime` ) } } @@ -127,7 +127,7 @@ export const EventFilterSchema = new t.Type( return t.failure( u, c, - 'cursor or at least one of startTime or finishTime should be defined' + 'Cursor or at least one of startTime or finishTime should be defined' ) } }), diff --git a/packages/runtime/adapters/eventstore-adapters/eventstore-base/test/maintenance-mode.test.ts b/packages/runtime/adapters/eventstore-adapters/eventstore-base/test/maintenance-mode.test.ts new file mode 100644 index 0000000000..3a536650f4 --- /dev/null +++ b/packages/runtime/adapters/eventstore-adapters/eventstore-base/test/maintenance-mode.test.ts @@ -0,0 +1,23 @@ +/* eslint-disable import/no-extraneous-dependencies */ +/* eslint-disable 
@typescript-eslint/no-var-requires */ + +describe('Regression test: maintenance mode', () => { + test('Strict Equals Operator "===" should return true for a/node_modules/@resolve-js/eventstore-base and b/node_modules/@resolve-js/eventstore-base', () => { + const A = require('../src/constants') + const { + MAINTENANCE_MODE_AUTO: A_MAINTENANCE_MODE_AUTO, + MAINTENANCE_MODE_MANUAL: A_MAINTENANCE_MODE_MANUAL, + } = A + + jest.resetModules() + + const B = require('../src/constants') + const { + MAINTENANCE_MODE_AUTO: B_MAINTENANCE_MODE_AUTO, + MAINTENANCE_MODE_MANUAL: B_MAINTENANCE_MODE_MANUAL, + } = B + + expect(A_MAINTENANCE_MODE_AUTO).toEqual(B_MAINTENANCE_MODE_AUTO) + expect(A_MAINTENANCE_MODE_MANUAL).toEqual(B_MAINTENANCE_MODE_MANUAL) + }) +}) diff --git a/packages/runtime/adapters/eventstore-adapters/eventstore-lite/src/commit-incremental-import.ts b/packages/runtime/adapters/eventstore-adapters/eventstore-lite/src/commit-incremental-import.ts index aa9a346edc..53c5fd8504 100644 --- a/packages/runtime/adapters/eventstore-adapters/eventstore-lite/src/commit-incremental-import.ts +++ b/packages/runtime/adapters/eventstore-adapters/eventstore-lite/src/commit-incremental-import.ts @@ -126,7 +126,7 @@ const commitIncrementalImport = async ( /^SQLITE_ERROR:.*? 
not exists$/.test(error.message)) ) { throw new Error( - `Either event batch has timestamps from the past nor incremental importId=${importId} does not exist` + `Either event batch has timestamps from the past or incremental importId=${importId} does not exist` ) } else { throw error diff --git a/packages/runtime/adapters/eventstore-adapters/eventstore-lite/src/ensure-event-subscriber.ts b/packages/runtime/adapters/eventstore-adapters/eventstore-lite/src/ensure-event-subscriber.ts index 2352be5902..4db3afd573 100644 --- a/packages/runtime/adapters/eventstore-adapters/eventstore-lite/src/ensure-event-subscriber.ts +++ b/packages/runtime/adapters/eventstore-adapters/eventstore-lite/src/ensure-event-subscriber.ts @@ -24,7 +24,7 @@ const ensureEventSubscriber = async ( (!updateOnly && destination == null) ) { throw new Error( - `Parameters "destination" and "updateOnly" are mutual exclusive` + `Parameters "destination" and "updateOnly" are mutually exclusive` ) } diff --git a/packages/runtime/adapters/eventstore-adapters/eventstore-lite/src/init-events.ts b/packages/runtime/adapters/eventstore-adapters/eventstore-lite/src/init-events.ts index 7507dc3700..4dcf4c604f 100644 --- a/packages/runtime/adapters/eventstore-adapters/eventstore-lite/src/init-events.ts +++ b/packages/runtime/adapters/eventstore-adapters/eventstore-lite/src/init-events.ts @@ -73,7 +73,7 @@ const initEvents = async ({ (error) => { if (isAlreadyExistsError(error.message)) { return new EventstoreResourceAlreadyExistError( - `duplicate initialization of the sqlite adapter with same events database "${databaseFile}" and table "${eventsTableName}" is not allowed` + `duplicate initialization of the sqlite adapter with the same event database "${databaseFile}" and table "${eventsTableName}" is not allowed` ) } return null diff --git a/packages/runtime/adapters/eventstore-adapters/eventstore-lite/src/init-secrets.ts b/packages/runtime/adapters/eventstore-adapters/eventstore-lite/src/init-secrets.ts index 
dc1f94f750..069fff97db 100644 --- a/packages/runtime/adapters/eventstore-adapters/eventstore-lite/src/init-secrets.ts +++ b/packages/runtime/adapters/eventstore-adapters/eventstore-lite/src/init-secrets.ts @@ -30,7 +30,7 @@ const initSecrets = async ({ (error) => { if (isAlreadyExistsError(error.message)) { return new EventstoreResourceAlreadyExistError( - `duplicate initialization of the sqlite adapter with same events database "${databaseFile}" and table "${secretsTableName}" is not allowed` + `duplicate initialization of the sqlite adapter with the same event database "${databaseFile}" and table "${secretsTableName}" is not allowed` ) } return null diff --git a/packages/runtime/adapters/eventstore-adapters/eventstore-lite/src/load-snapshot.ts b/packages/runtime/adapters/eventstore-adapters/eventstore-lite/src/load-snapshot.ts index 31bdf825a6..ad5360fd18 100644 --- a/packages/runtime/adapters/eventstore-adapters/eventstore-lite/src/load-snapshot.ts +++ b/packages/runtime/adapters/eventstore-adapters/eventstore-lite/src/load-snapshot.ts @@ -5,7 +5,7 @@ const loadSnapshot = async ( snapshotKey: string ): Promise => { if (snapshotKey == null || snapshotKey.constructor !== String) { - throw new Error('Snapshot key must be string') + throw new Error('Snapshot key must be a string') } const result = await database.get( diff --git a/packages/runtime/adapters/eventstore-adapters/eventstore-mysql/src/commit-incremental-import.ts b/packages/runtime/adapters/eventstore-adapters/eventstore-mysql/src/commit-incremental-import.ts index 549df1d9ba..bc4b7a4e20 100644 --- a/packages/runtime/adapters/eventstore-adapters/eventstore-mysql/src/commit-incremental-import.ts +++ b/packages/runtime/adapters/eventstore-adapters/eventstore-mysql/src/commit-incremental-import.ts @@ -217,7 +217,7 @@ const commitIncrementalImport = async ( if (errno === ER_SUBQUERY_NO_1_ROW || errno === ER_NO_SUCH_TABLE) { throw new Error( - `Either event batch has timestamps from the past nor incremental 
importId=${importId} does not exist` + `Either event batch has timestamps from the past or incremental importId=${importId} does not exist` ) } else { throw error diff --git a/packages/runtime/adapters/eventstore-adapters/eventstore-mysql/src/ensure-event-subscriber.ts b/packages/runtime/adapters/eventstore-adapters/eventstore-mysql/src/ensure-event-subscriber.ts index 7d172cd195..b957d8a1f3 100644 --- a/packages/runtime/adapters/eventstore-adapters/eventstore-mysql/src/ensure-event-subscriber.ts +++ b/packages/runtime/adapters/eventstore-adapters/eventstore-mysql/src/ensure-event-subscriber.ts @@ -25,7 +25,7 @@ const ensureEventSubscriber = async ( (!updateOnly && destination == null) ) { throw new Error( - `Parameters "destination" and "updateOnly" are mutual exclusive` + `Parameters "destination" and "updateOnly" are mutually exclusive` ) } diff --git a/packages/runtime/adapters/eventstore-adapters/eventstore-mysql/src/init-events.ts b/packages/runtime/adapters/eventstore-adapters/eventstore-mysql/src/init-events.ts index dba2cb267b..4be7366ab8 100644 --- a/packages/runtime/adapters/eventstore-adapters/eventstore-mysql/src/init-events.ts +++ b/packages/runtime/adapters/eventstore-adapters/eventstore-mysql/src/init-events.ts @@ -77,7 +77,7 @@ const initEvents = async (pool: AdapterPool): Promise => { (error) => { if (isAlreadyExistsError(error)) { return new EventstoreResourceAlreadyExistError( - `duplicate initialization of the mysql adapter with same events database "${database}" and table "${eventsTableName}" is not allowed` + `duplicate initialization of the mysql adapter with the same event database "${database}" and table "${eventsTableName}" is not allowed` ) } return null diff --git a/packages/runtime/adapters/eventstore-adapters/eventstore-mysql/src/init-secrets.ts b/packages/runtime/adapters/eventstore-adapters/eventstore-mysql/src/init-secrets.ts index 4799520839..f919ee2641 100644 --- 
a/packages/runtime/adapters/eventstore-adapters/eventstore-mysql/src/init-secrets.ts +++ b/packages/runtime/adapters/eventstore-adapters/eventstore-mysql/src/init-secrets.ts @@ -32,7 +32,7 @@ const initSecrets = async (pool: AdapterPool): Promise => { (error) => { if (isAlreadyExistsError(error)) { return new EventstoreResourceAlreadyExistError( - `duplicate initialization of the mysql adapter with same events database "${database}" and table "${secretsTableName}" is not allowed` + `duplicate initialization of the mysql adapter with the same event database "${database}" and table "${secretsTableName}" is not allowed` ) } return null diff --git a/packages/runtime/adapters/eventstore-adapters/eventstore-mysql/src/load-snapshot.ts b/packages/runtime/adapters/eventstore-adapters/eventstore-mysql/src/load-snapshot.ts index 5284bf0627..f0325e4c93 100644 --- a/packages/runtime/adapters/eventstore-adapters/eventstore-mysql/src/load-snapshot.ts +++ b/packages/runtime/adapters/eventstore-adapters/eventstore-mysql/src/load-snapshot.ts @@ -5,7 +5,7 @@ const loadSnapshot = async ( snapshotKey: string ): Promise => { if (snapshotKey == null || snapshotKey.constructor !== String) { - throw new Error('Snapshot key must be string') + throw new Error('Snapshot key must be a string') } const snapshotsTableNameAsId: string = escapeId(snapshotsTableName) diff --git a/packages/runtime/adapters/eventstore-adapters/eventstore-mysql/src/set-secret.ts b/packages/runtime/adapters/eventstore-adapters/eventstore-mysql/src/set-secret.ts index 2226fcdc79..53a2fb5bb2 100644 --- a/packages/runtime/adapters/eventstore-adapters/eventstore-mysql/src/set-secret.ts +++ b/packages/runtime/adapters/eventstore-adapters/eventstore-mysql/src/set-secret.ts @@ -33,7 +33,7 @@ const setSecret = async ( COMMIT;` - log.verbose(`SQL query verbose output hidden due to security`) + log.verbose(`SQL query verbose output hidden due to security reasons`) try { log.debug(`executing SQL query`) diff --git 
a/packages/runtime/adapters/eventstore-adapters/eventstore-postgresql-serverless/src/commit-incremental-import.ts b/packages/runtime/adapters/eventstore-adapters/eventstore-postgresql-serverless/src/commit-incremental-import.ts index 1ea2ed0b03..a0c9ad4066 100644 --- a/packages/runtime/adapters/eventstore-adapters/eventstore-postgresql-serverless/src/commit-incremental-import.ts +++ b/packages/runtime/adapters/eventstore-adapters/eventstore-postgresql-serverless/src/commit-incremental-import.ts @@ -237,7 +237,7 @@ const commitIncrementalImport = async ( /subquery used as an expression/i.test(error.message)) ) { throw new Error( - `Either event batch has timestamps from the past nor incremental importId=${importId} does not exist` + `Either event batch has timestamps from the past or incremental importId=${importId} does not exist` ) } else { throw error diff --git a/packages/runtime/adapters/eventstore-adapters/eventstore-postgresql-serverless/src/ensure-event-subscriber.ts b/packages/runtime/adapters/eventstore-adapters/eventstore-postgresql-serverless/src/ensure-event-subscriber.ts index 00bde05943..a8b69eb78f 100644 --- a/packages/runtime/adapters/eventstore-adapters/eventstore-postgresql-serverless/src/ensure-event-subscriber.ts +++ b/packages/runtime/adapters/eventstore-adapters/eventstore-postgresql-serverless/src/ensure-event-subscriber.ts @@ -31,7 +31,7 @@ const ensureEventSubscriber = async ( (!updateOnly && destination == null) ) { throw new Error( - `Parameters "destination" and "updateOnly" are mutual exclusive` + `Parameters "destination" and "updateOnly" are mutually exclusive` ) } diff --git a/packages/runtime/adapters/eventstore-adapters/eventstore-postgresql-serverless/src/load-snapshot.ts b/packages/runtime/adapters/eventstore-adapters/eventstore-postgresql-serverless/src/load-snapshot.ts index 769c9e475b..97ac83b23c 100644 --- a/packages/runtime/adapters/eventstore-adapters/eventstore-postgresql-serverless/src/load-snapshot.ts +++ 
b/packages/runtime/adapters/eventstore-adapters/eventstore-postgresql-serverless/src/load-snapshot.ts @@ -14,7 +14,7 @@ const loadSnapshot = async ( isTimeoutError, } = pool if (snapshotKey == null || snapshotKey.constructor !== String) { - throw new Error('Snapshot key must be string') + throw new Error('Snapshot key must be a string') } const databaseNameAsId: string = escapeId(databaseName) diff --git a/packages/runtime/adapters/eventstore-adapters/eventstore-postgresql/src/commit-incremental-import.ts b/packages/runtime/adapters/eventstore-adapters/eventstore-postgresql/src/commit-incremental-import.ts index e36f755baf..27063adb0b 100644 --- a/packages/runtime/adapters/eventstore-adapters/eventstore-postgresql/src/commit-incremental-import.ts +++ b/packages/runtime/adapters/eventstore-adapters/eventstore-postgresql/src/commit-incremental-import.ts @@ -237,7 +237,7 @@ const commitIncrementalImport = async ( /subquery used as an expression/i.test(error.message)) ) { throw new Error( - `Either event batch has timestamps from the past nor incremental importId=${importId} does not exist` + `Either event batch has timestamps from the past or incremental importId=${importId} does not exist` ) } else { throw error diff --git a/packages/runtime/adapters/eventstore-adapters/eventstore-postgresql/src/drop-events.ts b/packages/runtime/adapters/eventstore-adapters/eventstore-postgresql/src/drop-events.ts index 2eafc2318d..3af18ac57a 100644 --- a/packages/runtime/adapters/eventstore-adapters/eventstore-postgresql/src/drop-events.ts +++ b/packages/runtime/adapters/eventstore-adapters/eventstore-postgresql/src/drop-events.ts @@ -15,7 +15,7 @@ const dropEvents = async ({ }: AdapterPool): Promise => { const log = getLog('dropEvents') - log.debug(`dropping events tables`) + log.debug(`dropping event tables`) log.verbose(`secretsTableName: ${secretsTableName}`) log.verbose(`databaseName: ${databaseName}`) diff --git 
a/packages/runtime/adapters/eventstore-adapters/eventstore-postgresql/src/ensure-event-subscriber.ts b/packages/runtime/adapters/eventstore-adapters/eventstore-postgresql/src/ensure-event-subscriber.ts index 00bde05943..a8b69eb78f 100644 --- a/packages/runtime/adapters/eventstore-adapters/eventstore-postgresql/src/ensure-event-subscriber.ts +++ b/packages/runtime/adapters/eventstore-adapters/eventstore-postgresql/src/ensure-event-subscriber.ts @@ -31,7 +31,7 @@ const ensureEventSubscriber = async ( (!updateOnly && destination == null) ) { throw new Error( - `Parameters "destination" and "updateOnly" are mutual exclusive` + `Parameters "destination" and "updateOnly" are mutually exclusive` ) } diff --git a/packages/runtime/adapters/eventstore-adapters/eventstore-postgresql/src/load-snapshot.ts b/packages/runtime/adapters/eventstore-adapters/eventstore-postgresql/src/load-snapshot.ts index 571a63bb37..481cd1c353 100644 --- a/packages/runtime/adapters/eventstore-adapters/eventstore-postgresql/src/load-snapshot.ts +++ b/packages/runtime/adapters/eventstore-adapters/eventstore-postgresql/src/load-snapshot.ts @@ -11,7 +11,7 @@ const loadSnapshot = async ( snapshotKey: string ): Promise => { if (snapshotKey == null || snapshotKey.constructor !== String) { - throw new Error('Snapshot key must be string') + throw new Error('Snapshot key must be a string') } const databaseNameAsId = escapeId(databaseName) diff --git a/packages/runtime/adapters/readmodel-adapters/readmodel-base/src/create-adapter.ts b/packages/runtime/adapters/readmodel-adapters/readmodel-base/src/create-adapter.ts index 754098ba1a..934afbdf0b 100644 --- a/packages/runtime/adapters/readmodel-adapters/readmodel-base/src/create-adapter.ts +++ b/packages/runtime/adapters/readmodel-adapters/readmodel-base/src/create-adapter.ts @@ -53,7 +53,7 @@ const createAdapter = < if (Object.keys(restApi).length > 0) { throw new Error( - `Read-model adapter implementation should not provide extra methods: ${JSON.stringify( + 
`Read model adapter implementation should not provide extra methods: ${JSON.stringify( Object.keys(restApi) )}` ) @@ -88,7 +88,7 @@ const createAdapter = < StoreApi >) { if (typeof storeApi[key] !== 'function') { - throw new Error(`Store API method ${key} should be function`) + throw new Error(`Store API method ${key} should be a function`) } } @@ -96,7 +96,7 @@ const createAdapter = < AdapterOperations >) { if (typeof adapterOperations[key] !== 'function') { - throw new Error(`Adapter operation method ${key} should be function`) + throw new Error(`Adapter operation method ${key} should be a function`) } } diff --git a/packages/runtime/adapters/readmodel-adapters/readmodel-base/src/wrap-disconnect.ts b/packages/runtime/adapters/readmodel-adapters/readmodel-base/src/wrap-disconnect.ts index 2aaa8eabdc..a4a629e31f 100644 --- a/packages/runtime/adapters/readmodel-adapters/readmodel-base/src/wrap-disconnect.ts +++ b/packages/runtime/adapters/readmodel-adapters/readmodel-base/src/wrap-disconnect.ts @@ -18,7 +18,7 @@ const disconnectImpl = async < ): Promise => { const adapterPool = pool.adapterPoolMap.get(store) if (adapterPool == null) { - throw new Error(`Read-model adapter pool is null`) + throw new Error(`Read model adapter pool is null`) } pool.adapterPoolMap.delete(store) await disconnect(adapterPool) diff --git a/packages/runtime/adapters/readmodel-adapters/readmodel-base/src/wrap-operation.ts b/packages/runtime/adapters/readmodel-adapters/readmodel-base/src/wrap-operation.ts index d8cb68ec15..278538ac40 100644 --- a/packages/runtime/adapters/readmodel-adapters/readmodel-base/src/wrap-operation.ts +++ b/packages/runtime/adapters/readmodel-adapters/readmodel-base/src/wrap-operation.ts @@ -22,7 +22,7 @@ const operationImpl = async < ): Promise>> => { const adapterPool = pool.adapterPoolMap.get(store) if (adapterPool == null) { - throw new Error(`Read-model adapter pool is null`) + throw new Error(`Read model adapter pool is null`) } const result = await 
operationFunc(adapterPool, readModelName, ...args) return result diff --git a/packages/runtime/adapters/readmodel-adapters/readmodel-lite/src/convert-binary-row.ts b/packages/runtime/adapters/readmodel-adapters/readmodel-lite/src/convert-binary-row.ts index 597c3718a5..0437cf0cdb 100644 --- a/packages/runtime/adapters/readmodel-adapters/readmodel-lite/src/convert-binary-row.ts +++ b/packages/runtime/adapters/readmodel-adapters/readmodel-lite/src/convert-binary-row.ts @@ -20,7 +20,7 @@ const convertBinaryRow: ConvertBinaryRowMethod = ( ) => { if (fieldList != null && fieldList.constructor !== Object) { throw new Error( - 'Field list should be object with enumerated selected fields' + 'Field list should be an object with enumerated selected fields' ) } const row: RowLike = excludeObjectField(inputRow, `RESOLVE-${readModelName}`) diff --git a/packages/runtime/adapters/readmodel-adapters/readmodel-mysql/src/convert-binary-row.ts b/packages/runtime/adapters/readmodel-adapters/readmodel-mysql/src/convert-binary-row.ts index c79dfe74af..4d0f0c5d1e 100644 --- a/packages/runtime/adapters/readmodel-adapters/readmodel-mysql/src/convert-binary-row.ts +++ b/packages/runtime/adapters/readmodel-adapters/readmodel-mysql/src/convert-binary-row.ts @@ -21,7 +21,7 @@ const excludeObjectField = < const convertBinaryRow: ConvertBinaryRowMethod = (inputRow, fieldList) => { if (fieldList != null && fieldList.constructor !== Object) { throw new Error( - 'Field list should be object with enumerated selected fields' + 'Field list should be an object with enumerated selected fields' ) } const row: RowLike = excludeObjectField( diff --git a/packages/runtime/adapters/readmodel-adapters/readmodel-postgresql-serverless/src/convert-result-row.ts b/packages/runtime/adapters/readmodel-adapters/readmodel-postgresql-serverless/src/convert-result-row.ts index a3b8131cfd..7e0ada1151 100644 --- a/packages/runtime/adapters/readmodel-adapters/readmodel-postgresql-serverless/src/convert-result-row.ts +++ 
b/packages/runtime/adapters/readmodel-adapters/readmodel-postgresql-serverless/src/convert-result-row.ts @@ -3,7 +3,7 @@ import type { ConvertResultRowMethod, RowLike } from './types' const convertResultRow: ConvertResultRowMethod = (inputRow, fieldList) => { if (fieldList != null && fieldList.constructor !== Object) { throw new Error( - 'Field list should be object with enumerated selected fields' + 'Field list should be an object with enumerated selected fields' ) } const row: RowLike = { ...inputRow } diff --git a/packages/runtime/adapters/readmodel-adapters/readmodel-postgresql/src/convert-result-row.ts b/packages/runtime/adapters/readmodel-adapters/readmodel-postgresql/src/convert-result-row.ts index 6e2a9b7925..f7e5dae813 100644 --- a/packages/runtime/adapters/readmodel-adapters/readmodel-postgresql/src/convert-result-row.ts +++ b/packages/runtime/adapters/readmodel-adapters/readmodel-postgresql/src/convert-result-row.ts @@ -3,7 +3,7 @@ import type { ConvertResultRowMethod, RowLike } from './types' const convertResultRow: ConvertResultRowMethod = (inputRow, fieldList) => { if (fieldList != null && fieldList.constructor !== Object) { throw new Error( - 'Field list should be object with enumerated selected fields' + 'Field list should be an object with enumerated selected fields' ) } const row: RowLike = { ...inputRow } diff --git a/packages/runtime/runtime/src/cloud/init-uploader.js b/packages/runtime/runtime/src/cloud/init-uploader.js index d27e4cef0e..49cc159b62 100644 --- a/packages/runtime/runtime/src/cloud/init-uploader.js +++ b/packages/runtime/runtime/src/cloud/init-uploader.js @@ -6,7 +6,7 @@ import crypto from 'crypto' import mime from 'mime-types' const createPresignedPut = async ( - { uploaderArn, deploymentId, encryptedDeploymentId }, + { uploaderArn, userId, encryptedUserId }, dir ) => { const lambda = new Lambda() @@ -16,8 +16,8 @@ const createPresignedPut = async ( FunctionName: uploaderArn, Payload: JSON.stringify({ type: 'put', - deploymentId, 
- encryptedDeploymentId, + userId, + encryptedUserId, dir, }), }) @@ -59,7 +59,7 @@ export const upload = (pool, uploadUrl, filePath) => { } const createPresignedPost = async ( - { uploaderArn, deploymentId, encryptedDeploymentId }, + { uploaderArn, userId, encryptedUserId }, dir ) => { const lambda = new Lambda() @@ -69,8 +69,8 @@ const createPresignedPost = async ( FunctionName: uploaderArn, Payload: JSON.stringify({ type: 'post', - deploymentId, - encryptedDeploymentId, + userId, + encryptedUserId, dir, }), }) @@ -111,12 +111,12 @@ export const uploadFormData = (pool, form, filePath) => { } export const createToken = ( - { encryptedDeploymentId }, + { encryptedUserId }, { dir, expireTime = 3600 } ) => { const payload = Buffer.from( JSON.stringify({ - encryptedDeploymentId, + encryptedUserId, dir, expireTime: Date.now() + expireTime * 1000, }) @@ -125,7 +125,7 @@ export const createToken = ( .replace(/=/g, '') const signature = crypto - .createHmac('md5', encryptedDeploymentId) + .createHmac('md5', encryptedUserId) .update(payload) .digest('hex') @@ -133,7 +133,15 @@ export const createToken = ( } const createUploader = (config) => { - const { deploymentId, CDN, encryptedDeploymentId } = config + const { CDN } = config + + const userId = process.env['RESOLVE_USER_ID'] + const encryptedUserId = process.env['RESOLVE_ENCRYPTED_USER_ID'] + + Object.assign(config, { + userId, + encryptedUserId, + }) return Object.freeze({ createPresignedPut: createPresignedPut.bind(null, config), @@ -141,9 +149,9 @@ const createUploader = (config) => { createPresignedPost: createPresignedPost.bind(null, config), uploadFormData: uploadFormData.bind(null, config), createToken: createToken.bind(null, config), - deploymentId, CDN, - encryptedDeploymentId, + userId, + encryptedUserId, }) } @@ -160,7 +168,7 @@ const initUploader = async (resolve) => { // TODO: provide support for custom uploader adapter const createUploadAdapter = resolve.assemblies.uploadAdapter const uploader = 
createUploader(createUploadAdapter()) - process.env.RESOLVE_UPLOADER_CDN_URL = `https://${uploader.CDN}/${uploader.deploymentId}` + process.env.RESOLVE_UPLOADER_CDN_URL = `https://${uploader.CDN}/${uploader.userId}` Object.assign(resolve, { uploader: { diff --git a/packages/runtime/runtime/src/cloud/wrap-api-handler.js b/packages/runtime/runtime/src/cloud/wrap-api-handler.js index 15206cd69d..8e9883fb77 100644 --- a/packages/runtime/runtime/src/cloud/wrap-api-handler.js +++ b/packages/runtime/runtime/src/cloud/wrap-api-handler.js @@ -146,6 +146,8 @@ const createRequest = async (lambdaEvent, customParameters) => { ? cookie.parse(headers.cookie) : {} + const clientIp = headers['X-Forwarded-For'] + const req = Object.create(null) req.isLambdaEdgeRequest = isLambdaEdgeRequest @@ -157,6 +159,7 @@ const createRequest = async (lambdaEvent, customParameters) => { headers, cookies, body, + clientIp, ...customParameters, } diff --git a/packages/runtime/runtime/src/common/handlers/uploader-handler.js b/packages/runtime/runtime/src/common/handlers/uploader-handler.js index d8587fe089..1acd10fad5 100644 --- a/packages/runtime/runtime/src/common/handlers/uploader-handler.js +++ b/packages/runtime/runtime/src/common/handlers/uploader-handler.js @@ -8,6 +8,15 @@ import extractRequestBody from '../utils/extract-request-body' const log = debugLevels('resolve:runtime:uploader-handler') +const cors = (res) => { + res.setHeader('Access-Control-Allow-Origin', '*') + res.setHeader( + 'Access-Control-Allow-Methods', + 'DELETE,GET,HEAD,OPTIONS,PATCH,POST,PUT' + ) + res.setHeader('Access-Control-Allow-Headers', 'Authorization') +} + const uploaderHandler = async (req, res) => { try { const { directory, bucket, secretKey } = req.resolve.uploader @@ -59,6 +68,9 @@ const uploaderHandler = async (req, res) => { } fs.writeFileSync(`${dirName}/${uploadId}`, data, { flag: 'w+' }) + + cors(res) + res.end() } else if (req.method === 'GET') { const uploadParams = req.matchedParams.params if 
(uploadParams == null || uploadParams.constructor !== String) { @@ -111,9 +123,13 @@ const uploaderHandler = async (req, res) => { const file = fs.readFileSync(path.join(bucketPath, dir, uploadId)) + cors(res) res.setHeader('Content-Type', metadata['Content-Type']) res.setHeader('Content-Disposition', 'inline') res.end(file) + } else if (req.method === 'OPTIONS') { + cors(res) + res.end() } } catch (err) { log.warn('Uploader handler error', err) diff --git a/packages/runtime/runtime/src/common/query/wrap-read-model.ts b/packages/runtime/runtime/src/common/query/wrap-read-model.ts index 2e1d814af0..93ce0bff55 100644 --- a/packages/runtime/runtime/src/common/query/wrap-read-model.ts +++ b/packages/runtime/runtime/src/common/query/wrap-read-model.ts @@ -185,7 +185,7 @@ const customReadModelMethods = { if (events == null) { try { log.verbose( - `Applying "Init" event to read-model "${readModelName}" started` + `Started applying the "Init" event to the "${readModelName}" read model` ) @@ -205,21 +205,21 @@ } } log.debug( - `applying "Init" event to read-model "${readModelName}" succeed` + `applying "Init" event to the "${readModelName}" read model succeeded` ) } catch (readModelError) { if (readModelError === OMIT_BATCH) { throw OMIT_BATCH } log.error( - `applying "Init" event to read-model "${readModelName}" failed` + `applying "Init" event to the "${readModelName}" read model failed` ) log.error(readModelError.message) log.verbose(readModelError.stack) const summaryError = readModelError log.verbose( - `Throwing error for "Init" applying to read-model "${readModelName}"`, + `Throwing error for applying "Init" to the "${readModelName}" read model`, summaryError ) throw summaryError @@ -230,7 +230,7 @@ for (const event of events) { if (pool.isDisposed) { throw new Error( - `read-model "${readModelName}" updating had been interrupted` + `read model "${readModelName}" updating has been interrupted` ) } 
if (event == null) { @@ -238,19 +238,19 @@ } const remainingTime = pool.getVacantTimeInMillis() log.debug( - `remaining read-model "${readModelName}" feeding time is ${remainingTime} ms` + `remaining read model "${readModelName}" feeding time is ${remainingTime} ms` ) if (remainingTime < 0) { log.debug( - `stop applying events to read-model "${readModelName}" because of timeout` + `stop applying events to the "${readModelName}" read model due to timeout` ) break } try { log.verbose( - `Applying "${event.type}" event to read-model "${readModelName}" started` + `Applying "${event.type}" event to the "${readModelName}" read model started` ) try { const executor = await interop.acquireEventHandler( @@ -274,14 +274,14 @@ } } log.debug( - `applying "${event.type}" event to read-model "${readModelName}" succeed` + `applying "${event.type}" event to the "${readModelName}" read model succeeded` ) } catch (readModelError) { if (readModelError === OMIT_BATCH) { throw OMIT_BATCH } log.error( - `applying "${event.type}" event to read-model "${readModelName}" failed` + `applying "${event.type}" event to the "${readModelName}" read model failed` ) log.error(readModelError.message) log.verbose(readModelError.stack) @@ -290,7 +290,7 @@ summaryError.stack = readModelError.stack log.verbose( - `Throwing error for feeding read-model "${readModelName}"`, + `Throwing error for feeding the "${readModelName}" read model`, summaryError ) throw summaryError @@ -622,7 +622,7 @@ ): Promise => { const readModelName = interop.name if (pool.isDisposed) { - throw new Error(`read-model "${readModelName}" is disposed`) + throw new Error(`the "${readModelName}" read model is disposed`) } let result = null @@ -883,7 +883,7 @@ ): Promise => { const readModelName = interop.name if (pool.isDisposed) { - throw new Error(`read-model "${readModelName}" is 
disposed`) + throw new Error(`read model "${readModelName}" is disposed`) } pool.isDisposed = true @@ -911,7 +911,7 @@ const wrapReadModel = ({ const connector = readModelConnectors[interop.connectorName] if (connector == null) { throw new Error( - `connector "${interop.connectorName}" for read-model "${interop.name}" does not exist` + `connector "${interop.connectorName}" for read model "${interop.name}" does not exist` ) } @@ -962,7 +962,7 @@ const wrapReadModel = ({ ) ) - log.debug(`read-model wrapped successfully`) + log.debug(`read model wrapped successfully`) return Object.freeze(api) } diff --git a/packages/runtime/runtime/src/common/wrap-trie.js b/packages/runtime/runtime/src/common/wrap-trie.js index 04956b7220..7fce29334f 100644 --- a/packages/runtime/runtime/src/common/wrap-trie.js +++ b/packages/runtime/runtime/src/common/wrap-trie.js @@ -34,6 +34,9 @@ const wrapTrie = (apiHandlers, rootPath) => { .define(getRootBasedUrl(rootPath, '/api/commands')) .handle('POST', commandHandler) + trie + .define(getRootBasedUrl(rootPath, '/uploader')) + .handle('OPTIONS', uploaderHandler) trie .define(getRootBasedUrl(rootPath, '/uploader')) .handle('POST', uploaderHandler) diff --git a/packages/runtime/runtime/test/cloud-entry.test.js b/packages/runtime/runtime/test/cloud-entry.test.js index 4d811851f8..c8dc79b13d 100644 --- a/packages/runtime/runtime/test/cloud-entry.test.js +++ b/packages/runtime/runtime/test/cloud-entry.test.js @@ -414,7 +414,7 @@ describe('Cloud entry', () => { expect(result.statusCode).toEqual(409) expect(result.headers).toEqual({ 'Content-Type': 'text/plain' }) - expect(result.body).toContain('is not actual at the moment') + expect(result.body).toContain('is currently out of date') }) test('should fail command via POST /"rootPath"/api/commands/ with CommandError', async () => {