diff --git a/.eslintrc.js b/.eslintrc.js index 73687a1364c..a1b86acba3f 100644 --- a/.eslintrc.js +++ b/.eslintrc.js @@ -53,27 +53,7 @@ module.exports = { 'packages/react-native/__tests__', 'packages/rtn-push-notification/__tests__', 'packages/rtn-web-browser/__tests__', - 'packages/storage/__tests__', - // will enable lint by packages - // 'adapter-nextjs', - // 'packages/analytics', - // 'packages/api', - // 'packages/api-graphql', - // 'packages/api-rest', - // 'packages/auth', - // 'packages/aws-amplify', - // 'packages/core', - 'packages/datastore', - 'packages/datastore-storage-adapter', - // 'packages/geo', - // 'packages/interactions', - // 'packages/notifications', - // 'packages/predictions', - // 'packages/pubsub', - // 'packages/react-native', - // 'packages/rtn-push-notification', - // 'packages/rtn-web-browser', - // 'packages/storage', + // 'packages/storage/__tests__', ], rules: { camelcase: [ diff --git a/.husky/pre-commit b/.husky/pre-commit new file mode 100644 index 00000000000..5b6c82776a8 --- /dev/null +++ b/.husky/pre-commit @@ -0,0 +1,2 @@ +# Run `eslint --fix` against staged files +yarn lint-staged diff --git a/.lintstagedrc.mjs b/.lintstagedrc.mjs new file mode 100644 index 00000000000..f0c53374923 --- /dev/null +++ b/.lintstagedrc.mjs @@ -0,0 +1,3 @@ +export default { + "*.{ts,tsx}": "eslint --fix" +} diff --git a/license_config.json b/license_config.json index f0bac69b3a4..eb706af57ea 100644 --- a/license_config.json +++ b/license_config.json @@ -38,7 +38,8 @@ "**/Gemfile", "**/.rollup.cache", "**/rollup.config.mjs", - "rollup" + "rollup", + ".husky" ], "ignoreFile": ".gitignore", "license": "license_header.txt", diff --git a/package.json b/package.json index 790927a9ca8..8e44eb37c33 100644 --- a/package.json +++ b/package.json @@ -32,7 +32,7 @@ "publish:v5-stable": "lerna publish --conventional-commits --yes --dist-tag=stable-5 --message 'chore(release): Publish [ci skip]' --no-verify-access", "publish:verdaccio": "lerna publish --canary --force-publish --no-push --dist-tag=unstable --preid=unstable --yes", "ts-coverage": "lerna run ts-coverage", - "prepare": "./scripts/set-preid-versions.sh" + "prepare": "husky && ./scripts/set-preid-versions.sh" }, "workspaces": { "packages": [ @@ -103,11 +103,13 @@ "eslint-plugin-promise": "^6.1.1", "eslint-plugin-unused-imports": "^3.0.0", "glob": "^10.3.10", + "husky": "^9.0.11", "jest": "^29.7.0", "jest-environment-jsdom": "^29.7.0", "json-loader": "^0.5.7", "lerna": "^7.4.2", "license-check-and-add": "^4.0.5", + "lint-staged": "^15.2.2", "mkdirp": "^3.0.1", "prettier": "^3.2.5", "rimraf": "^2.6.2", @@ -116,8 +118,6 @@ "terser-webpack-plugin": "^5.3.6", "ts-jest": "^29.1.1", "ts-loader": "^9.4.3", - "tslint": "^5.7.0", - "tslint-config-airbnb": "^5.8.0", "typedoc": "0.25.8", "typedoc-plugin-extras": "^3.0.0", "typedoc-plugin-missing-exports": "^2.2.0", diff --git a/packages/api-graphql/__tests__/fixtures/with-types/API.ts b/packages/api-graphql/__tests__/fixtures/with-types/API.ts index 4219077952c..0cd5bdc0bfd 100644 --- a/packages/api-graphql/__tests__/fixtures/with-types/API.ts +++ b/packages/api-graphql/__tests__/fixtures/with-types/API.ts @@ -1,4 +1,3 @@ -/* tslint:disable */ /* eslint-disable */ // This file was automatically generated and should not be edited. 
diff --git a/packages/api-graphql/__tests__/fixtures/with-types/mutations.ts b/packages/api-graphql/__tests__/fixtures/with-types/mutations.ts index ee1f361896a..ef885a3cf09 100644 --- a/packages/api-graphql/__tests__/fixtures/with-types/mutations.ts +++ b/packages/api-graphql/__tests__/fixtures/with-types/mutations.ts @@ -1,4 +1,3 @@ -/* tslint:disable */ /* eslint-disable */ // this is an auto generated file. This will be overwritten diff --git a/packages/api-graphql/__tests__/fixtures/with-types/queries.ts b/packages/api-graphql/__tests__/fixtures/with-types/queries.ts index ad0f73c02fb..c728a1a9334 100644 --- a/packages/api-graphql/__tests__/fixtures/with-types/queries.ts +++ b/packages/api-graphql/__tests__/fixtures/with-types/queries.ts @@ -1,4 +1,3 @@ -/* tslint:disable */ /* eslint-disable */ // this is an auto generated file. This will be overwritten diff --git a/packages/api-graphql/__tests__/fixtures/with-types/subscriptions.ts b/packages/api-graphql/__tests__/fixtures/with-types/subscriptions.ts index 89b51c9b56e..e0923a59268 100644 --- a/packages/api-graphql/__tests__/fixtures/with-types/subscriptions.ts +++ b/packages/api-graphql/__tests__/fixtures/with-types/subscriptions.ts @@ -1,4 +1,3 @@ -/* tslint:disable */ /* eslint-disable */ // this is an auto generated file. This will be overwritten diff --git a/packages/api-graphql/__tests__/fixtures/without-types/API.ts b/packages/api-graphql/__tests__/fixtures/without-types/API.ts index 4219077952c..0cd5bdc0bfd 100644 --- a/packages/api-graphql/__tests__/fixtures/without-types/API.ts +++ b/packages/api-graphql/__tests__/fixtures/without-types/API.ts @@ -1,4 +1,3 @@ -/* tslint:disable */ /* eslint-disable */ // This file was automatically generated and should not be edited. diff --git a/packages/api-graphql/__tests__/fixtures/without-types/mutations.ts b/packages/api-graphql/__tests__/fixtures/without-types/mutations.ts index 2388cada15a..2d31c015a6d 100644 --- a/packages/api-graphql/__tests__/fixtures/without-types/mutations.ts +++ b/packages/api-graphql/__tests__/fixtures/without-types/mutations.ts @@ -1,4 +1,3 @@ -/* tslint:disable */ /* eslint-disable */ // this is an auto generated file. This will be overwritten diff --git a/packages/api-graphql/__tests__/fixtures/without-types/queries.ts b/packages/api-graphql/__tests__/fixtures/without-types/queries.ts index 621fcf76404..c48af17b3a1 100644 --- a/packages/api-graphql/__tests__/fixtures/without-types/queries.ts +++ b/packages/api-graphql/__tests__/fixtures/without-types/queries.ts @@ -1,4 +1,3 @@ -/* tslint:disable */ /* eslint-disable */ // this is an auto generated file. This will be overwritten diff --git a/packages/api-graphql/__tests__/fixtures/without-types/subscriptions.ts b/packages/api-graphql/__tests__/fixtures/without-types/subscriptions.ts index b482ea37209..797c4249f72 100644 --- a/packages/api-graphql/__tests__/fixtures/without-types/subscriptions.ts +++ b/packages/api-graphql/__tests__/fixtures/without-types/subscriptions.ts @@ -1,4 +1,3 @@ -/* tslint:disable */ /* eslint-disable */ // this is an auto generated file. 
This will be overwritten diff --git a/packages/api-graphql/package.json b/packages/api-graphql/package.json index 2f888107e07..3a9b3460bf1 100644 --- a/packages/api-graphql/package.json +++ b/packages/api-graphql/package.json @@ -15,7 +15,7 @@ }, "scripts": { "test": "npm run lint && jest -w 1 --coverage --logHeapUsage", - "test:watch": "tslint 'src/**/*.ts' && jest -w 1 --watch", + "test:watch": "jest -w 1 --watch", "build-with-test": "npm test && npm build", "build:umd": "webpack && webpack --config ./webpack.config.dev.js", "build:esm-cjs": "rollup --forceExit -c rollup.config.mjs", diff --git a/packages/api-rest/package.json b/packages/api-rest/package.json index ac6dbdd3004..cb8d2e60a35 100644 --- a/packages/api-rest/package.json +++ b/packages/api-rest/package.json @@ -12,7 +12,7 @@ }, "scripts": { "test": "npm run lint && jest -w 1 --coverage --logHeapUsage", - "test:watch": "tslint 'src/**/*.ts' && jest -w 1 --watch", + "test:watch": "jest -w 1 --watch", "build-with-test": "npm test && npm build", "build:umd": "webpack && webpack --config ./webpack.config.dev.js", "build:esm-cjs": "rollup --forceExit -c rollup.config.mjs", diff --git a/packages/aws-amplify/package.json b/packages/aws-amplify/package.json index 2674ab2c102..3c6edc74114 100644 --- a/packages/aws-amplify/package.json +++ b/packages/aws-amplify/package.json @@ -293,7 +293,7 @@ "name": "[Analytics] record (Pinpoint)", "path": "./dist/esm/analytics/index.mjs", "import": "{ record }", - "limit": "17.05 kB" + "limit": "17.08 kB" }, { "name": "[Analytics] record (Kinesis)", @@ -317,7 +317,7 @@ "name": "[Analytics] identifyUser (Pinpoint)", "path": "./dist/esm/analytics/index.mjs", "import": "{ identifyUser }", - "limit": "15.53 kB" + "limit": "15.57 kB" }, { "name": "[Analytics] enable", diff --git a/packages/core/__tests__/providers/pinpoint/apis/testUtils/getExpectedInput.ts b/packages/core/__tests__/providers/pinpoint/apis/testUtils/getExpectedInput.ts index 4fe37a696e8..c9e1e8dd8d3 100644 --- a/packages/core/__tests__/providers/pinpoint/apis/testUtils/getExpectedInput.ts +++ b/packages/core/__tests__/providers/pinpoint/apis/testUtils/getExpectedInput.ts @@ -3,7 +3,6 @@ import { appId, - clientDemographic, endpointId as defaultEndpointId, uuid, } from '../../testUtils/data'; @@ -12,7 +11,7 @@ export const getExpectedInput = ({ address, attributes, channelType, - demographic = clientDemographic as any, + demographic, endpointId = defaultEndpointId, location, metrics, @@ -28,30 +27,36 @@ export const getExpectedInput = ({ EffectiveDate: expect.any(String), ChannelType: channelType, Address: address, - Attributes: attributes, - Demographic: { - AppVersion: demographic.appVersion, - Locale: demographic.locale, - Make: demographic.make, - Model: demographic.model, - ModelVersion: demographic.modelVersion ?? demographic.version, - Platform: demographic.platform, - PlatformVersion: demographic.platformVersion, - Timezone: demographic.timezone, - }, - Location: { - City: location?.city, - Country: location?.country, - Latitude: location?.latitude, - Longitude: location?.longitude, - PostalCode: location?.postalCode, - Region: location?.region, - }, + ...(attributes && { Attributes: attributes }), + ...(demographic && { + Demographic: { + AppVersion: demographic.appVersion, + Locale: demographic.locale, + Make: demographic.make, + Model: demographic.model, + ModelVersion: demographic.modelVersion ?? 
demographic.version, + Platform: demographic.platform, + PlatformVersion: demographic.platformVersion, + Timezone: demographic.timezone, + }, + }), + ...(location && { + Location: { + City: location?.city, + Country: location?.country, + Latitude: location?.latitude, + Longitude: location?.longitude, + PostalCode: location?.postalCode, + Region: location?.region, + }, + }), Metrics: metrics, OptOut: optOut, - User: { - UserId: userId, - UserAttributes: userAttributes, - }, + ...((userId || userAttributes) && { + User: { + UserId: userId, + UserAttributes: userAttributes, + }, + }), }), }); diff --git a/packages/core/__tests__/providers/pinpoint/apis/updateEndpoint.test.ts b/packages/core/__tests__/providers/pinpoint/apis/updateEndpoint.test.ts index 261344cfede..22a70814b92 100644 --- a/packages/core/__tests__/providers/pinpoint/apis/updateEndpoint.test.ts +++ b/packages/core/__tests__/providers/pinpoint/apis/updateEndpoint.test.ts @@ -17,7 +17,9 @@ import { clientDemographic, credentials, endpointId, + identityId, region, + userAttributes, userId, userProfile, uuid, @@ -130,7 +132,8 @@ describe('Pinpoint Provider API: updateEndpoint', () => { ); }); - it('merges demographics', async () => { + it('merges demographics with client info on endpoint creation', async () => { + mockGetEndpointId.mockReturnValue(undefined); const partialDemographic = { ...demographic } as any; delete partialDemographic.make; delete partialDemographic.model; @@ -146,6 +149,7 @@ expect(mockClientUpdateEndpoint).toHaveBeenCalledWith( { credentials, region }, getExpectedInput({ + endpointId: createdEndpointId, demographic: { ...demographic, make: clientDemographic.make, @@ -155,6 +159,58 @@ ); }); + it('does not merge demographics with client info on endpoint update', async () => { + const partialDemographic = { ...demographic } as any; + delete partialDemographic.make; + delete partialDemographic.model; + await updateEndpoint({ + appId, + category, + credentials, + region, + userProfile: { + demographic: partialDemographic, + }, + }); + expect(mockClientUpdateEndpoint).toHaveBeenCalledWith( + { credentials, region }, + getExpectedInput({ demographic: partialDemographic }), + ); + }); + + it('falls back to identity id on endpoint creation', async () => { + mockGetEndpointId.mockReturnValue(undefined); + await updateEndpoint({ + appId, + category, + credentials, + identityId, + region, + }); + expect(mockClientUpdateEndpoint).toHaveBeenCalledWith( + { credentials, region }, + getExpectedInput({ + endpointId: createdEndpointId, + userId: identityId, + }), + ); + }); + + it('does not fall back to identity id on endpoint update', async () => { + await updateEndpoint({ + appId, + category, + credentials, + identityId, + region, + userAttributes, + }); + expect(mockClientUpdateEndpoint).toHaveBeenCalledWith( + { credentials, region }, + getExpectedInput({ userAttributes }), + ); + }); + it('creates an endpoint if one is not already cached', async () => { mockGetEndpointId.mockReturnValue(undefined); await updateEndpoint({ appId, category, credentials, region }); diff --git a/packages/core/__tests__/providers/pinpoint/testUtils/data.ts b/packages/core/__tests__/providers/pinpoint/testUtils/data.ts index 677d0913477..1b6a68b9456 100644 --- a/packages/core/__tests__/providers/pinpoint/testUtils/data.ts +++ b/packages/core/__tests__/providers/pinpoint/testUtils/data.ts @@ -27,6 +27,9 @@ export const event = { };
export const identityId = 'identity-id'; export const region = 'region'; +export const userAttributes = { + attr: ['attr-value-one', 'attr-value-two'], +}; export const userId = 'user-id'; export const userProfile = { customProperties: { diff --git a/packages/core/src/providers/pinpoint/apis/updateEndpoint.ts b/packages/core/src/providers/pinpoint/apis/updateEndpoint.ts index 88a9370e9f8..100e1de4fe1 100644 --- a/packages/core/src/providers/pinpoint/apis/updateEndpoint.ts +++ b/packages/core/src/providers/pinpoint/apis/updateEndpoint.ts @@ -1,12 +1,13 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -import { amplifyUuid } from '../../../utils/amplifyUuid'; -import { getClientInfo } from '../../../utils/getClientInfo'; import { UpdateEndpointInput, updateEndpoint as clientUpdateEndpoint, } from '../../../awsClients/pinpoint'; +import { UserProfile } from '../../../types'; +import { amplifyUuid } from '../../../utils/amplifyUuid'; +import { getClientInfo } from '../../../utils/getClientInfo'; import { PinpointUpdateEndpointInput } from '../types'; import { cacheEndpointId } from '../utils/cacheEndpointId'; import { @@ -46,22 +47,34 @@ export const updateEndpoint = async ({ name, plan, } = userProfile ?? {}; - const clientInfo = getClientInfo(); + + // only automatically populate the endpoint with client info and identity id upon endpoint creation to + // avoid overwriting the endpoint with these values every time the endpoint is updated + const demographicsFromClientInfo: UserProfile['demographic'] = {}; + const resolvedUserId = createdEndpointId ? userId ?? identityId : userId; + if (createdEndpointId) { + const clientInfo = getClientInfo(); + demographicsFromClientInfo.appVersion = clientInfo.appVersion; + demographicsFromClientInfo.make = clientInfo.make; + demographicsFromClientInfo.model = clientInfo.model; + demographicsFromClientInfo.modelVersion = clientInfo.version; + demographicsFromClientInfo.platform = clientInfo.platform; + } const mergedDemographic = { - appVersion: clientInfo.appVersion, - make: clientInfo.make, - model: clientInfo.model, - modelVersion: clientInfo.version, - platform: clientInfo.platform, + ...demographicsFromClientInfo, ...demographic, }; - const shouldAddAttributes = email || customProperties || name || plan; const attributes = { ...(email && { email: [email] }), ...(name && { name: [name] }), ...(plan && { plan: [plan] }), ...customProperties, }; + + const shouldAddDemographics = createdEndpointId || demographic; + const shouldAddAttributes = email || customProperties || name || plan; + const shouldAddUser = resolvedUserId || userAttributes; + const input: UpdateEndpointInput = { ApplicationId: appId, EndpointId: endpointId ?? createdEndpointId, @@ -70,31 +83,37 @@ export const updateEndpoint = async ({ EffectiveDate: new Date().toISOString(), ChannelType: channelType, Address: address, - Attributes: shouldAddAttributes ? 
attributes : undefined, - Demographic: { - AppVersion: mergedDemographic.appVersion, - Locale: mergedDemographic.locale, - Make: mergedDemographic.make, - Model: mergedDemographic.model, - ModelVersion: mergedDemographic.modelVersion, - Platform: mergedDemographic.platform, - PlatformVersion: mergedDemographic.platformVersion, - Timezone: mergedDemographic.timezone, - }, - Location: { - City: location?.city, - Country: location?.country, - Latitude: location?.latitude, - Longitude: location?.longitude, - PostalCode: location?.postalCode, - Region: location?.region, - }, + ...(shouldAddAttributes && { Attributes: attributes }), + ...(shouldAddDemographics && { + Demographic: { + AppVersion: mergedDemographic.appVersion, + Locale: mergedDemographic.locale, + Make: mergedDemographic.make, + Model: mergedDemographic.model, + ModelVersion: mergedDemographic.modelVersion, + Platform: mergedDemographic.platform, + PlatformVersion: mergedDemographic.platformVersion, + Timezone: mergedDemographic.timezone, + }, + }), + ...(location && { + Location: { + City: location.city, + Country: location.country, + Latitude: location.latitude, + Longitude: location.longitude, + PostalCode: location.postalCode, + Region: location.region, + }, + }), Metrics: metrics, OptOut: optOut, - User: { - UserId: userId ?? identityId, - UserAttributes: userAttributes, - }, + ...(shouldAddUser && { + User: { + UserId: resolvedUserId, + UserAttributes: userAttributes, + }, + }), }, }; try { diff --git a/packages/core/src/providers/pinpoint/index.ts b/packages/core/src/providers/pinpoint/index.ts index 86a1a5b8d82..7deb03e4fc4 100644 --- a/packages/core/src/providers/pinpoint/index.ts +++ b/packages/core/src/providers/pinpoint/index.ts @@ -7,4 +7,4 @@ export { PinpointServiceOptions, UpdateEndpointException, } from './types'; -export { resolveEndpointId } from './utils'; +export { getEndpointId, resolveEndpointId } from './utils'; diff --git a/packages/datastore-storage-adapter/.npmignore b/packages/datastore-storage-adapter/.npmignore index 92866a5d2e7..32131a48f33 100644 --- a/packages/datastore-storage-adapter/.npmignore +++ b/packages/datastore-storage-adapter/.npmignore @@ -8,6 +8,5 @@ node_modules/** *.log tsconfig.json tsfmt.json -tslint.json typeDoc.js -webpack.config.js \ No newline at end of file +webpack.config.js diff --git a/packages/datastore-storage-adapter/package.json b/packages/datastore-storage-adapter/package.json index ceb169e7d60..44d8746d563 100644 --- a/packages/datastore-storage-adapter/package.json +++ b/packages/datastore-storage-adapter/package.json @@ -18,7 +18,8 @@ "build": "npm run clean && npm run build:esm-cjs && npm run build:umd", "clean": "rimraf dist lib lib-esm", "format": "echo \"Not implemented\"", - "lint": "tslint '{__tests__,src}/**/*.ts' && npm run ts-coverage", + "lint": "eslint '**/*.{ts,tsx}' && npm run ts-coverage", + "lint:fix": "eslint '**/*.{ts,tsx}' --fix", "ts-coverage": "typescript-coverage-report -p ./tsconfig.build.json -t 94.16" }, "repository": { diff --git a/packages/datastore-storage-adapter/src/ExpoSQLiteAdapter/ExpoSQLiteAdapter.ts b/packages/datastore-storage-adapter/src/ExpoSQLiteAdapter/ExpoSQLiteAdapter.ts index c767ed80981..785e4bc8901 100644 --- a/packages/datastore-storage-adapter/src/ExpoSQLiteAdapter/ExpoSQLiteAdapter.ts +++ b/packages/datastore-storage-adapter/src/ExpoSQLiteAdapter/ExpoSQLiteAdapter.ts @@ -1,6 +1,7 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
// SPDX-License-Identifier: Apache-2.0 import { CommonSQLiteAdapter } from '../common/CommonSQLiteAdapter'; + import ExpoSQLiteDatabase from './ExpoSQLiteDatabase'; const ExpoSQLiteAdapter: CommonSQLiteAdapter = new CommonSQLiteAdapter( diff --git a/packages/datastore-storage-adapter/src/ExpoSQLiteAdapter/ExpoSQLiteDatabase.ts b/packages/datastore-storage-adapter/src/ExpoSQLiteAdapter/ExpoSQLiteDatabase.ts index b4536b5f838..4271f3b4387 100644 --- a/packages/datastore-storage-adapter/src/ExpoSQLiteAdapter/ExpoSQLiteDatabase.ts +++ b/packages/datastore-storage-adapter/src/ExpoSQLiteAdapter/ExpoSQLiteDatabase.ts @@ -3,7 +3,8 @@ import { ConsoleLogger } from '@aws-amplify/core'; import { PersistentModel } from '@aws-amplify/datastore'; import { deleteAsync, documentDirectory } from 'expo-file-system'; -import { openDatabase, WebSQLDatabase } from 'expo-sqlite'; +import { WebSQLDatabase, openDatabase } from 'expo-sqlite'; + import { DB_NAME } from '../common/constants'; import { CommonSQLiteDatabase, ParameterizedStatement } from '../common/types'; @@ -11,9 +12,9 @@ const logger = new ConsoleLogger('ExpoSQLiteDatabase'); /* -Note: -ExpoSQLite transaction error callbacks require returning a boolean value to indicate whether the -error was handled or not. Returning a true value indicates the error was handled and does not +Note: +ExpoSQLite transaction error callbacks require returning a boolean value to indicate whether the +error was handled or not. Returning a true value indicates the error was handled and does not rollback the whole transaction. */ @@ -56,6 +57,7 @@ class ExpoSQLiteDatabase implements CommonSQLiteDatabase { params: (string | number)[], ): Promise { const results: T[] = await this.getAll(statement, params); + return results[0]; } @@ -74,6 +76,7 @@ class ExpoSQLiteDatabase implements CommonSQLiteDatabase { (_, error) => { reject(error); logger.warn(error); + return true; }, ); @@ -93,6 +96,7 @@ class ExpoSQLiteDatabase implements CommonSQLiteDatabase { (_, error) => { reject(error); logger.warn(error); + return true; }, ); @@ -101,24 +105,27 @@ class ExpoSQLiteDatabase implements CommonSQLiteDatabase { } public batchQuery( - queryParameterizedStatements: Set = new Set(), + queryParameterizedStatements = new Set(), ): Promise { - return new Promise((resolveTransaction, rejectTransaction) => { + return new Promise((resolve, reject) => { + const resolveTransaction = resolve; + const rejectTransaction = reject; this.db.transaction(async transaction => { try { const results: any[] = await Promise.all( [...queryParameterizedStatements].map( ([statement, params]) => - new Promise((resolve, reject) => { + new Promise((_resolve, _reject) => { transaction.executeSql( statement, params, (_, result) => { - resolve(result.rows._array[0]); + _resolve(result.rows._array[0]); }, (_, error) => { - reject(error); + _reject(error); logger.warn(error); + return true; }, ); @@ -135,26 +142,29 @@ class ExpoSQLiteDatabase implements CommonSQLiteDatabase { } public batchSave( - saveParameterizedStatements: Set = new Set(), + saveParameterizedStatements = new Set(), deleteParameterizedStatements?: Set, ): Promise { - return new Promise((resolveTransaction, rejectTransaction) => { + return new Promise((resolve, reject) => { + const resolveTransaction = resolve; + const rejectTransaction = reject; this.db.transaction(async transaction => { try { // await for all sql statements promises to resolve await Promise.all( [...saveParameterizedStatements].map( ([statement, params]) => - new Promise((resolve, 
reject) => { + new Promise((_resolve, _reject) => { transaction.executeSql( statement, params, () => { - resolve(null); + _resolve(null); }, (_, error) => { - reject(error); + _reject(error); logger.warn(error); + return true; }, ); @@ -165,20 +175,21 @@ class ExpoSQLiteDatabase implements CommonSQLiteDatabase { await Promise.all( [...deleteParameterizedStatements].map( ([statement, params]) => - new Promise((resolve, reject) => + new Promise((_resolve, _reject) => { transaction.executeSql( statement, params, () => { - resolve(null); + _resolve(null); }, (_, error) => { - reject(error); + _reject(error); logger.warn(error); + return true; }, - ), - ), + ); + }), ), ); } @@ -198,33 +209,37 @@ class ExpoSQLiteDatabase implements CommonSQLiteDatabase { const [queryStatement, queryParams] = queryParameterizedStatement; const [deleteStatement, deleteParams] = deleteParameterizedStatement; - return new Promise((resolveTransaction, rejectTransaction) => { + return new Promise((resolve, reject) => { + const resolveTransaction = resolve; + const rejectTransaction = reject; this.db.transaction(async transaction => { try { - const result: T[] = await new Promise((resolve, reject) => { + const result: T[] = await new Promise((_resolve, _reject) => { transaction.executeSql( queryStatement, queryParams, - (_, result) => { - resolve(result.rows._array || []); + (_, sqlResult) => { + _resolve(sqlResult.rows._array || []); }, (_, error) => { - reject(error); + _reject(error); logger.warn(error); + return true; }, ); }); - await new Promise((resolve, reject) => { + await new Promise((_resolve, _reject) => { transaction.executeSql( deleteStatement, deleteParams, () => { - resolve(null); + _resolve(null); }, (_, error) => { - reject(error); + _reject(error); logger.warn(error); + return true; }, ); @@ -239,21 +254,24 @@ class ExpoSQLiteDatabase implements CommonSQLiteDatabase { } private executeStatements(statements: string[]): Promise { - return new Promise((resolveTransaction, rejectTransaction) => { + return new Promise((resolve, reject) => { + const resolveTransaction = resolve; + const rejectTransaction = reject; this.db.transaction(async transaction => { try { await Promise.all( statements.map( statement => - new Promise((resolve, reject) => { + new Promise((_resolve, _reject) => { transaction.executeSql( statement, [], () => { - resolve(null); + _resolve(null); }, (_, error) => { - reject(error); + _reject(error); + return true; }, ); diff --git a/packages/datastore-storage-adapter/src/SQLiteAdapter/SQLiteAdapter.ts b/packages/datastore-storage-adapter/src/SQLiteAdapter/SQLiteAdapter.ts index b84e39ffb47..8d2823f0f05 100644 --- a/packages/datastore-storage-adapter/src/SQLiteAdapter/SQLiteAdapter.ts +++ b/packages/datastore-storage-adapter/src/SQLiteAdapter/SQLiteAdapter.ts @@ -1,6 +1,7 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
// SPDX-License-Identifier: Apache-2.0 import { CommonSQLiteAdapter } from '../common/CommonSQLiteAdapter'; + import SQLiteDatabase from './SQLiteDatabase'; const SQLiteAdapter: CommonSQLiteAdapter = new CommonSQLiteAdapter( diff --git a/packages/datastore-storage-adapter/src/SQLiteAdapter/SQLiteDatabase.ts b/packages/datastore-storage-adapter/src/SQLiteAdapter/SQLiteDatabase.ts index 869f8be816f..c43483df97b 100644 --- a/packages/datastore-storage-adapter/src/SQLiteAdapter/SQLiteDatabase.ts +++ b/packages/datastore-storage-adapter/src/SQLiteAdapter/SQLiteDatabase.ts @@ -3,6 +3,7 @@ import SQLite from 'react-native-sqlite-storage'; import { ConsoleLogger } from '@aws-amplify/core'; import { PersistentModel } from '@aws-amplify/datastore'; + import { DB_NAME } from '../common/constants'; import { CommonSQLiteDatabase, ParameterizedStatement } from '../common/types'; @@ -16,7 +17,7 @@ if (ConsoleLogger.LOG_LEVEL === 'DEBUG') { /* -Note: +Note: I purposely avoided using arrow functions () => {} in this class, Because I ran into issues with them in some of the SQLite method callbacks @@ -41,7 +42,7 @@ class SQLiteDatabase implements CommonSQLiteDatabase { } public async createSchema(statements: string[]): Promise { - return await this.executeStatements(statements); + await this.executeStatements(statements); } public async clear(): Promise { @@ -56,6 +57,7 @@ class SQLiteDatabase implements CommonSQLiteDatabase { params: (string | number)[], ): Promise { const results: T[] = await this.getAll(statement, params); + return results[0]; } @@ -138,7 +140,14 @@ class SQLiteDatabase implements CommonSQLiteDatabase { }, logger.warn, ); - tx.executeSql(deleteStatement, deleteParams, () => {}, logger.warn); + tx.executeSql( + deleteStatement, + deleteParams, + () => { + // no-op + }, + logger.warn, + ); }); return results; diff --git a/packages/datastore-storage-adapter/src/common/CommonSQLiteAdapter.ts b/packages/datastore-storage-adapter/src/common/CommonSQLiteAdapter.ts index 1caab5f0c8a..e0f49b0c591 100644 --- a/packages/datastore-storage-adapter/src/common/CommonSQLiteAdapter.ts +++ b/packages/datastore-storage-adapter/src/common/CommonSQLiteAdapter.ts @@ -2,26 +2,13 @@ // SPDX-License-Identifier: Apache-2.0 import { ConsoleLogger } from '@aws-amplify/core'; import { - generateSchemaStatements, - queryByIdStatement, - modelUpdateStatement, - modelInsertStatement, - queryAllStatement, - queryOneStatement, - deleteByIdStatement, - deleteByPredicateStatement, -} from '../common/SQLiteUtils'; - -import { - StorageAdapter, + InternalSchema, ModelInstanceCreator, + ModelPredicate, ModelPredicateCreator, ModelSortPredicateCreator, - InternalSchema, - isPredicateObj, - ModelPredicate, - NamespaceResolver, NAMESPACES, + NamespaceResolver, OpType, PaginationInput, PersistentModel, @@ -29,12 +16,26 @@ import { PredicateObject, PredicatesGroup, QueryOne, + StorageAdapter, + isPredicateObj, utils, } from '@aws-amplify/datastore'; + +import { + deleteByIdStatement, + deleteByPredicateStatement, + generateSchemaStatements, + modelInsertStatement, + modelUpdateStatement, + queryAllStatement, + queryByIdStatement, + queryOneStatement, +} from '../common/SQLiteUtils'; + import { CommonSQLiteDatabase, - ParameterizedStatement, ModelInstanceMetadataWithId, + ParameterizedStatement, } from './types'; const { traverseModel, validatePredicate, isModelConstructor } = utils; @@ -49,6 +50,7 @@ export class CommonSQLiteAdapter implements StorageAdapter { namsespaceName: string, modelName: string, ) => 
PersistentModelConstructor; + private db: CommonSQLiteDatabase; private initPromise: Promise; private resolve: (value?: any) => void; @@ -68,12 +70,13 @@ export class CommonSQLiteAdapter implements StorageAdapter { ) => PersistentModelConstructor, ) { if (!this.initPromise) { - this.initPromise = new Promise((res, rej) => { - this.resolve = res; - this.reject = rej; + this.initPromise = new Promise((_resolve, _reject) => { + this.resolve = _resolve; + this.reject = _reject; }); } else { await this.initPromise; + return; } this.schema = theSchema; @@ -86,6 +89,7 @@ export class CommonSQLiteAdapter implements StorageAdapter { this.schema.namespaces.user.models, ).some(model => Object.values(model.fields).some(field => + // eslint-disable-next-line no-prototype-builtins field.association?.hasOwnProperty('targetNames'), ), ); @@ -155,13 +159,19 @@ export class CommonSQLiteAdapter implements StorageAdapter { const { modelName, item, instance } = resItem; const { id } = item; - const [queryStatement, params] = queryByIdStatement(id, modelName); - const fromDB = await this.db.get(queryStatement, params); + const [queryStatementForRestItem, paramsForRestItem] = queryByIdStatement( + id, + modelName, + ); + const fromDBForRestItem = await this.db.get( + queryStatementForRestItem, + paramsForRestItem, + ); const opType: OpType = - fromDB === undefined ? OpType.INSERT : OpType.UPDATE; + fromDBForRestItem === undefined ? OpType.INSERT : OpType.UPDATE; - const saveStatement = fromDB + const saveStatement = fromDBForRestItem ? modelUpdateStatement(instance, modelName) : modelInsertStatement(instance, modelName); @@ -205,6 +215,7 @@ export class CommonSQLiteAdapter implements StorageAdapter { for (const r of relations) { delete record[r.fieldName]; } + return this.modelInstanceCreator(modelConstructor, record); }); } @@ -228,9 +239,10 @@ export class CommonSQLiteAdapter implements StorageAdapter { const queryById = predicates && this.idFromPredicate(predicates); - const records: T[] = await (async () => { + const records: T[] = (await (async () => { if (queryById) { const record = await this.getById(tableName, queryById); + return record ? 
[record] : []; } @@ -242,10 +254,10 @@ export class CommonSQLiteAdapter implements StorageAdapter { page, ); - return await this.db.getAll(queryStatement, params); - })(); + return this.db.getAll(queryStatement, params); + })()) as T[]; - return await this.load(namespaceName, modelConstructor.name, records); + return this.load(namespaceName, modelConstructor.name, records); } private async getById( @@ -396,14 +408,15 @@ export class CommonSQLiteAdapter implements StorageAdapter { const { id, _deleted } = item; const { instance } = connectedModels.find( - ({ instance }) => instance.id === id, + ({ instance: connectedModelInstance }) => + connectedModelInstance.id === id, ); if (_deleted) { // create the delete statements right away const deleteStatement = deleteByIdStatement(instance.id, tableName); deleteStatements.add(deleteStatement); - result.push([(item), OpType.DELETE]); + result.push([item as unknown as T, OpType.DELETE]); } else { // query statements for the saves at first const queryStatement = queryByIdStatement(id, tableName); @@ -423,14 +436,14 @@ export class CommonSQLiteAdapter implements StorageAdapter { tableName, ); saveStatements.add(insertStatement); - result.push([(itemsToSave[idx]), OpType.INSERT]); + result.push([itemsToSave[idx] as unknown as T, OpType.INSERT]); } else { const updateStatement = modelUpdateStatement( itemsToSave[idx], tableName, ); saveStatements.add(updateStatement); - result.push([(itemsToSave[idx]), OpType.UPDATE]); + result.push([itemsToSave[idx] as unknown as T, OpType.UPDATE]); } }); diff --git a/packages/datastore-storage-adapter/src/common/SQLiteUtils.ts b/packages/datastore-storage-adapter/src/common/SQLiteUtils.ts index 431b4918fff..c89cfc49bb6 100644 --- a/packages/datastore-storage-adapter/src/common/SQLiteUtils.ts +++ b/packages/datastore-storage-adapter/src/common/SQLiteUtils.ts @@ -1,24 +1,24 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 import { + GraphQLScalarType, InternalSchema, - SchemaModel, + ModelAttributeAuth, + ModelAuthRule, ModelField, PersistentModel, - isGraphQLScalarType, - QueryOne, + PredicateObject, PredicatesGroup, - isPredicateObj, + QueryOne, + SchemaModel, SortPredicatesGroup, - PredicateObject, - isPredicateGroup, + isGraphQLScalarType, + isModelAttributeAuth, isModelFieldType, + isPredicateGroup, + isPredicateObj, isTargetNameAssociation, - isModelAttributeAuth, - ModelAttributeAuth, - ModelAuthRule, utils, - GraphQLScalarType, } from '@aws-amplify/datastore'; import { ParameterizedStatement } from './types'; @@ -43,6 +43,7 @@ const updateSet: (model: any) => [any, any] = model => { .filter(([k]) => k !== 'id') .map(([k, v]) => { values.push(prepareValueForDML(v)); + return `"${k}"=?`; }) .join(', '); @@ -97,9 +98,10 @@ export function getSQLiteType( return 'TEXT'; case 'Float': return 'REAL'; - default: + default: { const _: never = scalar as never; throw new Error(`unknown type ${scalar as string}`); + } } } @@ -136,13 +138,14 @@ export const implicitAuthFieldsForModel: (model: SchemaModel) => string[] = ( const authFieldExplicitlyDefined = Object.values(model.fields).find( (f: ModelField) => f.name === authField, ); + return !authFieldExplicitlyDefined; }); }; export function modelCreateTableStatement( model: SchemaModel, - userModel: boolean = false, + userModel = false, ): string { // implicitly defined auth fields, e.g., `owner`, `groupsField`, etc. 
const implicitAuthFields = implicitAuthFieldsForModel(model); @@ -210,6 +213,7 @@ export function modelCreateTableStatement( const createTableStatement = `CREATE TABLE IF NOT EXISTS "${ model.name }" (${fields.join(', ')});`; + return createTableStatement; } @@ -316,7 +320,7 @@ export const whereConditionFromPredicateObject = ({ return [`"${field}" ${comparisonOperator} ?`, [operand]]; } - const logicalOperatorKey = operator; + const logicalOperatorKey = operator as keyof typeof logicalOperatorMap; const logicalOperator = logicalOperatorMap[logicalOperatorKey]; @@ -339,10 +343,11 @@ export const whereConditionFromPredicateObject = ({ case 'notContains': statement = [`instr("${field}", ?) ${logicalOperator}`, [operand]]; break; - default: + default: { const _: never = logicalOperatorKey; // Incorrect WHERE clause can result in data loss throw new Error('Cannot map predicate to a valid WHERE clause'); + } } return statement; @@ -361,13 +366,14 @@ export function whereClauseFromPredicate( return [whereClause, params]; function recurse( - predicate: PredicatesGroup | PredicateObject, - result = [], - params = [], + recursedPredicate: PredicatesGroup | PredicateObject, + recursedResult = [], + recursedParams = [], ): void { - if (isPredicateGroup(predicate)) { - const { type: groupType, predicates: groupPredicates } = predicate; - let filterType: string = ''; + if (isPredicateGroup(recursedPredicate)) { + const { type: groupType, predicates: groupPredicates } = + recursedPredicate; + let filterType = ''; let isNegation = false; switch (groupType) { case 'not': @@ -379,25 +385,26 @@ export function whereClauseFromPredicate( case 'or': filterType = 'OR'; break; - default: + default: { const _: never = groupType as never; throw new Error(`Invalid ${groupType}`); + } } const groupResult = []; for (const p of groupPredicates) { - recurse(p, groupResult, params); + recurse(p, groupResult, recursedParams); } - result.push( + recursedResult.push( `${isNegation ? 'NOT' : ''}(${groupResult.join(` ${filterType} `)})`, ); - } else if (isPredicateObj(predicate)) { + } else if (isPredicateObj(recursedPredicate)) { const [condition, conditionParams] = - whereConditionFromPredicateObject(predicate); + whereConditionFromPredicateObject(recursedPredicate); - result.push(condition); + recursedResult.push(condition); - params.push(...conditionParams); + recursedParams.push(...conditionParams); } } } @@ -423,7 +430,7 @@ export function orderByClauseFromSort( export function limitClauseFromPagination( limit: number, - page: number = 0, + page = 0, ): ParameterizedStatement { const params = [limit]; let clause = 'LIMIT ?'; @@ -483,6 +490,7 @@ export function deleteByIdStatement( tableName: string, ): ParameterizedStatement { const deleteStatement = `DELETE FROM "${tableName}" WHERE "id"=?`; + return [deleteStatement, [id]]; } @@ -498,5 +506,6 @@ export function deleteByPredicateStatement( statement += ` ${whereClause}`; params.push(...whereParams); } + return [statement, params]; } diff --git a/packages/datastore-storage-adapter/src/common/types.ts b/packages/datastore-storage-adapter/src/common/types.ts index 2957233176b..c1bce70b876 100644 --- a/packages/datastore-storage-adapter/src/common/types.ts +++ b/packages/datastore-storage-adapter/src/common/types.ts @@ -1,6 +1,6 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
// SPDX-License-Identifier: Apache-2.0 -import { PersistentModel, ModelInstanceMetadata } from '@aws-amplify/datastore'; +import { ModelInstanceMetadata, PersistentModel } from '@aws-amplify/datastore'; export interface CommonSQLiteDatabase { init(): Promise; diff --git a/packages/datastore-storage-adapter/src/index.ts b/packages/datastore-storage-adapter/src/index.ts index 064a0f2b12c..19ba93a509b 100644 --- a/packages/datastore-storage-adapter/src/index.ts +++ b/packages/datastore-storage-adapter/src/index.ts @@ -1,4 +1,5 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 import SQLiteAdapter from './SQLiteAdapter/SQLiteAdapter'; + export { SQLiteAdapter }; diff --git a/packages/datastore-storage-adapter/tslint.json b/packages/datastore-storage-adapter/tslint.json deleted file mode 100644 index 8eafab1d2b4..00000000000 --- a/packages/datastore-storage-adapter/tslint.json +++ /dev/null @@ -1,50 +0,0 @@ -{ - "defaultSeverity": "error", - "plugins": ["prettier"], - "extends": [], - "jsRules": {}, - "rules": { - "prefer-const": true, - "max-line-length": [true, 120], - "no-empty-interface": true, - "no-var-keyword": true, - "object-literal-shorthand": true, - "no-eval": true, - "space-before-function-paren": [ - true, - { - "anonymous": "never", - "named": "never" - } - ], - "no-parameter-reassignment": true, - "align": [true, "parameters"], - "no-duplicate-imports": true, - "one-variable-per-declaration": [false, "ignore-for-loop"], - "triple-equals": [true, "allow-null-check"], - "comment-format": [true, "check-space"], - "indent": [false], - "whitespace": [ - false, - "check-branch", - "check-decl", - "check-operator", - "check-preblock" - ], - "eofline": true, - "variable-name": [ - true, - "check-format", - "allow-pascal-case", - "allow-snake-case", - "allow-leading-underscore" - ], - "semicolon": [ - true, - "always", - "ignore-interfaces", - "ignore-bound-class-methods" - ] - }, - "rulesDirectory": [] -} diff --git a/packages/datastore/.npmignore b/packages/datastore/.npmignore index 92866a5d2e7..32131a48f33 100644 --- a/packages/datastore/.npmignore +++ b/packages/datastore/.npmignore @@ -8,6 +8,5 @@ node_modules/** *.log tsconfig.json tsfmt.json -tslint.json typeDoc.js -webpack.config.js \ No newline at end of file +webpack.config.js diff --git a/packages/datastore/__tests__/DataStore/sanityCheck.test.ts b/packages/datastore/__tests__/DataStore/sanityCheck.test.ts index 53fb6cddceb..281c4767db1 100644 --- a/packages/datastore/__tests__/DataStore/sanityCheck.test.ts +++ b/packages/datastore/__tests__/DataStore/sanityCheck.test.ts @@ -196,7 +196,6 @@ describe('DataStore sanity testing checks', () => { await DataStore.start(); }); - // tslint:disable-next-line: max-line-length test(`starting after unawaited clear results in a DX-friendly error (${connectedState}, ${environment})`, async () => { ({ DataStore, Post } = getDataStore({ online, isNode })); await DataStore.start(); @@ -210,7 +209,6 @@ describe('DataStore sanity testing checks', () => { await clearing; }); - // tslint:disable-next-line: max-line-length test(`starting after unawaited stop results in a DX-friendly error (${connectedState}, ${environment})`, async () => { ({ DataStore, Post } = getDataStore({ online, isNode })); await DataStore.start(); diff --git a/packages/datastore/package.json b/packages/datastore/package.json index c35621eacab..43b726a77f5 100644 --- a/packages/datastore/package.json +++ b/packages/datastore/package.json @@ -24,7 +24,8 @@ "clean": 
"npm run clean:size && rimraf dist lib lib-esm", "clean:size": "rimraf dual-publish-tmp tmp*", "format": "echo \"Not implemented\" && npm run ts-coverage", - "lint": "tslint '{__tests__,src}/**/*.ts' && npm run ts-coverage", + "lint": "eslint '**/*.{ts,tsx}' && npm run ts-coverage", + "lint:fix": "eslint '**/*.{ts,tsx}' --fix", "ts-coverage": "typescript-coverage-report -p ./tsconfig.build.json -t 92.05" }, "repository": { diff --git a/packages/datastore/src/authModeStrategies/multiAuthStrategy.ts b/packages/datastore/src/authModeStrategies/multiAuthStrategy.ts index 14ac0a1dbc3..c850b59da5c 100644 --- a/packages/datastore/src/authModeStrategies/multiAuthStrategy.ts +++ b/packages/datastore/src/authModeStrategies/multiAuthStrategy.ts @@ -1,14 +1,15 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 import { fetchAuthSession } from '@aws-amplify/core'; +import { GraphQLAuthMode } from '@aws-amplify/core/internals/utils'; + import { + AmplifyContext, AuthModeStrategy, + ModelAttributeAuthAllow, ModelAttributeAuthProperty, ModelAttributeAuthProvider, - ModelAttributeAuthAllow, - AmplifyContext, } from '../types'; -import { GraphQLAuthMode } from '@aws-amplify/core/internals/utils'; function getProviderFromRule( rule: ModelAttributeAuthProperty, @@ -21,6 +22,7 @@ function getProviderFromRule( if (rule.allow === 'public' && !rule.provider) { return ModelAttributeAuthProvider.API_KEY; } + return rule.provider!; } @@ -48,6 +50,7 @@ function sortAuthRulesWithPriority(rules: ModelAttributeAuthProperty[]) { providerSortPriority.indexOf(getProviderFromRule(b)) ); } + return ( allowSortPriority.indexOf(a.allow) - allowSortPriority.indexOf(b.allow) ); @@ -138,7 +141,7 @@ function getAuthRules({ export const multiAuthStrategy: ( amplifyContext: AmplifyContext, ) => AuthModeStrategy = - (amplifyContext: AmplifyContext) => + () => async ({ schema, modelName }) => { let currentUser; try { @@ -164,5 +167,6 @@ export const multiAuthStrategy: ( return getAuthRules({ currentUser, rules: sortedRules }); } } + return []; }; diff --git a/packages/datastore/src/datastore/datastore.ts b/packages/datastore/src/datastore/datastore.ts index 8e77e85b2e7..8882e9a4446 100644 --- a/packages/datastore/src/datastore/datastore.ts +++ b/packages/datastore/src/datastore/datastore.ts @@ -1,110 +1,108 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
// SPDX-License-Identifier: Apache-2.0 import { InternalAPI } from '@aws-amplify/api/internals'; -import { Amplify, Hub, Cache, ConsoleLogger } from '@aws-amplify/core'; - +import { Amplify, Cache, ConsoleLogger, Hub } from '@aws-amplify/core'; import { Draft, + Patch, + enablePatches, immerable, produce, setAutoFreeze, - enablePatches, - Patch, } from 'immer'; -import { amplifyUuid, isBrowser } from '@aws-amplify/core/internals/utils'; +import { + BackgroundProcessManager, + amplifyUuid, +} from '@aws-amplify/core/internals/utils'; import { Observable, SubscriptionLike, filter } from 'rxjs'; + import { defaultAuthStrategy, multiAuthStrategy } from '../authModeStrategies'; import { - isPredicatesAll, ModelPredicateCreator, ModelSortPredicateCreator, PredicateAll, + isPredicatesAll, } from '../predicates'; import { Adapter } from '../storage/adapter'; import { ExclusiveStorage as Storage } from '../storage/storage'; import { ModelRelationship } from '../storage/relationship'; import { ControlMessage, SyncEngine } from '../sync'; import { + AmplifyContext, AuthModeStrategy, + AuthModeStrategyType, ConflictHandler, DataStoreConfig, + DataStoreSnapshot, + ErrorHandler, GraphQLScalarType, + IdentifierFieldOrIdentifierObject, InternalSchema, - isGraphQLScalarType, - isSchemaModelWithAttributes, + ManagedIdentifier, ModelFieldType, ModelInit, ModelInstanceMetadata, ModelPredicate, - ModelField, - SortPredicate, + ModelPredicateExtender, MutableModel, NamespaceResolver, NonModelTypeConstructor, - ProducerPaginationInput, + ObserveQueryOptions, PaginationInput, PersistentModel, PersistentModelConstructor, - ProducerModelPredicate, + PersistentModelMetaData, + ProducerPaginationInput, + RecursiveModelPredicateExtender, Schema, SchemaModel, SchemaNamespace, SchemaNonModel, + SortPredicate, SubscriptionMessage, - DataStoreSnapshot, SyncConflict, SyncError, - TypeConstructorMap, - ErrorHandler, SyncExpression, - AuthModeStrategyType, - isNonModelFieldType, - isModelFieldType, - ObserveQueryOptions, - ManagedIdentifier, - PersistentModelMetaData, - IdentifierFieldOrIdentifierObject, + TypeConstructorMap, + isGraphQLScalarType, isIdentifierObject, - AmplifyContext, - isFieldAssociation, - RecursiveModelPredicateExtender, - ModelPredicateExtender, + isModelFieldType, + isNonModelFieldType, + isSchemaModelWithAttributes, } from '../types'; -// tslint:disable:no-duplicate-imports import type { __modelMeta__ } from '../types'; -import { isNode } from './utils'; - import { DATASTORE, - errorMessages, - establishRelationAndKeys, - isModelConstructor, - monotonicUlidFactory, + DeferredCallbackResolver, NAMESPACES, STORAGE, SYNC, USER, - isNullOrUndefined, - registerNonModelClass, - sortCompareFunction, - DeferredCallbackResolver, - inMemoryPagination, + errorMessages, + establishRelationAndKeys, extractPrimaryKeyFieldNames, extractPrimaryKeysAndValues, + getTimestampFields, + inMemoryPagination, isIdManaged, isIdOptionallyManaged, + isModelConstructor, + isNullOrUndefined, mergePatches, - getTimestampFields, + monotonicUlidFactory, + registerNonModelClass, + sortCompareFunction, } from '../util'; import { - recursivePredicateFor, - predicateFor, GroupCondition, internals, + predicateFor, + recursivePredicateFor, } from '../predicates/next'; import { getIdentifierValue } from '../sync/utils'; import DataStoreConnectivity from '../sync/datastoreConnectivity'; -import { BackgroundProcessManager } from '@aws-amplify/core/internals/utils'; + +import { isNode } from './utils'; setAutoFreeze(true); enablePatches(); @@ 
-113,10 +111,10 @@ const logger = new ConsoleLogger('DataStore'); const ulid = monotonicUlidFactory(Date.now()); -type SettingMetaData = { +interface SettingMetaData { identifier: ManagedIdentifier; readOnlyFields: never; -}; +} declare class Setting { public readonly [__modelMeta__]: SettingMetaData; constructor(init: ModelInit); @@ -124,6 +122,7 @@ declare class Setting { src: Setting, mutator: (draft: MutableModel) => void | Setting, ): Setting; + public readonly id: string; public readonly key: string; public readonly value: string; @@ -178,6 +177,7 @@ const namespaceResolver: NamespaceResolver = modelConstructor => { `Namespace Resolver for '${modelConstructor.name}' not found! This is probably a bug in '@amplify-js/datastore'.`, ); } + return resolver; }; @@ -221,6 +221,8 @@ const buildSeedPredicate = ( }; // exporting syncClasses for testing outbox.test.ts +// TODO(eslint): refactor not to export non-constant +// eslint-disable-next-line import/no-mutable-exports export let syncClasses: TypeConstructorMap; let userClasses: TypeConstructorMap; let dataStoreClasses: TypeConstructorMap; @@ -282,6 +284,7 @@ export function attached( } else { result && attachedModelInstances.set(result, attachment); } + return result; } @@ -355,10 +358,10 @@ const initSchema = (userSchema: Schema) => { field => field.association && field.association.connectionType === 'BELONGS_TO' && - (field.type).model !== model.name, + (field.type as ModelFieldType).model !== model.name, ) .forEach(field => - connectedModels.push((field.type).model), + connectedModels.push((field.type as ModelFieldType).model), ); modelAssociations.set(model.name, connectedModels); @@ -367,7 +370,7 @@ const initSchema = (userSchema: Schema) => { // (such as predicate builders) don't have to reach back into "DataStore" space // to go looking for it. Object.values(model.fields).forEach(field => { - const relatedModel = userClasses[(field.type).model]; + const relatedModel = userClasses[(field.type as ModelFieldType).model]; if (isModelConstructor(relatedModel)) { Object.defineProperty(field.type, 'modelConstructor', { get: () => { @@ -376,6 +379,7 @@ const initSchema = (userSchema: Schema) => { throw new Error( `Could not find model definition for ${relatedModel.name}`, ); + return { builder: relatedModel, schema: relatedModelDefinition, @@ -390,8 +394,8 @@ const initSchema = (userSchema: Schema) => { // index fields into the model definition. 
// definition.cloudFields = { ...definition.fields }; - const indexes = - schema.namespaces[namespace].relationships![model.name].indexes; + const { indexes } = + schema.namespaces[namespace].relationships![model.name]; const indexFields = new Set(); for (const index of indexes) { @@ -488,7 +492,7 @@ const checkSchemaCodegenVersion = (codegenVersion: string) => { let isValid = false; try { const versionParts = codegenVersion.split('.'); - const [major, minor, patch, patchrevision] = versionParts; + const [major, minor] = versionParts; isValid = Number(major) === majorVersion && Number(minor) >= minorVersion; } catch (err) { console.log(`Error parsing codegen version: ${codegenVersion}\n${err}`); @@ -546,12 +550,12 @@ export declare type ModelInstanceCreator = typeof modelInstanceCreator; const instancesMetadata = new WeakSet>(); function modelInstanceCreator( - modelConstructor: PersistentModelConstructor, + ModelConstructor: PersistentModelConstructor, init: Partial, ): T { instancesMetadata.add(init); - return new modelConstructor(>>init); + return new ModelConstructor(init as ModelInit>); } const validateModelFields = @@ -597,6 +601,7 @@ const validateModelFields = if (typeof v === 'string') { try { JSON.parse(v); + return; } catch (error) { throw new Error(`Field ${name} is an invalid JSON object. ${v}`); @@ -618,11 +623,11 @@ const validateModelFields = if ( !isNullOrUndefined(v) && - (<[]>v).some(e => + (v as []).some(e => isNullOrUndefined(e) ? isRequired : typeof e !== jsType, ) ) { - const elemTypes = (<[]>v) + const elemTypes = (v as []) .map(e => (e === null ? 'null' : typeof e)) .join(','); @@ -632,7 +637,7 @@ const validateModelFields = } if (validateScalar && !isNullOrUndefined(v)) { - const validationStatus = (<[]>v).map(e => { + const validationStatus = (v as []).map(e => { if (!isNullOrUndefined(e)) { return validateScalar(e); } else if (isNullOrUndefined(e) && !isRequired) { @@ -649,7 +654,7 @@ const validateModelFields = } } } else if (!isRequired && v === undefined) { - return; + // no-op for this branch but still to filter this branch out } else if (typeof v !== jsType && v !== null) { throw new Error( `Field ${name} should be of type ${jsType}, ${typeof v} received. ${v}`, @@ -771,7 +776,7 @@ const initializeInstance = ( const parsedValue = castInstanceType(modelDefinition, k, v); modelValidator(k, parsedValue); - (draft)[k] = parsedValue; + (draft as any)[k] = parsedValue; }); }; @@ -799,14 +804,14 @@ const normalize = ( draft: Draft, ) => { for (const k of Object.keys(modelDefinition.fields)) { - if (draft[k] === undefined) (draft)[k] = null; + if (draft[k] === undefined) (draft as any)[k] = null; } }; const createModelClass = ( modelDefinition: SchemaModel, ) => { - const clazz = >(class Model { + const clazz = class Model { constructor(init: ModelInit) { // we create a base instance first so we can distinguish which fields were explicitly // set by customer code versus those set by normalization. only those fields @@ -822,10 +827,12 @@ const createModelClass = ( const modelInstanceMetadata: ModelInstanceMetadata = isInternallyInitialized - ? (init) - : {}; + ? (init as unknown as ModelInstanceMetadata) + : ({} as ModelInstanceMetadata); - type ModelWithIDIdentifier = { id: string }; + interface ModelWithIDIdentifier { + id: string; + } const { id: _id } = modelInstanceMetadata as unknown as ModelWithIDIdentifier; @@ -839,10 +846,10 @@ const createModelClass = ( ? 
amplifyUuid() : ulid(); - ((draft)).id = id; + (draft as unknown as ModelWithIDIdentifier).id = id; } else if (isIdOptionallyManaged(modelDefinition)) { // only auto-populate if the id was not provided - ((draft)).id = + (draft as unknown as ModelWithIDIdentifier).id = draft.id || amplifyUuid(); } @@ -868,8 +875,9 @@ const createModelClass = ( // "cloud managed" fields, like createdAt and updatedAt.) const normalized = produce( baseInstance, - (draft: Draft) => - normalize(modelDefinition, draft), + (draft: Draft) => { + normalize(modelDefinition, draft); + }, ); initPatches.set(normalized, patches); @@ -889,7 +897,7 @@ const createModelClass = ( const model = produce( source, draft => { - fn(>draft); + fn(draft as MutableModel); const keyNames = extractPrimaryKeyFieldNames(modelDefinition); // Keys are immutable @@ -900,7 +908,7 @@ const createModelClass = ( { source }, ); } - (draft as Object)[key] = source[key]; + (draft as object)[key] = source[key]; }); const modelValidator = validateModelFields(modelDefinition); @@ -962,7 +970,7 @@ const createModelClass = ( return attached(instance, ModelAttachment.DataStore); } - }); + } as unknown as PersistentModelConstructor; clazz[immerable] = true; @@ -977,7 +985,7 @@ const createModelClass = ( pkField: extractPrimaryKeyFieldNames(modelDefinition), }); for (const relationship of allModelRelationships) { - const field = relationship.field; + const { field } = relationship; Object.defineProperty(clazz.prototype, modelDefinition.fields[field].name, { set(model: T | undefined | null) { @@ -989,7 +997,7 @@ const createModelClass = ( // Avoid validation error when processing AppSync response with nested // selection set. Nested entitites lack version field and can not be validated // TODO: explore a more reliable method to solve this - if (model.hasOwnProperty('_version')) { + if (Object.prototype.hasOwnProperty.call(model, '_version')) { const modelConstructor = Object.getPrototypeOf(model || {}) .constructor as PersistentModelConstructor; @@ -1035,7 +1043,7 @@ const createModelClass = ( // if the memos already has a result for this field, we'll use it. // there is no "cache" invalidation of any kind; memos are permanent to // keep an immutable perception of the instance. - if (!instanceMemos.hasOwnProperty(field)) { + if (!Object.prototype.hasOwnProperty.call(instanceMemos, field)) { // before we populate the memo, we need to know where to look for relatives. // today, this only supports DataStore. Models aren't managed elsewhere in Amplify. if (getAttachment(this) === ModelAttachment.DataStore) { @@ -1047,12 +1055,14 @@ const createModelClass = ( relationship.remoteModelConstructor as PersistentModelConstructor, base => base.and(q => { - return relationship.remoteJoinFields.map((field, index) => { - // TODO: anything we can use instead of `any` here? - return (q[field] as T[typeof field]).eq( - this[relationship.localJoinFields[index]], - ); - }); + return relationship.remoteJoinFields.map( + (joinField, index) => { + // TODO: anything we can use instead of `any` here? + return (q[joinField] as T[typeof joinField]).eq( + this[relationship.localJoinFields[index]], + ); + }, + ); }), ); @@ -1109,9 +1119,9 @@ export class AsyncItem extends Promise {} * This collection can be async-iterated or turned directly into an array using `toArray()`. 
*/ export class AsyncCollection implements AsyncIterable { - private values: Array | Promise>; + private values: any[] | Promise; - constructor(values: Array | Promise>) { + constructor(values: any[] | Promise) { this.values = values; } @@ -1129,6 +1139,7 @@ export class AsyncCollection implements AsyncIterable { [Symbol.asyncIterator](): AsyncIterator { let values; let index = 0; + return { next: async () => { if (!values) values = await this.values; @@ -1138,8 +1149,10 @@ export class AsyncCollection implements AsyncIterable { done: false, }; index++; + return result; } + return { value: null, done: true, @@ -1169,6 +1182,7 @@ export class AsyncCollection implements AsyncIterable { break; } } + return output; } } @@ -1206,7 +1220,7 @@ const checkReadOnlyPropertyOnUpdate = ( const createNonModelClass = ( typeDefinition: SchemaNonModel, ) => { - const clazz = >(class Model { + const clazz = class Model { constructor(init: ModelInit) { const instance = produce( this, @@ -1217,7 +1231,7 @@ const createNonModelClass = ( return instance; } - }); + } as unknown as NonModelTypeConstructor; clazz[immerable] = true; @@ -1235,6 +1249,7 @@ function isQueryOne(obj: any): obj is string { function defaultConflictHandler(conflictData: SyncConflict): PersistentModel { const { localModel, modelConstructor, remoteModel } = conflictData; const { _version } = remoteModel; + return modelInstanceCreator(modelConstructor, { ...localModel, _version }); } @@ -1291,14 +1306,14 @@ async function checkSchemaVersion( storage: Storage, version: string, ): Promise { - const Setting = + const SettingCtor = dataStoreClasses.Setting as PersistentModelConstructor; const modelDefinition = schema.namespaces[DATASTORE].models.Setting; await storage.runExclusive(async s => { const [schemaVersionSetting] = await s.query( - Setting, + SettingCtor, ModelPredicateCreator.createFromAST(modelDefinition, { and: { key: { eq: SETTING_SCHEMA_VERSION } }, }), @@ -1316,7 +1331,7 @@ async function checkSchemaVersion( } } else { await s.save( - modelInstanceCreator(Setting, { + modelInstanceCreator(SettingCtor, { key: SETTING_SCHEMA_VERSION, value: JSON.stringify(version), }), @@ -1394,8 +1409,8 @@ class DataStore { private errorHandler!: (error: SyncError) => void; private fullSyncInterval!: number; private initialized?: Promise; - private initReject!: Function; - private initResolve!: Function; + private initReject!: () => void; + private initResolve!: () => void; private maxRecordsToSync!: number; private storage?: Storage; private sync?: SyncEngine; @@ -1403,12 +1418,14 @@ class DataStore { private syncExpressions!: SyncExpression[]; private syncPredicates: WeakMap | null> = new WeakMap>(); + private sessionId?: string; private storageAdapter!: Adapter; // object that gets passed to descendent classes. 
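The `AsyncCollection` hunks above implement the async-iterator protocol by hand and drain it in `toArray()`. A condensed, standalone sketch of the same pattern; `LazyCollection` is a stand-in name, not the library class:

```ts
class LazyCollection<T> implements AsyncIterable<T> {
  constructor(private values: T[] | Promise<T[]>) {}

  [Symbol.asyncIterator](): AsyncIterator<T> {
    let resolved: T[] | undefined;
    let index = 0;

    return {
      next: async () => {
        // Resolve the backing array lazily, on first pull.
        if (!resolved) resolved = await this.values;
        if (index < resolved.length) {
          return { value: resolved[index++], done: false };
        }

        return { value: undefined, done: true };
      },
    };
  }

  // Drain the iterator into a plain array, optionally stopping after `max` items.
  async toArray({ max = Infinity }: { max?: number } = {}): Promise<T[]> {
    const output: T[] = [];
    for await (const item of this) {
      output.push(item);
      if (output.length >= max) break;
    }

    return output;
  }
}

// Usage: iterate with for-await, or collect a bounded page.
new LazyCollection(Promise.resolve([1, 2, 3]))
  .toArray({ max: 2 })
  .then(items => console.log(items)); // [1, 2]
```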
Allows us to pass these down by reference private amplifyContext: AmplifyContext = { InternalAPI: this.InternalAPI, }; + private connectivityMonitor?: DataStoreConnectivity; /** @@ -1501,12 +1518,13 @@ class DataStore { this.state = DataStoreState.Starting; if (this.initialized === undefined) { logger.debug('Starting DataStore'); - this.initialized = new Promise((res, rej) => { - this.initResolve = res; - this.initReject = rej; + this.initialized = new Promise((resolve, reject) => { + this.initResolve = resolve; + this.initReject = reject; }); } else { await this.initialized; + return; } @@ -1629,7 +1647,7 @@ class DataStore { throw new Error('No storage to query'); } - //#region Input validation + // #region Input validation if (!isValidModelConstructor(modelConstructor)) { const msg = 'Constructor is not for a valid model'; @@ -1675,10 +1693,10 @@ class DataStore { ); } else { // Object is being queried using object literal syntax - if (isIdentifierObject(identifierOrCriteria, modelDefinition)) { + if (isIdentifierObject(identifierOrCriteria as T, modelDefinition)) { const predicate = ModelPredicateCreator.createForPk( modelDefinition, - identifierOrCriteria, + identifierOrCriteria as T, ); result = await this.storage.query( modelConstructor, @@ -1710,7 +1728,7 @@ class DataStore { } } - //#endregion + // #endregion const returnOne = isQueryOne(identifierOrCriteria) || @@ -1757,7 +1775,9 @@ class DataStore { | undefined = updatedPatchesTuple || initPatchesTuple; const modelConstructor: PersistentModelConstructor | undefined = - model ? >model.constructor : undefined; + model + ? (model.constructor as PersistentModelConstructor) + : undefined; if (!isValidModelConstructor(modelConstructor)) { const msg = 'Object is not an instance of a valid model'; @@ -1816,12 +1836,8 @@ class DataStore { : undefined; const [savedModel] = await this.storage.runExclusive(async s => { - const saved = await s.save( - model, - producedCondition, - undefined, - patchesTuple, - ); + await s.save(model, producedCondition, undefined, patchesTuple); + return s.query( modelConstructor, ModelPredicateCreator.createForPk(modelDefinition, model), @@ -1942,7 +1958,7 @@ class DataStore { if (isIdentifierObject(identifierOrCriteria, modelDefinition)) { condition = ModelPredicateCreator.createForPk( modelDefinition, - identifierOrCriteria, + identifierOrCriteria as T, ); } else { condition = internals( @@ -2057,11 +2073,11 @@ class DataStore { : undefined; if (modelOrConstructor && modelConstructor === undefined) { - const model = modelOrConstructor; - const modelConstructor = - model && (Object.getPrototypeOf(model)).constructor; + const model = modelOrConstructor as T; + const resolvedModelConstructor = + model && (Object.getPrototypeOf(model) as object).constructor; - if (isValidModelConstructor(modelConstructor)) { + if (isValidModelConstructor(resolvedModelConstructor)) { if (identifierOrCriteria) { logger.warn('idOrCriteria is ignored when using a model instance', { model, @@ -2069,7 +2085,7 @@ class DataStore { }); } - return this.observe(modelConstructor, model.id); + return this.observe(resolvedModelConstructor, model.id); } else { const msg = 'The model is not an instance of a PersistentModelConstructor'; @@ -2169,8 +2185,12 @@ class DataStore { observer.next(message as SubscriptionMessage); } }, 'datastore observe message handler'), - error: err => observer.error(err), - complete: () => observer.complete(), + error: err => { + observer.error(err); + }, + complete: () => { + observer.complete(); + }, }); }, 
'datastore observe observable initialization') .catch(this.handleAddProcError('DataStore.observe()')) @@ -2189,13 +2209,11 @@ class DataStore { }); }; - observeQuery: { - ( - modelConstructor: PersistentModelConstructor, - criteria?: RecursiveModelPredicateExtender | typeof PredicateAll, - paginationProducer?: ObserveQueryOptions, - ): Observable>; - } = ( + observeQuery: ( + modelConstructor: PersistentModelConstructor, + criteria?: RecursiveModelPredicateExtender | typeof PredicateAll, + paginationProducer?: ObserveQueryOptions, + ) => Observable> = ( model: PersistentModelConstructor, criteria?: RecursiveModelPredicateExtender | typeof PredicateAll, options?: ObserveQueryOptions, @@ -2264,10 +2282,11 @@ class DataStore { // to have visibility into items that move from in-set to out-of-set. // We need to explicitly remove those items from the existing snapshot. handle = this.observe(model).subscribe( - ({ element, model, opType }) => + ({ element, model: observedModel, opType }) => this.runningProcesses.isOpen && this.runningProcesses.add(async () => { - const itemModelDefinition = getModelDefinition(model)!; + const itemModelDefinition = + getModelDefinition(observedModel)!; const idOrPk = getIdentifierValue( itemModelDefinition, element, @@ -2302,7 +2321,7 @@ class DataStore { } const isSynced = - this.sync?.getModelSyncedStatus(model) ?? false; + this.sync?.getModelSyncedStatus(observedModel) ?? false; const limit = itemsChanged.size - deletedItemIds.length >= @@ -2391,8 +2410,11 @@ class DataStore { * @param itemsToSort A array of model type. */ const sortItems = (itemsToSort: T[]): void => { - const modelDefinition = getModelDefinition(model); - const pagination = this.processPagination(modelDefinition!, options); + const sortingModelDefinition = getModelDefinition(model); + const pagination = this.processPagination( + sortingModelDefinition!, + options, + ); const sortPredicates = ModelSortPredicateCreator.getPredicates( pagination!.sort!, @@ -2438,8 +2460,6 @@ class DataStore { const { DataStore: configDataStore, authModeStrategyType: configAuthModeStrategyType, - conflictHandler: configConflictHandler, - errorHandler: configErrorHandler, maxRecordsToSync: configMaxRecordsToSync, syncPageSize: configSyncPageSize, fullSyncInterval: configFullSyncInterval, @@ -2700,6 +2720,7 @@ class DataStore { ): Promise> { try { const condition = await conditionProducer(); + return condition || conditionProducer; } catch (error) { if (error instanceof TypeError) { @@ -2719,6 +2740,7 @@ class DataStore { `You can only utilize one Sync Expression per model. Subsequent sync expressions for the ${name} model will be ignored.`, ); + return map; } diff --git a/packages/datastore/src/index.ts b/packages/datastore/src/index.ts index 81f67de2dbc..1e721b5e50f 100644 --- a/packages/datastore/src/index.ts +++ b/packages/datastore/src/index.ts @@ -1,10 +1,19 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
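The `observeQuery` hunk above swaps a single-member call-signature object for a plain generic function type on the class property. A small sketch of that typing pattern; the `Snapshot` shape and `Example` class are illustrative only:

```ts
interface Snapshot<T> {
  items: T[];
  isSynced: boolean;
}

class Example {
  // The property is typed directly as a generic function; the generic arrow
  // implementation follows the `=` sign, with no wrapper object needed.
  snapshotOf: <T>(items: T[]) => Snapshot<T> = <T>(items: T[]) => ({
    items,
    isSynced: true,
  });
}

const snapshot = new Example().snapshotOf(['a', 'b']);
console.log(snapshot.items.length, snapshot.isSynced); // 2 true
```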
// SPDX-License-Identifier: Apache-2.0 +import { + USER, + isModelConstructor, + isNonModelConstructor, + traverseModel, + validatePredicate, +} from './util'; + export { DataStore, DataStoreClass, initSchema, ModelInstanceCreator, + // eslint-disable-next-line import/export AsyncCollection, AsyncItem, } from './datastore/datastore'; @@ -16,14 +25,6 @@ export { } from './predicates'; export { Adapter as StorageAdapter } from './storage/adapter'; -import { - traverseModel, - validatePredicate, - USER, - isNonModelConstructor, - isModelConstructor, -} from './util'; - export { NAMESPACES } from './util'; export const utils = { @@ -34,4 +35,5 @@ export const utils = { isModelConstructor, }; +// eslint-disable-next-line import/export export * from './types'; diff --git a/packages/datastore/src/predicates/index.ts b/packages/datastore/src/predicates/index.ts index db85998f027..ef547da94f3 100644 --- a/packages/datastore/src/predicates/index.ts +++ b/packages/datastore/src/predicates/index.ts @@ -39,6 +39,7 @@ const groupKeys = new Set(['and', 'or', 'not']); */ const isGroup = o => { const keys = [...Object.keys(o)]; + return keys.length === 1 && groupKeys.has(keys[0]); }; @@ -77,6 +78,7 @@ export const comparisonKeys = new Set([ */ const isComparison = o => { const keys = [...Object.keys(o)]; + return !Array.isArray(o) && keys.length === 1 && comparisonKeys.has(keys[0]); }; @@ -98,11 +100,12 @@ export const PredicateAll = Symbol('A predicate that matches all records'); export class Predicates { public static get ALL(): typeof PredicateAll { + // eslint-disable-next-line @typescript-eslint/consistent-type-assertions const predicate = >(c => c); predicatesAllSet.add(predicate); - return (predicate); + return predicate as unknown as typeof PredicateAll; } } @@ -140,7 +143,7 @@ export class ModelPredicateCreator { */ static getPredicates( predicate: ModelPredicate, - throwOnInvalid: boolean = true, + throwOnInvalid = true, ) { if (throwOnInvalid && !ModelPredicateCreator.isValidPredicate(predicate)) { throw new Error('The predicate is not valid'); @@ -167,6 +170,7 @@ export class ModelPredicateCreator { const predicate = this.createFromAST(modelDefinition, { and: keyFields.map((field, idx) => { const operand = keyValues[idx]; + return { [field]: { eq: operand } }; }), }); @@ -190,6 +194,7 @@ export class ModelPredicateCreator { const ast = { and: Object.entries(flatEqualities).map(([k, v]) => ({ [k]: { eq: v } })), }; + return this.createFromAST(modelDefinition, ast); } @@ -231,12 +236,14 @@ export class ModelPredicateCreator { const children = this.transformGraphQLFilterNodeToPredicateAST( gql[groupkey], ); + return { type: groupkey, predicates: Array.isArray(children) ? children : [children], }; } else if (isComparison(gql)) { const operatorKey = Object.keys(gql)[0]; + return { operator: operatorKey, operand: gql[operatorKey], @@ -246,6 +253,7 @@ export class ModelPredicateCreator { return gql.map(o => this.transformGraphQLFilterNodeToPredicateAST(o)); } else { const fieldKey = Object.keys(gql)[0]; + return { field: fieldKey, ...this.transformGraphQLFilterNodeToPredicateAST(gql[fieldKey]), diff --git a/packages/datastore/src/predicates/next.ts b/packages/datastore/src/predicates/next.ts index 08362dc9df6..78e44763fda 100644 --- a/packages/datastore/src/predicates/next.ts +++ b/packages/datastore/src/predicates/next.ts @@ -1,38 +1,38 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
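`createFromFlatEqualities` above turns a flat field-to-value map into an `and` group of `eq` comparisons before handing it to `createFromAST`. A tiny sketch of that AST shape with illustrative types:

```ts
type EqualityNode = Record<string, { eq: unknown }>;

interface AndGroup {
  and: EqualityNode[];
}

// Each entry of the flat map becomes one `{ field: { eq: value } }` node.
const toPredicateAst = (flatEqualities: Record<string, unknown>): AndGroup => ({
  and: Object.entries(flatEqualities).map(([field, value]) => ({
    [field]: { eq: value },
  })),
});

// {"and":[{"id":{"eq":"123"}},{"title":{"eq":"hello"}}]}
console.log(JSON.stringify(toPredicateAst({ id: '123', title: 'hello' })));
```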
// SPDX-License-Identifier: Apache-2.0 import { - PersistentModel, + AllFieldOperators, ModelFieldType, ModelMeta, - ModelPredicate as StoragePredicate, - AllFieldOperators, - PredicateInternalsKey, V5ModelPredicate as ModelPredicate, + PersistentModel, + PredicateInternalsKey, RecursiveModelPredicate, - RecursiveModelPredicateExtender, RecursiveModelPredicateAggregateExtender, + RecursiveModelPredicateExtender, + ModelPredicate as StoragePredicate, } from '../types'; +import { ExclusiveStorage as StorageAdapter } from '../storage/storage'; +import { ModelRelationship } from '../storage/relationship'; +import { asyncEvery, asyncSome } from '../util'; import { ModelPredicateCreator as FlatModelPredicateCreator, comparisonKeys, } from './index'; -import { ExclusiveStorage as StorageAdapter } from '../storage/storage'; -import { ModelRelationship } from '../storage/relationship'; -import { asyncSome, asyncEvery } from '../util'; const ops = [...comparisonKeys] as AllFieldOperators[]; type GroupOperator = 'and' | 'or' | 'not'; -type UntypedCondition = { - fetch: (storage: StorageAdapter) => Promise[]>; - matches: (item: Record) => Promise; +interface UntypedCondition { + fetch(storage: StorageAdapter): Promise[]>; + matches(item: Record): Promise; copy( extract?: GroupCondition, ): [UntypedCondition, GroupCondition | undefined]; toAST(): any; -}; +} /** * A map from keys (exposed to customers) to the internal predicate data @@ -52,6 +52,7 @@ const predicateInternalsMap = new Map(); const registerPredicateInternals = (condition: GroupCondition, key?: any) => { const finalKey = key || new PredicateInternalsKey(); predicateInternalsMap.set(finalKey, condition); + return finalKey; }; @@ -72,6 +73,7 @@ export const internals = (key: any) => { "Invalid predicate. Terminate your predicate with a valid condition (e.g., `p => p.field.eq('value')`) or pass `Predicates.ALL`.", ); } + return predicateInternalsMap.get(key)!; }; @@ -113,7 +115,7 @@ export class FieldCondition { * @param extract Not used. Present only to fulfill the `UntypedCondition` interface. * @returns A new, identitical `FieldCondition`. */ - copy(extract?: GroupCondition): [FieldCondition, GroupCondition | undefined] { + copy(): [FieldCondition, GroupCondition | undefined] { return [ new FieldCondition(this.field, this.operator, [...this.operands]), undefined, @@ -191,7 +193,8 @@ export class FieldCondition { * @param storage N/A. If ever implemented, the storage adapter to query. * @returns N/A. If ever implemented, return items from `storage` that match. */ - async fetch(storage: StorageAdapter): Promise[]> { + async fetch(): Promise[]> { + // eslint-disable-next-line prefer-promise-reject-errors return Promise.reject('No implementation needed [yet].'); } @@ -217,6 +220,7 @@ export class FieldCondition { const operation = operations[this.operator as keyof typeof operations]; if (operation) { const result = operation(); + return result; } else { throw new Error(`Invalid operator given: ${this.operator}`); @@ -234,6 +238,7 @@ export class FieldCondition { */ const argumentCount = count => { const argsClause = count === 1 ? 'argument is' : 'arguments are'; + return () => { if (this.operands.length !== count) { return `Exactly ${count} ${argsClause} required.`; @@ -278,6 +283,7 @@ export class FieldCondition { */ const getGroupId = (() => { let seed = 1; + return () => `group_${seed++}`; })(); @@ -345,7 +351,7 @@ export class GroupCondition { * This is used to guard against infinitely fetch -> optimize -> fetch * recursion. 
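`registerPredicateInternals` and `internals` above hide predicate state behind opaque keys held in a module-private map. A standalone sketch of that registry pattern; the class and function names here are illustrative:

```ts
// Customers hold an opaque key; only this module can resolve it to real state.
class InternalsKey {}

interface InternalState {
  description: string;
}

const internalsMap = new Map<InternalsKey, InternalState>();

function register(state: InternalState, key: InternalsKey = new InternalsKey()) {
  internalsMap.set(key, state);

  return key;
}

function resolve(key: InternalsKey): InternalState {
  const state = internalsMap.get(key);
  if (!state) {
    throw new Error('Invalid key. Terminate the builder with a valid condition.');
  }

  return state;
}

// Usage: the key can travel through customer code; the state stays private.
const key = register({ description: 'name eq "x"' });
console.log(resolve(key).description); // name eq "x"
```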
*/ - public isOptimized: boolean = false, + public isOptimized = false, ) {} /** @@ -386,6 +392,7 @@ export class GroupCondition { */ withFieldConditionsOnly(negate: boolean) { const negateChildren = negate !== (this.operator === 'not'); + return new GroupCondition( this.model, undefined, @@ -495,7 +502,7 @@ export class GroupCondition { return this.optimized().fetch(storage); } - const resultGroups: Array[]> = []; + const resultGroups: Record[][] = []; const operator = (negate ? negations[this.operator] : this.operator) as | 'or' @@ -564,7 +571,7 @@ export class GroupCondition { const relationship = ModelRelationship.from(this.model, g.field); - type JoinCondition = { [x: string]: { eq: any } }; + type JoinCondition = Record; if (relationship) { const allJoinConditions: { and: JoinCondition[] }[] = []; for (const relative of relatives) { @@ -665,7 +672,7 @@ export class GroupCondition { */ async matches( item: Record, - ignoreFieldName: boolean = false, + ignoreFieldName = false, ): Promise { const itemToCheck = this.field && !ignoreFieldName ? await item[this.field] : item; @@ -686,6 +693,7 @@ export class GroupCondition { return true; } } + return false; } @@ -699,6 +707,7 @@ export class GroupCondition { 'Invalid arguments! `not()` accepts exactly one predicate expression.', ); } + return !(await this.operands[0].matches(itemToCheck)); } else { throw new Error('Invalid group operator!'); @@ -769,7 +778,7 @@ export class GroupCondition { */ export function recursivePredicateFor( ModelType: ModelMeta, - allowRecursion: boolean = true, + allowRecursion = true, field?: string, query?: GroupCondition, tail?: GroupCondition, @@ -788,15 +797,16 @@ export function recursivePredicateFor( registerPredicateInternals(baseCondition, link); const copyLink = () => { - const [query, newTail] = baseCondition.copy(tailCondition); + const [copiedQuery, newTail] = baseCondition.copy(tailCondition); const newLink = recursivePredicateFor( ModelType, allowRecursion, undefined, - query, + copiedQuery, newTail, ); - return { query, newTail, newLink }; + + return { query: copiedQuery, newTail, newLink }; }; // Adds .or() and .and() methods to the link. @@ -805,7 +815,7 @@ export function recursivePredicateFor( link[op] = (builder: RecursiveModelPredicateAggregateExtender) => { // or() and and() will return a copy of the original link // to head off mutability concerns. - const { query, newTail } = copyLink(); + const { query: copiedLinkQuery, newTail } = copyLink(); const childConditions = builder( recursivePredicateFor(ModelType, allowRecursion), @@ -829,7 +839,7 @@ export function recursivePredicateFor( ); // FinalPredicate - return registerPredicateInternals(query); + return registerPredicateInternals(copiedLinkQuery); }; }); @@ -839,7 +849,7 @@ export function recursivePredicateFor( ): PredicateInternalsKey => { // not() will return a copy of the original link // to head off mutability concerns. - const { query, newTail } = copyLink(); + const { query: copiedLinkQuery, newTail } = copyLink(); // unlike and() and or(), the customer will supply a "singular" child predicate. // the difference being: not() does not accept an array of predicate-like objects. @@ -853,7 +863,7 @@ export function recursivePredicateFor( // A `FinalModelPredicate`. // Return a thing that can no longer be extended, but instead used to `async filter(items)` // or query storage: `.__query.fetch(storage)`. 
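These hunks import `asyncEvery` and `asyncSome` from `../util` for the group matching seen above. A sketch of what such short-circuiting async helpers typically look like; the signatures are assumed, not taken from the library:

```ts
async function asyncEvery<T>(
  items: T[],
  predicate: (item: T) => Promise<boolean>,
): Promise<boolean> {
  for (const item of items) {
    if (!(await predicate(item))) return false; // stop at the first miss
  }

  return true;
}

async function asyncSome<T>(
  items: T[],
  predicate: (item: T) => Promise<boolean>,
): Promise<boolean> {
  for (const item of items) {
    if (await predicate(item)) return true; // stop at the first hit
  }

  return false;
}

// Usage: an `and` group matches when every child matches; `or` when any child does.
asyncEvery([1, 3, 5], async n => n % 2 === 1).then(result => console.log(result)); // true
asyncSome([2, 4, 5], async n => n % 2 === 1).then(result => console.log(result)); // true
```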
- return registerPredicateInternals(query); + return registerPredicateInternals(copiedLinkQuery); }; // For each field on the model schema, we want to add a getter @@ -881,7 +891,7 @@ export function recursivePredicateFor( [operator]: (...operands: any[]) => { // build off a fresh copy of the existing `link`, just in case // the same link is being used elsewhere by the customer. - const { query, newTail } = copyLink(); + const { query: copiedLinkQuery, newTail } = copyLink(); // normalize operands. if any of the values are `undefiend`, use // `null` instead, because that's what will be stored cross-platform. @@ -898,7 +908,7 @@ export function recursivePredicateFor( // A `FinalModelPredicate`. // Return a thing that can no longer be extended, but instead used to `async filter(items)` // or query storage: `.__query.fetch(storage)`. - return registerPredicateInternals(query); + return registerPredicateInternals(copiedLinkQuery); }, }; }, {}); @@ -945,6 +955,7 @@ export function recursivePredicateFor( newquery, newtail, ); + return newlink; } else { throw new Error( diff --git a/packages/datastore/src/predicates/sort.ts b/packages/datastore/src/predicates/sort.ts index 99f0e47b077..aadb5146cf0 100644 --- a/packages/datastore/src/predicates/sort.ts +++ b/packages/datastore/src/predicates/sort.ts @@ -2,10 +2,10 @@ // SPDX-License-Identifier: Apache-2.0 import { PersistentModel, - SchemaModel, - SortPredicate, ProducerSortPredicate, + SchemaModel, SortDirection, + SortPredicate, SortPredicatesGroup, } from '../types'; @@ -21,32 +21,29 @@ export class ModelSortPredicateCreator { const { name: modelName } = modelDefinition; const fieldNames = new Set(Object.keys(modelDefinition.fields)); - let handler: ProxyHandler>; - const predicate = new Proxy( - {} as SortPredicate, - (handler = { - get(_target, propertyKey, receiver: SortPredicate) { - const field = propertyKey as keyof T; + const predicate = new Proxy({} as SortPredicate, { + get(_target, propertyKey, receiver: SortPredicate) { + const field = propertyKey as keyof T; - if (!fieldNames.has(field)) { - throw new Error( - `Invalid field for model. field: ${String( - field, - )}, model: ${modelName}`, - ); - } + if (!fieldNames.has(field)) { + throw new Error( + `Invalid field for model. field: ${String( + field, + )}, model: ${modelName}`, + ); + } - const result = (sortDirection: SortDirection) => { - ModelSortPredicateCreator.sortPredicateGroupsMap - .get(receiver) - ?.push({ field, sortDirection }); + const result = (sortDirection: SortDirection) => { + ModelSortPredicateCreator.sortPredicateGroupsMap + .get(receiver) + ?.push({ field, sortDirection }); - return receiver; - }; - return result; - }, - }), - ); + return receiver; + }; + + return result; + }, + }); ModelSortPredicateCreator.sortPredicateGroupsMap.set(predicate, []); @@ -61,7 +58,7 @@ export class ModelSortPredicateCreator { static getPredicates( predicate: SortPredicate, - throwOnInvalid: boolean = true, + throwOnInvalid = true, ): SortPredicatesGroup { if ( throwOnInvalid && diff --git a/packages/datastore/src/storage/adapter/AsyncStorageAdapter.ts b/packages/datastore/src/storage/adapter/AsyncStorageAdapter.ts index 9be4d353755..b903c55dc2f 100644 --- a/packages/datastore/src/storage/adapter/AsyncStorageAdapter.ts +++ b/packages/datastore/src/storage/adapter/AsyncStorageAdapter.ts @@ -1,6 +1,5 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
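The `ModelSortPredicateCreator` hunks above build sort predicates with a Proxy whose `get` trap returns a recorder that hands the proxy back for chaining. A self-contained sketch of that builder pattern with illustrative names:

```ts
type SortDirection = 'ASCENDING' | 'DESCENDING';

interface SortEntry {
  field: string;
  sortDirection: SortDirection;
}

function sortBuilder(validFields: Set<string>) {
  const entries: SortEntry[] = [];

  const proxy: any = new Proxy(
    {},
    {
      get(_target, propertyKey) {
        const field = String(propertyKey);
        if (!validFields.has(field)) {
          throw new Error(`Invalid field for model: ${field}`);
        }

        // Record the requested sort, then return the proxy so calls chain.
        return (sortDirection: SortDirection) => {
          entries.push({ field, sortDirection });

          return proxy;
        };
      },
    },
  );

  return { proxy, entries };
}

// Usage: s => s.rating('DESCENDING').title('ASCENDING') style chaining.
const { proxy, entries } = sortBuilder(new Set(['title', 'rating']));
proxy.rating('DESCENDING').title('ASCENDING');
console.log(entries); // [{ field: 'rating', ... }, { field: 'title', ... }]
```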
// SPDX-License-Identifier: Apache-2.0 -import AsyncStorageDatabase from './AsyncStorageDatabase'; import { ModelInstanceMetadata, ModelPredicate, @@ -13,21 +12,27 @@ import { } from '../../types'; import { DEFAULT_PRIMARY_KEY_VALUE_SEPARATOR, - traverseModel, - validatePredicate, + getIndexKeys, + getStorename, inMemoryPagination, keysEqual, - getStorename, - getIndexKeys, + traverseModel, + validatePredicate, } from '../../util'; + +import AsyncStorageDatabase from './AsyncStorageDatabase'; import { StorageAdapterBase } from './StorageAdapterBase'; export class AsyncStorageAdapter extends StorageAdapterBase { protected db!: AsyncStorageDatabase; - // no-ops for this adapter - protected async preSetUpChecks() {} - protected async preOpCheck() {} + protected async preSetUpChecks() { + // no-ops for AsyncStorageAdapter + } + + protected async preOpCheck() { + // no-ops for AsyncStorageAdapter + } /** * Open AsyncStorage database @@ -40,6 +45,7 @@ export class AsyncStorageAdapter extends StorageAdapterBase { protected async initDb(): Promise { const db = new AsyncStorageDatabase(); await db.init(); + return db; } @@ -77,15 +83,20 @@ export class AsyncStorageAdapter extends StorageAdapterBase { const keyValuesPath = this.getIndexKeyValuesPath(model); - const { instance } = connectedModels.find(({ instance }) => { - const instanceKeyValuesPath = this.getIndexKeyValuesPath(instance); - return keysEqual([instanceKeyValuesPath], [keyValuesPath]); - })!; + const { instance } = connectedModels.find( + ({ instance: connectedModelInstance }) => { + const instanceKeyValuesPath = this.getIndexKeyValuesPath( + connectedModelInstance, + ); + + return keysEqual([instanceKeyValuesPath], [keyValuesPath]); + }, + )!; batch.push(instance); } - return await this.db.batchSave(storeName, batch, keys); + return this.db.batchSave(storeName, batch, keys); } protected async _get(storeName: string, keyArr: string[]): Promise { @@ -93,7 +104,7 @@ export class AsyncStorageAdapter extends StorageAdapterBase { DEFAULT_PRIMARY_KEY_VALUE_SEPARATOR, ); - return await this.db.get(itemKeyValuesPath, storeName); + return (await this.db.get(itemKeyValuesPath, storeName)) as T; } async save( @@ -109,12 +120,15 @@ export class AsyncStorageAdapter extends StorageAdapterBase { const result: [T, OpType.INSERT | OpType.UPDATE][] = []; for await (const resItem of connectionStoreNames) { - const { storeName, item, instance, keys } = resItem; + const { storeName: storeNameForRestItem, item, instance, keys } = resItem; const itemKeyValues: string[] = keys.map(key => item[key]); - const fromDB = await this._get(storeName, itemKeyValues); - const opType: OpType = fromDB ? OpType.UPDATE : OpType.INSERT; + const fromDBForRestItem = (await this._get( + storeNameForRestItem, + itemKeyValues, + )) as T; + const opType: OpType = fromDBForRestItem ? OpType.UPDATE : OpType.INSERT; if ( keysEqual(itemKeyValues, modelKeyValues) || @@ -122,7 +136,7 @@ export class AsyncStorageAdapter extends StorageAdapterBase { ) { await this.db.save( item, - storeName, + storeNameForRestItem, keys, itemKeyValues.join(DEFAULT_PRIMARY_KEY_VALUE_SEPARATOR), ); @@ -130,6 +144,7 @@ export class AsyncStorageAdapter extends StorageAdapterBase { result.push([instance, opType]); } } + return result; } @@ -151,36 +166,39 @@ export class AsyncStorageAdapter extends StorageAdapterBase { if (queryByKey) { const keyValues = queryByKey.join(DEFAULT_PRIMARY_KEY_VALUE_SEPARATOR); const record = await this.getByKey(storeName, keyValues); + return record ? 
[record] : []; } if (predicates) { const filtered = await this.filterOnPredicate(storeName, predicates); + return this.inMemoryPagination(filtered, pagination); } if (hasSort || hasPagination) { const all = await this.getAll(storeName); + return this.inMemoryPagination(all, pagination); } return this.getAll(storeName); })()) as T[]; - return await this.load(namespaceName, modelConstructor.name, records); + return this.load(namespaceName, modelConstructor.name, records); } private async getByKey( storeName: string, keyValuePath: string, ): Promise { - return await this.db.get(keyValuePath, storeName); + return (await this.db.get(keyValuePath, storeName)) as T; } private async getAll( storeName: string, ): Promise { - return await this.db.getAll(storeName); + return this.db.getAll(storeName); } private async filterOnPredicate( @@ -189,7 +207,7 @@ export class AsyncStorageAdapter extends StorageAdapterBase { ) { const { predicates: predicateObjs, type } = predicates; - const all = await this.getAll(storeName); + const all = (await this.getAll(storeName)) as T[]; const filtered = predicateObjs ? all.filter(m => validatePredicate(m, type, predicateObjs)) @@ -210,7 +228,7 @@ export class AsyncStorageAdapter extends StorageAdapterBase { firstOrLast: QueryOne = QueryOne.FIRST, ): Promise { const storeName = this.getStorenameForModel(modelConstructor); - const result = await this.db.getOne(firstOrLast, storeName); + const result = (await this.db.getOne(firstOrLast, storeName)) as T; return result && this.modelInstanceCreator(modelConstructor, result); } @@ -232,7 +250,7 @@ export class AsyncStorageAdapter extends StorageAdapterBase { } } - //#region platform-specific helper methods + // #region platform-specific helper methods /** * Retrieves concatenated primary key values from a model @@ -246,7 +264,7 @@ export class AsyncStorageAdapter extends StorageAdapterBase { ); } - //#endregion + // #endregion } export default new AsyncStorageAdapter(); diff --git a/packages/datastore/src/storage/adapter/AsyncStorageDatabase.ts b/packages/datastore/src/storage/adapter/AsyncStorageDatabase.ts index b78c81c3658..f7592f53640 100644 --- a/packages/datastore/src/storage/adapter/AsyncStorageDatabase.ts +++ b/packages/datastore/src/storage/adapter/AsyncStorageDatabase.ts @@ -1,6 +1,7 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 import { ULID } from 'ulid'; + import { ModelInstanceMetadata, OpType, @@ -13,6 +14,7 @@ import { indexNameFromKeys, monotonicUlidFactory, } from '../../util'; + import { createInMemoryStore } from './InMemoryStore'; const DB_NAME = '@AmplifyDatastore'; @@ -72,12 +74,12 @@ class AsyncStorageDatabase { if (id === undefined) { // It is an old entry (without ulid). 
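The query path above resolves in order: by primary key, by predicate filter, then a plain scan, with in-memory pagination applied to filtered or full results. A small sketch of the page/limit slicing step; the pagination shape is assumed for illustration:

```ts
interface PaginationInput {
  page?: number;
  limit?: number;
}

function paginate<T>(records: T[], pagination?: PaginationInput): T[] {
  // No limit means the whole result set is returned untouched.
  if (!pagination || pagination.limit === undefined) return records;

  const { page = 0, limit } = pagination;
  const start = Math.max(0, page) * limit;

  return records.slice(start, start + limit);
}

console.log(paginate([1, 2, 3, 4, 5], { page: 1, limit: 2 })); // [3, 4]
```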
Need to migrate to new key format - const id = ulidOrId; + const resolvedId = ulidOrId; const newUlid = this.getMonotonicFactory(storeName)(); - const oldKey = this.getLegacyKeyForItem(storeName, id); - const newKey = this.getKeyForItem(storeName, id, newUlid); + const oldKey = this.getLegacyKeyForItem(storeName, resolvedId); + const newKey = this.getKeyForItem(storeName, resolvedId, newUlid); const item = await this.storage.getItem(oldKey); @@ -161,7 +163,7 @@ class AsyncStorageDatabase { ); allItemsKeys.push(key); - itemsMap[key] = { ulid, model: (item) }; + itemsMap[key] = { ulid, model: item as unknown as T }; if (_deleted) { keysToDelete.add(key); @@ -180,6 +182,7 @@ class AsyncStorageDatabase { await new Promise((resolve, reject) => { if (keysToDelete.size === 0) { resolve(); + return; } @@ -208,6 +211,7 @@ class AsyncStorageDatabase { await new Promise((resolve, reject) => { if (keysToSave.size === 0) { resolve(); + return; } @@ -258,6 +262,7 @@ class AsyncStorageDatabase { const itemKey = this.getKeyForItem(storeName, keyValuePath, ulid); const recordAsString = await this.storage.getItem(itemKey); const record = recordAsString && JSON.parse(recordAsString); + return record; } @@ -267,14 +272,17 @@ class AsyncStorageDatabase { const [itemId, ulid] = firstOrLast === QueryOne.FIRST ? (() => { - let id: string, ulid: string; - for ([id, ulid] of collection) break; // Get first element of the set - return [id!, ulid!]; + let resolvedId: string, resolvedUlid: string; + // eslint-disable-next-line no-unreachable-loop + for ([resolvedId, resolvedUlid] of collection) break; // Get first element of the set + + return [resolvedId!, resolvedUlid!]; })() : (() => { - let id: string, ulid: string; - for ([id, ulid] of collection); // Get last element of the set - return [id!, ulid!]; + let resolvedId: string, resolvedUlid: string; + for ([resolvedId, resolvedUlid] of collection); // Get last element of the set + + return [resolvedId!, resolvedUlid!]; })(); const itemKey = this.getKeyForItem(storeName, itemId, ulid); diff --git a/packages/datastore/src/storage/adapter/InMemoryStore.ts b/packages/datastore/src/storage/adapter/InMemoryStore.ts index 0ea858f59dc..c275ee0e85e 100644 --- a/packages/datastore/src/storage/adapter/InMemoryStore.ts +++ b/packages/datastore/src/storage/adapter/InMemoryStore.ts @@ -9,7 +9,11 @@ export class InMemoryStore { multiGet = async (keys: string[]) => { return keys.reduce( - (res, k) => (res.push([k, this.db.get(k)!]), res), + (res, k) => { + res.push([k, this.db.get(k)!]); + + return res; + }, [] as [string, string][], ); }; diff --git a/packages/datastore/src/storage/adapter/IndexedDBAdapter.ts b/packages/datastore/src/storage/adapter/IndexedDBAdapter.ts index d059dd996d8..6e8a6986160 100644 --- a/packages/datastore/src/storage/adapter/IndexedDBAdapter.ts +++ b/packages/datastore/src/storage/adapter/IndexedDBAdapter.ts @@ -1,9 +1,9 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
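`getOne` above reads the first or last entry of an insertion-ordered `Map` by iterating it, breaking immediately for `FIRST` and running the loop out for `LAST`. A condensed sketch of that access pattern:

```ts
const collection = new Map<string, string>([
  ['id-1', 'ulid-a'],
  ['id-2', 'ulid-b'],
  ['id-3', 'ulid-c'],
]);

function firstEntry(map: Map<string, string>): [string, string] | undefined {
  for (const entry of map) return entry; // leave on the first iteration

  return undefined;
}

function lastEntry(map: Map<string, string>): [string, string] | undefined {
  let last: [string, string] | undefined;
  for (const entry of map) last = entry; // keep overwriting until the end

  return last;
}

console.log(firstEntry(collection)); // ['id-1', 'ulid-a']
console.log(lastEntry(collection)); // ['id-3', 'ulid-c']
```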
// SPDX-License-Identifier: Apache-2.0 import * as idb from 'idb'; +import { ConsoleLogger } from '@aws-amplify/core'; + import { - isPredicateObj, - isPredicateGroup, ModelInstanceMetadata, ModelPredicate, OpType, @@ -13,18 +13,20 @@ import { PredicateObject, PredicatesGroup, QueryOne, + isPredicateGroup, + isPredicateObj, } from '../../types'; import { + getStorename, + inMemoryPagination, isPrivateMode, + isSafariCompatabilityMode, + keysEqual, traverseModel, validatePredicate, - inMemoryPagination, - keysEqual, - getStorename, - isSafariCompatabilityMode, } from '../../util'; + import { StorageAdapterBase } from './StorageAdapterBase'; -import { ConsoleLogger } from '@aws-amplify/core'; const logger = new ConsoleLogger('DataStore'); @@ -55,7 +57,7 @@ const DB_VERSION = 3; class IndexedDBAdapter extends StorageAdapterBase { protected db!: idb.IDBPDatabase; - private safariCompatabilityMode: boolean = false; + private safariCompatabilityMode = false; // checks are called by StorageAdapterBase class protected async preSetUpChecks() { @@ -77,7 +79,7 @@ class IndexedDBAdapter extends StorageAdapterBase { * @returns IDB Database instance */ protected async initDb(): Promise { - return await idb.openDB(this.dbName, DB_VERSION, { + return idb.openDB(this.dbName, DB_VERSION, { upgrade: async (db, oldVersion, newVersion, txn) => { // create new database if (oldVersion === 0) { @@ -171,8 +173,6 @@ class IndexedDBAdapter extends StorageAdapterBase { txn.abort(); throw error; } - - return; } }, }); @@ -194,7 +194,7 @@ class IndexedDBAdapter extends StorageAdapterBase { const result = await index.get(this.canonicalKeyPath(keyArr)); - return result; + return result as T; } async clear(): Promise { @@ -228,22 +228,25 @@ class IndexedDBAdapter extends StorageAdapterBase { const result: [T, OpType.INSERT | OpType.UPDATE][] = []; for await (const resItem of connectionStoreNames) { - const { storeName, item, instance, keys } = resItem; - const store = tx.objectStore(storeName); + const { storeName: storeNameForRestItem, item, instance, keys } = resItem; + const storeForRestItem = tx.objectStore(storeNameForRestItem); const itemKeyValues: string[] = keys.map(key => item[key]); - const fromDB = await this._get(store, itemKeyValues); - const opType: OpType = fromDB ? OpType.UPDATE : OpType.INSERT; + const fromDBForRestItem = (await this._get( + storeForRestItem, + itemKeyValues, + )) as T; + const opType: OpType = fromDBForRestItem ? OpType.UPDATE : OpType.INSERT; if ( keysEqual(itemKeyValues, modelKeyValues) || opType === OpType.INSERT ) { - const key = await store + const key = await storeForRestItem .index('byPk') .getKey(this.canonicalKeyPath(itemKeyValues)); - await store.put(item, key); + await storeForRestItem.put(item, key); result.push([instance, opType]); } } @@ -281,16 +284,19 @@ class IndexedDBAdapter extends StorageAdapterBase { // if (queryByKey) { const record = await this.getByKey(storeName, queryByKey); + return record ? 
[record] : []; } if (predicates) { const filtered = await this.filterOnPredicate(storeName, predicates); + return this.inMemoryPagination(filtered, pagination); } if (hasSort) { const all = await this.getAll(storeName); + return this.inMemoryPagination(all, pagination); } @@ -301,7 +307,7 @@ class IndexedDBAdapter extends StorageAdapterBase { return this.getAll(storeName); })()) as T[]; - return await this.load(namespaceName, modelConstructor.name, records); + return this.load(namespaceName, modelConstructor.name, records); } async queryOne( @@ -316,7 +322,7 @@ class IndexedDBAdapter extends StorageAdapterBase { .objectStore(storeName) .openCursor(undefined, firstOrLast === QueryOne.FIRST ? 'next' : 'prev'); - const result = cursor ? cursor.value : undefined; + const result = cursor ? (cursor.value as T) : undefined; return result && this.modelInstanceCreator(modelConstructor, result); } @@ -337,7 +343,7 @@ class IndexedDBAdapter extends StorageAdapterBase { const result: [T, OpType][] = []; const txn = this.db.transaction(storeName, 'readwrite'); - const store = txn.store; + const { store } = txn; for (const item of items) { const model = this.modelInstanceCreator(modelConstructor, item); @@ -358,18 +364,23 @@ class IndexedDBAdapter extends StorageAdapterBase { const key = await index.getKey(this.canonicalKeyPath(keyValues)); if (!_deleted) { - const { instance } = connectedModels.find(({ instance }) => { - const instanceKeyValues = this.getIndexKeyValuesFromModel(instance); - return keysEqual(instanceKeyValues, keyValues); - })!; + const { instance } = connectedModels.find( + ({ instance: connectedModelInstance }) => { + const instanceKeyValues = this.getIndexKeyValuesFromModel( + connectedModelInstance, + ); + + return keysEqual(instanceKeyValues, keyValues); + }, + )!; result.push([ - (instance), + instance as unknown as T, key ? 
OpType.UPDATE : OpType.INSERT, ]); await store.put(instance, key); } else { - result.push([(item), OpType.DELETE]); + result.push([item as unknown as T, OpType.DELETE]); if (key) { await store.delete(key); @@ -419,14 +430,14 @@ class IndexedDBAdapter extends StorageAdapterBase { } } - //#region platform-specific helper methods + // #region platform-specific helper methods private async checkPrivate() { - const isPrivate = await isPrivateMode().then(isPrivate => { - return isPrivate; - }); + const isPrivate = await isPrivateMode(); if (isPrivate) { logger.error("IndexedDB not supported in this browser's private mode"); + + // eslint-disable-next-line prefer-promise-reject-errors return Promise.reject( "IndexedDB not supported in this browser's private mode", ); @@ -455,6 +466,7 @@ class IndexedDBAdapter extends StorageAdapterBase { private getNamespaceAndModelFromStorename(storeName: string) { const [namespaceName, ...modelNameArr] = storeName.split('_'); + return { namespaceName, modelName: modelNameArr.join('_'), @@ -485,13 +497,13 @@ class IndexedDBAdapter extends StorageAdapterBase { storeName: string, keyValue: string[], ): Promise { - return await this._get(storeName, keyValue); + return (await this._get(storeName, keyValue)) as T; } private async getAll( storeName: string, ): Promise { - return await this.db.getAll(storeName); + return this.db.getAll(storeName); } /** @@ -565,7 +577,7 @@ class IndexedDBAdapter extends StorageAdapterBase { isPredicateGroup(predicateObjs[0]) && (predicateObjs[0] as PredicatesGroup).type !== 'not' ) { - type = (predicateObjs[0] as PredicatesGroup).type; + ({ type } = predicateObjs[0] as PredicatesGroup); predicateObjs = (predicateObjs[0] as PredicatesGroup).predicates; } @@ -702,7 +714,7 @@ class IndexedDBAdapter extends StorageAdapterBase { // nothing intelligent we can do with `not` groups unless or until we start // smashing comparison operators against indexes -- at which point we could // perform some reversal here. - candidateResults = await this.getAll(storeName); + candidateResults = (await this.getAll(storeName)) as T[]; } const filtered = predicateObjs @@ -753,7 +765,7 @@ class IndexedDBAdapter extends StorageAdapterBase { result = pageResults; } else { - result = await this.db.getAll(storeName); + result = (await this.db.getAll(storeName)) as T[]; } return result; @@ -771,9 +783,10 @@ class IndexedDBAdapter extends StorageAdapterBase { if (this.safariCompatabilityMode) { return keyArr.length > 1 ? keyArr : keyArr[0]; } + return keyArr; }; - //#endregion + // #endregion } export default new IndexedDBAdapter(); diff --git a/packages/datastore/src/storage/adapter/StorageAdapterBase.ts b/packages/datastore/src/storage/adapter/StorageAdapterBase.ts index 8936979f79f..cf77ea75242 100644 --- a/packages/datastore/src/storage/adapter/StorageAdapterBase.ts +++ b/packages/datastore/src/storage/adapter/StorageAdapterBase.ts @@ -1,11 +1,12 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
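`getNamespaceAndModelFromStorename` above splits `<namespace>_<modelName>` store names and rejoins the tail because model names may themselves contain underscores. A short sketch of that parsing:

```ts
function parseStorename(storeName: string) {
  const [namespaceName, ...modelNameArr] = storeName.split('_');

  return {
    namespaceName,
    modelName: modelNameArr.join('_'),
  };
}

// { namespaceName: 'user', modelName: 'Blog_Post' }
console.log(parseStorename('user_Blog_Post'));
```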
// SPDX-License-Identifier: Apache-2.0 -import { Adapter } from './index'; +import type { IDBPDatabase, IDBPObjectStore } from 'idb'; +import { ConsoleLogger } from '@aws-amplify/core'; + import { ModelInstanceCreator } from '../../datastore/datastore'; import { ModelPredicateCreator } from '../../predicates'; import { InternalSchema, - isPredicateObj, ModelInstanceMetadata, ModelPredicate, NamespaceResolver, @@ -16,21 +17,23 @@ import { PredicateObject, PredicatesGroup, QueryOne, + isPredicateObj, } from '../../types'; import { NAMESPACES, - getStorename, - getIndexKeys, + extractPrimaryKeyFieldNames, extractPrimaryKeyValues, + getIndexKeys, + getStorename, + isModelConstructor, traverseModel, validatePredicate, - isModelConstructor, - extractPrimaryKeyFieldNames, } from '../../util'; -import type { IDBPDatabase, IDBPObjectStore } from 'idb'; -import type AsyncStorageDatabase from './AsyncStorageDatabase'; import { ModelRelationship } from '../relationship'; -import { ConsoleLogger } from '@aws-amplify/core'; + +import type AsyncStorageDatabase from './AsyncStorageDatabase'; + +import { Adapter } from './index'; const logger = new ConsoleLogger('DataStore'); const DB_NAME = 'amplify-datastore'; @@ -46,6 +49,7 @@ export abstract class StorageAdapterBase implements Adapter { namsespaceName: NAMESPACES, modelName: string, ) => PersistentModelConstructor; + protected initPromise!: Promise; protected resolve!: (value?: any) => void; protected reject!: (value?: any) => void; @@ -78,12 +82,13 @@ export abstract class StorageAdapterBase implements Adapter { await this.preSetUpChecks(); if (!this.initPromise) { - this.initPromise = new Promise((res, rej) => { - this.resolve = res; - this.reject = rej; + this.initPromise = new Promise((resolve, reject) => { + this.resolve = resolve; + this.reject = reject; }); } else { await this.initPromise; + return; } if (sessionId) { @@ -195,13 +200,14 @@ export abstract class StorageAdapterBase implements Adapter { const set = new Set(); const connectionStoreNames = Object.values(connectedModels).map( ({ modelName, item, instance }) => { - const storeName = getStorename(namespaceName, modelName); - set.add(storeName); + const resolvedStoreName = getStorename(namespaceName, modelName); + set.add(resolvedStoreName); const keys = getIndexKeys( this.schema.namespaces[namespaceName], modelName, ); - return { storeName, item, instance, keys }; + + return { storeName: resolvedStoreName, item, instance, keys }; }, ); @@ -397,7 +403,7 @@ export abstract class StorageAdapterBase implements Adapter { const deletedModels = deleteQueue.reduce( (acc, { items }) => acc.concat(items), - [], + [] as T[], ); return [models, deletedModels]; @@ -413,7 +419,7 @@ export abstract class StorageAdapterBase implements Adapter { const deletedModels = deleteQueue.reduce( (acc, { items }) => acc.concat(items), - [], + [] as T[], ); return [models, deletedModels]; @@ -471,7 +477,7 @@ export abstract class StorageAdapterBase implements Adapter { const deletedModels = deleteQueue.reduce( (acc, { items }) => acc.concat(items), - [], + [] as T[], ); return [[model], deletedModels]; diff --git a/packages/datastore/src/storage/adapter/getDefaultAdapter/index.native.ts b/packages/datastore/src/storage/adapter/getDefaultAdapter/index.native.ts index 95de512f4bd..f4313eee9a2 100644 --- a/packages/datastore/src/storage/adapter/getDefaultAdapter/index.native.ts +++ b/packages/datastore/src/storage/adapter/getDefaultAdapter/index.native.ts @@ -1,6 +1,7 @@ // Copyright Amazon.com, Inc. 
or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 import { Adapter } from '..'; +// eslint-disable-next-line import/no-named-as-default import AsyncStorageAdapter from '../AsyncStorageAdapter'; const getDefaultAdapter: () => Adapter = () => { diff --git a/packages/datastore/src/storage/adapter/getDefaultAdapter/index.ts b/packages/datastore/src/storage/adapter/getDefaultAdapter/index.ts index d2e93163ba0..d4053fb29fe 100644 --- a/packages/datastore/src/storage/adapter/getDefaultAdapter/index.ts +++ b/packages/datastore/src/storage/adapter/getDefaultAdapter/index.ts @@ -1,9 +1,12 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 +import { isBrowser, isWebWorker } from '@aws-amplify/core/internals/utils'; + import { Adapter } from '..'; import IndexedDBAdapter from '../IndexedDBAdapter'; +// eslint-disable-next-line import/no-named-as-default import AsyncStorageAdapter from '../AsyncStorageAdapter'; -import { isWebWorker, isBrowser } from '@aws-amplify/core/internals/utils'; + const getDefaultAdapter: () => Adapter = () => { if ((isBrowser && window.indexedDB) || (isWebWorker() && self.indexedDB)) { return IndexedDBAdapter as Adapter; diff --git a/packages/datastore/src/storage/adapter/index.ts b/packages/datastore/src/storage/adapter/index.ts index 84b47c3baf2..67797f64311 100644 --- a/packages/datastore/src/storage/adapter/index.ts +++ b/packages/datastore/src/storage/adapter/index.ts @@ -17,10 +17,10 @@ export interface Adapter extends SystemComponent { model: T, condition?: ModelPredicate, ): Promise<[T, OpType.INSERT | OpType.UPDATE][]>; - delete: ( + delete( modelOrModelConstructor: T | PersistentModelConstructor, condition?: ModelPredicate, - ) => Promise<[T[], T[]]>; + ): Promise<[T[], T[]]>; query( modelConstructor: PersistentModelConstructor, predicate?: ModelPredicate, diff --git a/packages/datastore/src/storage/relationship.ts b/packages/datastore/src/storage/relationship.ts index 3d35d75f8d4..7cb92dfbf4f 100644 --- a/packages/datastore/src/storage/relationship.ts +++ b/packages/datastore/src/storage/relationship.ts @@ -1,6 +1,6 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -import { isFieldAssociation, ModelFieldType, ModelMeta } from '../types'; +import { ModelFieldType, ModelMeta, isFieldAssociation } from '../types'; /** * Defines a relationship from a LOCAL model.field to a REMOTE model.field and helps @@ -52,6 +52,7 @@ export class ModelRelationship { const relationship = ModelRelationship.from(model, field); relationship && relationships.push(relationship); } + return relationships; } @@ -212,6 +213,7 @@ export class ModelRelationship { // This case is theoretically unnecessary going forward. 
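The `getDefaultAdapter` hunks above return the IndexedDB adapter when a browser or web worker exposes `indexedDB` and fall back to the AsyncStorage adapter otherwise. A simplified, environment-agnostic sketch of that decision; the adapter objects are stand-ins:

```ts
interface Adapter {
  name: string;
}

const indexedDbAdapter: Adapter = { name: 'IndexedDBAdapter' };
const asyncStorageAdapter: Adapter = { name: 'AsyncStorageAdapter' };

const getDefaultAdapter = (): Adapter => {
  // globalThis covers both window (browser) and self (web worker).
  const scope = globalThis as { indexedDB?: unknown };

  return scope.indexedDB ? indexedDbAdapter : asyncStorageAdapter;
};

console.log(getDefaultAdapter().name);
```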
return [this.explicitRemoteAssociation.targetName!]; } else if (this.explicitRemoteAssociation?.targetNames) { + // eslint-disable-next-line @typescript-eslint/no-non-null-asserted-optional-chain return this.explicitRemoteAssociation?.targetNames!; } else if (this.localAssociatedWith) { return this.localAssociatedWith; @@ -249,6 +251,7 @@ export class ModelRelationship { for (let i = 0; i < this.localJoinFields.length; i++) { fk[this.localJoinFields[i]] = remote[this.remoteJoinFields[i]]; } + return fk; } @@ -278,6 +281,7 @@ export class ModelRelationship { if (localValue === null || localValue === undefined) return null; query[this.remoteJoinFields[i]] = local[this.localJoinFields[i]]; } + return query; } } diff --git a/packages/datastore/src/storage/storage.ts b/packages/datastore/src/storage/storage.ts index 6912df89751..5b8d23747be 100644 --- a/packages/datastore/src/storage/storage.ts +++ b/packages/datastore/src/storage/storage.ts @@ -1,11 +1,15 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -import { Observable, filter, map, Subject } from 'rxjs'; +import { Observable, Subject, filter, map } from 'rxjs'; import { Patch } from 'immer'; +import { Mutex } from '@aws-amplify/core/internals/utils'; +import { ConsoleLogger } from '@aws-amplify/core'; + import { ModelInstanceCreator } from '../datastore/datastore'; import { ModelPredicateCreator } from '../predicates'; import { InternalSchema, + InternalSubscriptionMessage, ModelInstanceMetadata, ModelPredicate, NamespaceResolver, @@ -16,26 +20,24 @@ import { PredicatesGroup, QueryOne, SchemaNamespace, - InternalSubscriptionMessage, SubscriptionMessage, isTargetNameAssociation, } from '../types'; import { - isModelConstructor, + NAMESPACES, STORAGE, + isModelConstructor, validatePredicate, valuesEqual, - NAMESPACES, } from '../util'; import { getIdentifierValue } from '../sync/utils'; + import { Adapter } from './adapter'; import getDefaultAdapter from './adapter/getDefaultAdapter'; -import { Mutex } from '@aws-amplify/core/internals/utils'; -import { ConsoleLogger } from '@aws-amplify/core'; export type StorageSubscriptionMessage = InternalSubscriptionMessage & { - mutator?: Symbol; + mutator?: symbol; }; export type StorageFacade = Omit; @@ -78,6 +80,7 @@ class StorageClass implements StorageFacade { async init() { if (this.initialized !== undefined) { await this.initialized; + return; } logger.debug('Starting Storage'); @@ -85,9 +88,9 @@ class StorageClass implements StorageFacade { let resolve: (value?: void | PromiseLike) => void; let reject: (value?: void | PromiseLike) => void; - this.initialized = new Promise((res, rej) => { - resolve = res; - reject = rej; + this.initialized = new Promise((_resolve, _reject) => { + resolve = _resolve; + reject = _reject; }); this.adapter!.setUp( @@ -104,7 +107,7 @@ class StorageClass implements StorageFacade { async save( model: T, condition?: ModelPredicate, - mutator?: Symbol, + mutator?: symbol, patchesTuple?: [Patch[], PersistentModel], ): Promise<[T, OpType.INSERT | OpType.UPDATE][]> { await this.init(); @@ -153,7 +156,7 @@ class StorageClass implements StorageFacade { const element = updateMutationInput || savedElement; - const modelConstructor = (Object.getPrototypeOf(savedElement) as Object) + const modelConstructor = (Object.getPrototypeOf(savedElement) as object) .constructor as PersistentModelConstructor; this.pushStream.next({ @@ -175,17 +178,19 @@ class StorageClass implements StorageFacade { delete( model: T, 
condition?: ModelPredicate, - mutator?: Symbol, + mutator?: symbol, ): Promise<[T[], T[]]>; + delete( modelConstructor: PersistentModelConstructor, condition?: ModelPredicate, - mutator?: Symbol, + mutator?: symbol, ): Promise<[T[], T[]]>; + async delete( modelOrModelConstructor: T | PersistentModelConstructor, condition?: ModelPredicate, - mutator?: Symbol, + mutator?: symbol, ): Promise<[T[], T[]]> { await this.init(); if (!this.adapter) { @@ -212,6 +217,7 @@ class StorageClass implements StorageFacade { const modelIds = new Set( models.map(model => { const modelId = getIdentifierValue(modelDefinition, model); + return modelId; }), ); @@ -224,7 +230,7 @@ class StorageClass implements StorageFacade { } deleted.forEach(model => { - const modelConstructor = (Object.getPrototypeOf(model) as Object) + const resolvedModelConstructor = (Object.getPrototypeOf(model) as object) .constructor as PersistentModelConstructor; let theCondition: PredicatesGroup | undefined; @@ -237,7 +243,7 @@ class StorageClass implements StorageFacade { } this.pushStream.next({ - model: modelConstructor, + model: resolvedModelConstructor, opType: OpType.DELETE, element: model, mutator, @@ -258,7 +264,7 @@ class StorageClass implements StorageFacade { throw new Error('Storage adapter is missing'); } - return await this.adapter.query(modelConstructor, predicate, pagination); + return this.adapter.query(modelConstructor, predicate, pagination); } async queryOne( @@ -270,13 +276,13 @@ class StorageClass implements StorageFacade { throw new Error('Storage adapter is missing'); } - return await this.adapter.queryOne(modelConstructor, firstOrLast); + return this.adapter.queryOne(modelConstructor, firstOrLast); } observe( modelConstructor?: PersistentModelConstructor | null, predicate?: ModelPredicate | null, - skipOwn?: Symbol, + skipOwn?: symbol, ): Observable> { const listenToAll = !modelConstructor; const { predicates, type } = @@ -331,7 +337,7 @@ class StorageClass implements StorageFacade { async batchSave( modelConstructor: PersistentModelConstructor, items: ModelInstanceMetadata[], - mutator?: Symbol, + mutator?: symbol, ): Promise<[T, OpType][]> { await this.init(); if (!this.adapter) { @@ -367,9 +373,9 @@ class StorageClass implements StorageFacade { const [patches, source] = patchesTuple!; const updatedElement = {}; // extract array of updated fields from patches - const updatedFields = ( - patches.map(patch => patch.path && patch.path[0]) - ); + const updatedFields = patches.map( + patch => patch.path && patch.path[0], + ) as string[]; // check model def for association and replace with targetName if exists const modelConstructor = Object.getPrototypeOf(model) @@ -487,13 +493,13 @@ class ExclusiveStorage implements StorageFacade { } runExclusive(fn: (storage: StorageClass) => Promise) { - return >this.mutex.runExclusive(fn.bind(this, this.storage)); + return this.mutex.runExclusive(fn.bind(this, this.storage)) as Promise; } async save( model: T, condition?: ModelPredicate, - mutator?: Symbol, + mutator?: symbol, patchesTuple?: [Patch[], PersistentModel], ): Promise<[T, OpType.INSERT | OpType.UPDATE][]> { return this.runExclusive<[T, OpType.INSERT | OpType.UPDATE][]>(storage => @@ -504,17 +510,19 @@ class ExclusiveStorage implements StorageFacade { async delete( model: T, condition?: ModelPredicate, - mutator?: Symbol, + mutator?: symbol, ): Promise<[T[], T[]]>; + async delete( modelConstructor: PersistentModelConstructor, condition?: ModelPredicate, - mutator?: Symbol, + mutator?: symbol, ): Promise<[T[], T[]]>; + 
async delete( modelOrModelConstructor: T | PersistentModelConstructor, condition?: ModelPredicate, - mutator?: Symbol, + mutator?: symbol, ): Promise<[T[], T[]]> { return this.runExclusive<[T[], T[]]>(storage => { if (isModelConstructor(modelOrModelConstructor)) { @@ -555,7 +563,7 @@ class ExclusiveStorage implements StorageFacade { observe( modelConstructor?: PersistentModelConstructor | null, predicate?: ModelPredicate | null, - skipOwn?: Symbol, + skipOwn?: symbol, ): Observable> { return this.storage.observe(modelConstructor, predicate, skipOwn); } diff --git a/packages/datastore/src/sync/datastoreConnectivity.ts b/packages/datastore/src/sync/datastoreConnectivity.ts index 17ce4bd1b75..10395ed753e 100644 --- a/packages/datastore/src/sync/datastoreConnectivity.ts +++ b/packages/datastore/src/sync/datastoreConnectivity.ts @@ -1,17 +1,15 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 import { Observable, Observer, SubscriptionLike } from 'rxjs'; -import { ReachabilityMonitor } from './datastoreReachability'; -import { ConsoleLogger } from '@aws-amplify/core'; -const logger = new ConsoleLogger('DataStore'); +import { ReachabilityMonitor } from './datastoreReachability'; const RECONNECTING_IN = 5000; // 5s this may be configurable in the future -type ConnectionStatus = { +interface ConnectionStatus { // Might add other params in the future online: boolean; -}; +} export default class DataStoreConnectivity { private connectionStatus: ConnectionStatus; @@ -28,6 +26,7 @@ export default class DataStoreConnectivity { if (this.observer) { throw new Error('Subscriber already exists'); } + return new Observable(observer => { this.observer = observer; // Will be used to forward socket connection changes, enhancing Reachability @@ -57,7 +56,6 @@ export default class DataStoreConnectivity { // for consistency with other background processors. async stop() { this.unsubscribe(); - return; } socketDisconnected() { diff --git a/packages/datastore/src/sync/index.ts b/packages/datastore/src/sync/index.ts index bcf1e9c72c3..3575caab2a8 100644 --- a/packages/datastore/src/sync/index.ts +++ b/packages/datastore/src/sync/index.ts @@ -1,37 +1,40 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
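`ExclusiveStorage` in the hunks above funnels every operation through `Mutex.runExclusive` so reads and writes cannot interleave. A self-contained sketch of that serialization pattern; the promise-chain mutex below is a stand-in for the real `Mutex` from `@aws-amplify/core`:

```ts
class SimpleMutex {
  private tail: Promise<unknown> = Promise.resolve();

  runExclusive<T>(fn: () => Promise<T>): Promise<T> {
    // Chain the task after everything queued so far; keep the chain alive on errors.
    const result = this.tail.then(fn, fn);
    this.tail = result.catch(() => undefined);

    return result;
  }
}

class ExclusiveStore<T> {
  private readonly mutex = new SimpleMutex();
  private readonly items: T[] = [];

  save(item: T): Promise<number> {
    return this.mutex.runExclusive(async () => {
      this.items.push(item);

      return this.items.length;
    });
  }
}

// Usage: concurrent saves are serialized, so counts come back in order.
const store = new ExclusiveStore<string>();
Promise.all([store.save('a'), store.save('b')]).then(counts => console.log(counts)); // [1, 2]
```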
// SPDX-License-Identifier: Apache-2.0 import { BackgroundProcessManager } from '@aws-amplify/core/internals/utils'; -import { Hub, ConsoleLogger } from '@aws-amplify/core'; +import { ConsoleLogger, Hub } from '@aws-amplify/core'; +import { Observable, SubscriptionLike, filter, of } from 'rxjs'; +import { + ConnectionState, + CONNECTION_STATE_CHANGE as PUBSUB_CONNECTION_STATE_CHANGE, + CONTROL_MSG as PUBSUB_CONTROL_MSG, +} from '@aws-amplify/api-graphql'; -import { filter, Observable, of, SubscriptionLike } from 'rxjs'; import { ModelInstanceCreator } from '../datastore/datastore'; import { ModelPredicateCreator } from '../predicates'; import { ExclusiveStorage as Storage } from '../storage/storage'; import { + AmplifyContext, + AuthModeStrategy, ConflictHandler, ControlMessageType, ErrorHandler, InternalSchema, + ManagedIdentifier, ModelInit, ModelInstanceMetadata, + ModelPredicate, MutableModel, NamespaceResolver, OpType, - PersistentModel, + OptionallyManagedIdentifier, PersistentModelConstructor, SchemaModel, SchemaNamespace, TypeConstructorMap, - ModelPredicate, - AuthModeStrategy, - ManagedIdentifier, - OptionallyManagedIdentifier, - AmplifyContext, } from '../types'; -// tslint:disable:no-duplicate-imports import type { __modelMeta__ } from '../types'; +import { SYNC, USER, getNow } from '../util'; -import { getNow, SYNC, USER } from '../util'; import DataStoreConnectivity from './datastoreConnectivity'; import { ModelMerger } from './merger'; import { MutationEventOutbox } from './outbox'; @@ -39,30 +42,25 @@ import { MutationProcessor } from './processors/mutation'; import { CONTROL_MSG, SubscriptionProcessor } from './processors/subscription'; import { SyncProcessor } from './processors/sync'; import { + TransformerMutationType, createMutationInstanceFromModelOperation, getIdentifierValue, predicateToGraphQLCondition, - TransformerMutationType, } from './utils'; -import { - CONTROL_MSG as PUBSUB_CONTROL_MSG, - ConnectionState, - CONNECTION_STATE_CHANGE as PUBSUB_CONNECTION_STATE_CHANGE, -} from '@aws-amplify/api-graphql'; - const logger = new ConsoleLogger('DataStore'); const ownSymbol = Symbol('sync'); -type StartParams = { +interface StartParams { fullSyncInterval: number; -}; +} export declare class MutationEvent { readonly [__modelMeta__]: { identifier: OptionallyManagedIdentifier; }; + public readonly id: string; public readonly model: string; public readonly operation: TransformerMutationType; @@ -80,6 +78,7 @@ export declare class ModelMetadata { readonly [__modelMeta__]: { identifier: ManagedIdentifier; }; + public readonly id: string; public readonly namespace: string; public readonly model: string; @@ -116,15 +115,17 @@ export class SyncEngine { private readonly modelMerger: ModelMerger; private readonly outbox: MutationEventOutbox; private readonly datastoreConnectivity: DataStoreConnectivity; - private readonly modelSyncedStatus: WeakMap< + private readonly modelSyncedStatus = new WeakMap< PersistentModelConstructor, boolean - > = new WeakMap(); + >(); + private unsleepSyncQueriesObservable: (() => void) | null; private waitForSleepState: Promise; private syncQueriesObservableStartSleeping: ( value?: void | PromiseLike, ) => void; + private stopDisruptionListener: () => void; private connectionDisrupted = false; @@ -159,13 +160,12 @@ export class SyncEngine { this.syncQueriesObservableStartSleeping = resolve; }); - const MutationEvent = this.modelClasses[ - 'MutationEvent' - ] as PersistentModelConstructor; + const MutationEventCtor = this.modelClasses + 
.MutationEvent as PersistentModelConstructor; this.outbox = new MutationEventOutbox( this.schema, - MutationEvent, + MutationEventCtor, modelInstanceCreator, ownSymbol, ); @@ -196,7 +196,7 @@ export class SyncEngine { this.userModelClasses, this.outbox, this.modelInstanceCreator, - MutationEvent, + MutationEventCtor, this.amplifyConfig, this.authModeStrategy, errorHandler, @@ -219,203 +219,200 @@ export class SyncEngine { await this.setupModels(params); } catch (err) { observer.error(err); + return; } // this is awaited at the bottom. so, we don't need to register // this explicitly with the context. it's already contained. - const startPromise = new Promise( - (doneStarting, failedStarting) => { - this.datastoreConnectivity.status().subscribe( - async ({ online }) => - this.runningProcesses.isOpen && - this.runningProcesses.add(async onTerminate => { - // From offline to online - if (online && !this.online) { - this.online = online; - - observer.next({ - type: ControlMessage.SYNC_ENGINE_NETWORK_STATUS, - data: { - active: this.online, - }, + const startPromise = new Promise((resolve, reject) => { + const doneStarting = resolve; + const failedStarting = reject; + + this.datastoreConnectivity.status().subscribe( + async ({ online }) => + this.runningProcesses.isOpen && + this.runningProcesses.add(async onTerminate => { + // From offline to online + if (online && !this.online) { + this.online = online; + + observer.next({ + type: ControlMessage.SYNC_ENGINE_NETWORK_STATUS, + data: { + active: this.online, + }, + }); + + this.stopDisruptionListener = this.startDisruptionListener(); + // #region GraphQL Subscriptions + const [ctlSubsObservable, dataSubsObservable] = + this.subscriptionsProcessor.start(); + + try { + await new Promise((_resolve, _reject) => { + onTerminate.then(_reject); + const ctlSubsSubscription = ctlSubsObservable.subscribe({ + next: msg => { + if (msg === CONTROL_MSG.CONNECTED) { + _resolve(); + } + }, + error: err => { + _reject(err); + const handleDisconnect = this.disconnectionHandler(); + handleDisconnect(err); + }, + }); + + subscriptions.push(ctlSubsSubscription); }); + } catch (err) { + observer.error(err); + failedStarting(); - let ctlSubsObservable: Observable; - let dataSubsObservable: Observable< - [TransformerMutationType, SchemaModel, PersistentModel] - >; - - this.stopDisruptionListener = - this.startDisruptionListener(); - //#region GraphQL Subscriptions - [ctlSubsObservable, dataSubsObservable] = - this.subscriptionsProcessor.start(); - - try { - await new Promise((resolve, reject) => { - onTerminate.then(reject); - const ctlSubsSubscription = ctlSubsObservable.subscribe( - { - next: msg => { - if (msg === CONTROL_MSG.CONNECTED) { - resolve(); - } - }, - error: err => { - reject(err); - const handleDisconnect = - this.disconnectionHandler(); - handleDisconnect(err); - }, - }, - ); + return; + } - subscriptions.push(ctlSubsSubscription); - }); - } catch (err) { - observer.error(err); - failedStarting(); - return; - } + logger.log('Realtime ready'); - logger.log('Realtime ready'); + observer.next({ + type: ControlMessage.SYNC_ENGINE_SUBSCRIPTIONS_ESTABLISHED, + }); - observer.next({ - type: ControlMessage.SYNC_ENGINE_SUBSCRIPTIONS_ESTABLISHED, - }); + // #endregion - //#endregion + // #region Base & Sync queries + try { + await new Promise((_resolve, _reject) => { + const syncQuerySubscription = + this.syncQueriesObservable().subscribe({ + next: message => { + const { type } = message; - //#region Base & Sync queries - try { - await new Promise((resolve, 
reject) => { - const syncQuerySubscription = - this.syncQueriesObservable().subscribe({ - next: message => { - const { type } = message; + if ( + type === + ControlMessage.SYNC_ENGINE_SYNC_QUERIES_READY + ) { + _resolve(); + } - if ( - type === - ControlMessage.SYNC_ENGINE_SYNC_QUERIES_READY - ) { - resolve(); - } + observer.next(message); + }, + complete: () => { + _resolve(); + }, + error: error => { + _reject(error); + }, + }); - observer.next(message); - }, - complete: () => { - resolve(); - }, - error: error => { - reject(error); + if (syncQuerySubscription) { + subscriptions.push(syncQuerySubscription); + } + }); + } catch (error) { + observer.error(error); + failedStarting(); + + return; + } + // #endregion + + // #region process mutations (outbox) + subscriptions.push( + this.mutationsProcessor + .start() + .subscribe(({ modelDefinition, model: item, hasMore }) => + this.runningProcesses.add(async () => { + const modelConstructor = this.userModelClasses[ + modelDefinition.name + ] as PersistentModelConstructor; + + const model = this.modelInstanceCreator( + modelConstructor, + item, + ); + + await this.storage.runExclusive(storage => + this.modelMerger.merge( + storage, + model, + modelDefinition, + ), + ); + + observer.next({ + type: ControlMessage.SYNC_ENGINE_OUTBOX_MUTATION_PROCESSED, + data: { + model: modelConstructor, + element: model, }, }); - if (syncQuerySubscription) { - subscriptions.push(syncQuerySubscription); - } - }); - } catch (error) { - observer.error(error); - failedStarting(); - return; - } - //#endregion - - //#region process mutations (outbox) - subscriptions.push( - this.mutationsProcessor - .start() - .subscribe( - ({ modelDefinition, model: item, hasMore }) => - this.runningProcesses.add(async () => { - const modelConstructor = this.userModelClasses[ - modelDefinition.name - ] as PersistentModelConstructor; - - const model = this.modelInstanceCreator( - modelConstructor, - item, - ); - - await this.storage.runExclusive(storage => - this.modelMerger.merge( - storage, - model, - modelDefinition, - ), - ); - - observer.next({ - type: ControlMessage.SYNC_ENGINE_OUTBOX_MUTATION_PROCESSED, - data: { - model: modelConstructor, - element: model, - }, - }); - - observer.next({ - type: ControlMessage.SYNC_ENGINE_OUTBOX_STATUS, - data: { - isEmpty: !hasMore, - }, - }); - }, 'mutation processor event'), - ), - ); - //#endregion - - //#region Merge subscriptions buffer - subscriptions.push( - dataSubsObservable!.subscribe( - ([_transformerMutationType, modelDefinition, item]) => - this.runningProcesses.add(async () => { - const modelConstructor = this.userModelClasses[ - modelDefinition.name - ] as PersistentModelConstructor; - - const model = this.modelInstanceCreator( - modelConstructor, - item, - ); - - await this.storage.runExclusive(storage => - this.modelMerger.merge( - storage, - model, - modelDefinition, - ), - ); - }, 'subscription dataSubsObservable event'), + observer.next({ + type: ControlMessage.SYNC_ENGINE_OUTBOX_STATUS, + data: { + isEmpty: !hasMore, + }, + }); + }, 'mutation processor event'), ), - ); - //#endregion - } else if (!online) { - this.online = online; - - observer.next({ - type: ControlMessage.SYNC_ENGINE_NETWORK_STATUS, - data: { - active: this.online, - }, - }); - - subscriptions.forEach(sub => sub.unsubscribe()); - subscriptions = []; - } + ); + // #endregion + + // #region Merge subscriptions buffer + subscriptions.push( + dataSubsObservable!.subscribe( + ([_transformerMutationType, modelDefinition, item]) => + 
this.runningProcesses.add(async () => { + const modelConstructor = this.userModelClasses[ + modelDefinition.name + ] as PersistentModelConstructor; + + const model = this.modelInstanceCreator( + modelConstructor, + item, + ); + + await this.storage.runExclusive(storage => + this.modelMerger.merge( + storage, + model, + modelDefinition, + ), + ); + }, 'subscription dataSubsObservable event'), + ), + ); + // #endregion + } else if (!online) { + this.online = online; + + observer.next({ + type: ControlMessage.SYNC_ENGINE_NETWORK_STATUS, + data: { + active: this.online, + }, + }); + + subscriptions.forEach(sub => { + sub.unsubscribe(); + }); + subscriptions = []; + } - doneStarting(); - }, 'datastore connectivity event'), - ); - }, - ); + doneStarting(); + }, 'datastore connectivity event'), + ); + }); this.storage .observe(null, null, ownSymbol) .pipe( filter(({ model }) => { const modelDefinition = this.getModelDefinition(model); + return modelDefinition.syncable === true; }), ) @@ -424,9 +421,8 @@ export class SyncEngine { this.runningProcesses.add(async () => { const namespace = this.schema.namespaces[this.namespaceResolver(model)]; - const MutationEventConstructor = this.modelClasses[ - 'MutationEvent' - ] as PersistentModelConstructor; + const MutationEventConstructor = this.modelClasses + .MutationEvent as PersistentModelConstructor; const modelDefinition = this.getModelDefinition(model); const graphQLCondition = predicateToGraphQLCondition( condition!, @@ -494,21 +490,14 @@ export class SyncEngine { private async getModelsMetadataWithNextFullSync( currentTimeStamp: number, ): Promise> { - const modelLastSync: Map = new Map( + const modelLastSync = new Map( ( await this.runningProcesses.add( () => this.getModelsMetadata(), 'sync/index getModelsMetadataWithNextFullSync', ) ).map( - ({ - namespace, - model, - lastSync, - lastFullSync, - fullSyncInterval, - lastSyncPredicate, - }) => { + ({ namespace, model, lastSync, lastFullSync, fullSyncInterval }) => { const nextFullSync = lastFullSync! 
+ fullSyncInterval; const syncFrom = !lastFullSync || nextFullSync < currentTimeStamp @@ -541,14 +530,14 @@ export class SyncEngine { let terminated = false; while (!observer.closed && !terminated) { - const count: WeakMap< + const count = new WeakMap< PersistentModelConstructor, { new: number; updated: number; deleted: number; } - > = new WeakMap(); + >(); const modelLastSync = await this.getModelsMetadataWithNextFullSync( Date.now(), @@ -561,9 +550,11 @@ export class SyncEngine { let start: number; let syncDuration: number; let lastStartedAt: number; - await new Promise((resolve, reject) => { + await new Promise((resolve, _reject) => { if (!this.runningProcesses.isOpen) resolve(); - onTerminate.then(() => resolve()); + onTerminate.then(() => { + resolve(); + }); syncQueriesSubscription = this.syncQueriesProcessor .start(modelLastSync) .subscribe({ @@ -613,6 +604,7 @@ export class SyncEngine { } oneByOne.push(item); + return false; }); @@ -661,7 +653,7 @@ export class SyncEngine { if (done) { const { name: modelName } = modelDefinition; - //#region update last sync for type + // #region update last sync for type let modelMetadata = await this.getModelMetadata( namespace, modelName, @@ -694,7 +686,7 @@ export class SyncEngine { undefined, ownSymbol, ); - //#endregion + // #endregion const counts = count.get(modelConstructor); @@ -768,16 +760,16 @@ export class SyncEngine { // TLDR; this is a lot of complexity here for a sleep(), // but, it's not clear to me yet how to support an // extensible, centralized cancelable `sleep()` elegantly. - await this.runningProcesses.add(async onTerminate => { - let sleepTimer; + await this.runningProcesses.add(async onRunningProcessTerminate => { + let _sleepTimer; let unsleep; - const sleep = new Promise(_unsleep => { - unsleep = _unsleep; - sleepTimer = setTimeout(unsleep, msNextFullSync); + const sleep = new Promise(resolve => { + unsleep = resolve; + _sleepTimer = setTimeout(unsleep, msNextFullSync); }); - onTerminate.then(() => { + onRunningProcessTerminate.then(() => { terminated = true; this.syncQueriesObservableStartSleeping(); unsleep(); @@ -785,6 +777,7 @@ export class SyncEngine { this.unsleepSyncQueriesObservable = unsleep; this.syncQueriesObservableStartSleeping(); + return sleep; }, 'syncQueriesObservable sleep'); @@ -927,10 +920,10 @@ export class SyncEngine { } private async getModelsMetadata(): Promise { - const ModelMetadata = this.modelClasses + const ModelMetadataCtor = this.modelClasses .ModelMetadata as PersistentModelConstructor; - const modelsMetadata = await this.storage.query(ModelMetadata); + const modelsMetadata = await this.storage.query(ModelMetadataCtor); return modelsMetadata; } @@ -939,18 +932,22 @@ export class SyncEngine { namespace: string, model: string, ): Promise { - const ModelMetadata = this.modelClasses + const ModelMetadataCtor = this.modelClasses .ModelMetadata as PersistentModelConstructor; const predicate = ModelPredicateCreator.createFromAST( - this.schema.namespaces[SYNC].models[ModelMetadata.name], + this.schema.namespaces[SYNC].models[ModelMetadataCtor.name], { and: [{ namespace: { eq: namespace } }, { model: { eq: model } }] }, ); - const [modelMetadata] = await this.storage.query(ModelMetadata, predicate, { - page: 0, - limit: 1, - }); + const [modelMetadata] = await this.storage.query( + ModelMetadataCtor, + predicate, + { + page: 0, + limit: 1, + }, + ); return modelMetadata; } @@ -1074,6 +1071,7 @@ export class SyncEngine { }, }, }; + return namespace; } diff --git 
a/packages/datastore/src/sync/merger.ts b/packages/datastore/src/sync/merger.ts index eaf9d3ecab9..0cd5dde2989 100644 --- a/packages/datastore/src/sync/merger.ts +++ b/packages/datastore/src/sync/merger.ts @@ -7,6 +7,7 @@ import { PersistentModelConstructor, SchemaModel, } from '../types'; + import { MutationEventOutbox } from './outbox'; import { getIdentifierValue } from './utils'; @@ -14,7 +15,7 @@ import { getIdentifierValue } from './utils'; class ModelMerger { constructor( private readonly outbox: MutationEventOutbox, - private readonly ownSymbol: Symbol, + private readonly ownSymbol: symbol, ) {} /** @@ -55,7 +56,7 @@ class ModelMerger { items: ModelInstanceMetadata[], modelDefinition: SchemaModel, ): Promise<[ModelInstanceMetadata, OpType][]> { - const itemsMap: Map = new Map(); + const itemsMap = new Map(); for (const item of items) { // merge items by model id. Latest record for a given id remains. @@ -66,7 +67,7 @@ class ModelMerger { const page = [...itemsMap.values()]; - return await storage.batchSave(modelConstructor, page, this.ownSymbol); + return storage.batchSave(modelConstructor, page, this.ownSymbol); } } diff --git a/packages/datastore/src/sync/outbox.ts b/packages/datastore/src/sync/outbox.ts index d693e79c3f0..b555e47b5dd 100644 --- a/packages/datastore/src/sync/outbox.ts +++ b/packages/datastore/src/sync/outbox.ts @@ -1,11 +1,10 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -import { MutationEvent } from './index'; import { ModelPredicateCreator } from '../predicates'; import { ExclusiveStorage as Storage, - StorageFacade, Storage as StorageClass, + StorageFacade, } from '../storage/storage'; import { ModelInstanceCreator } from '../datastore/datastore'; import { @@ -15,8 +14,11 @@ import { QueryOne, SchemaModel, } from '../types'; -import { USER, SYNC, directedValueEquality } from '../util'; -import { getIdentifierValue, TransformerMutationType } from './utils'; +import { SYNC, USER, directedValueEquality } from '../util'; + +import { TransformerMutationType, getIdentifierValue } from './utils'; + +import { MutationEvent } from './index'; // TODO: Persist deleted ids // https://github.com/aws-amplify/amplify-js/blob/datastore-docs/packages/datastore/docs/sync-engine.md#outbox @@ -25,9 +27,9 @@ class MutationEventOutbox { constructor( private readonly schema: InternalSchema, - private readonly MutationEvent: PersistentModelConstructor, + private readonly _MutationEvent: PersistentModelConstructor, private readonly modelInstanceCreator: ModelInstanceCreator, - private readonly ownSymbol: Symbol, + private readonly ownSymbol: symbol, ) {} public async enqueue( @@ -36,7 +38,7 @@ class MutationEventOutbox { ): Promise { await storage.runExclusive(async s => { const mutationEventModelDefinition = - this.schema.namespaces[SYNC].models['MutationEvent']; + this.schema.namespaces[SYNC].models.MutationEvent; // `id` is the key for the record in the mutationEvent; // `modelId` is the key for the actual record that was mutated @@ -51,11 +53,12 @@ class MutationEventOutbox { ); // Check if there are any other records with same id - const [first] = await s.query(this.MutationEvent, predicate); + const [first] = await s.query(this._MutationEvent, predicate); // No other record with same modelId, so enqueue if (first === undefined) { await s.save(mutationEvent, undefined, this.ownSymbol); + return; } @@ -64,7 +67,7 @@ class MutationEventOutbox { if (first.operation === TransformerMutationType.CREATE) { if 
(incomingMutationType === TransformerMutationType.DELETE) { - await s.delete(this.MutationEvent, predicate); + await s.delete(this._MutationEvent, predicate); } else { // first gets updated with the incoming mutation's data, condition intentionally skipped @@ -72,7 +75,7 @@ class MutationEventOutbox { // data loss, since update mutations only include changed fields const merged = this.mergeUserFields(first, mutationEvent); await s.save( - this.MutationEvent.copyOf(first, draft => { + this._MutationEvent.copyOf(first, draft => { draft.data = merged.data; }), undefined, @@ -89,7 +92,7 @@ class MutationEventOutbox { merged = this.mergeUserFields(first, mutationEvent); // delete all for model - await s.delete(this.MutationEvent, predicate); + await s.delete(this._MutationEvent, predicate); } merged = merged! || mutationEvent; @@ -125,7 +128,7 @@ class MutationEventOutbox { * @param storage */ public async peek(storage: StorageFacade): Promise { - const head = await storage.queryOne(this.MutationEvent, QueryOne.FIRST); + const head = await storage.queryOne(this._MutationEvent, QueryOne.FIRST); this.inProgressMutationEventId = head ? head.id : undefined!; @@ -143,7 +146,7 @@ class MutationEventOutbox { const modelId = getIdentifierValue(userModelDefinition, model); const mutationEvents = await storage.query( - this.MutationEvent, + this._MutationEvent, ModelPredicateCreator.createFromAST(mutationEventModelDefinition, { and: { modelId: { eq: modelId } }, }), @@ -153,7 +156,7 @@ class MutationEventOutbox { } public async getModelIds(storage: StorageFacade): Promise> { - const mutationEvents = await storage.query(this.MutationEvent); + const mutationEvents = await storage.query(this._MutationEvent); const result = new Set(); @@ -205,10 +208,9 @@ class MutationEventOutbox { } const mutationEventModelDefinition = - this.schema.namespaces[SYNC].models['MutationEvent']; + this.schema.namespaces[SYNC].models.MutationEvent; - const userModelDefinition = - this.schema.namespaces['user'].models[head.model]; + const userModelDefinition = this.schema.namespaces.user.models[head.model]; const recordId = getIdentifierValue(userModelDefinition, record); @@ -223,7 +225,7 @@ class MutationEventOutbox { ); const outdatedMutations = await storage.query( - this.MutationEvent, + this._MutationEvent, predicate, ); @@ -236,16 +238,16 @@ class MutationEventOutbox { const newData = { ...oldData, _version, _lastChangedAt }; - return this.MutationEvent.copyOf(m, draft => { + return this._MutationEvent.copyOf(m, draft => { draft.data = JSON.stringify(newData); }); }); - await storage.delete(this.MutationEvent, predicate); + await storage.delete(this._MutationEvent, predicate); await Promise.all( - reconciledMutations.map( - async m => await storage.save(m, undefined, this.ownSymbol), + reconciledMutations.map(async m => + storage.save(m, undefined, this.ownSymbol), ), ); } @@ -273,13 +275,13 @@ class MutationEventOutbox { ...currentData, }); - return this.modelInstanceCreator(this.MutationEvent, { + return this.modelInstanceCreator(this._MutationEvent, { ...current, data, }); } - /* + /* if a model is using custom timestamp fields the custom field names will be stored in the model attributes diff --git a/packages/datastore/src/sync/processors/errorMaps.ts b/packages/datastore/src/sync/processors/errorMaps.ts index b67a0de5cfb..1714c7288b5 100644 --- a/packages/datastore/src/sync/processors/errorMaps.ts +++ b/packages/datastore/src/sync/processors/errorMaps.ts @@ -16,6 +16,7 @@ export const mutationErrorMap: ErrorMap = { 
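Several hunks in this range drop a redundant `await` on promises that are returned immediately (for example `return storage.batchSave(...)` in merger.ts and `reconciledMutations.map(async m => storage.save(...))` in outbox.ts), in line with ESLint's `no-return-await` guidance. A minimal sketch of the equivalence, with hypothetical names:

// Returning the promise directly: callers observe the same settlement,
// and an extra microtask plus async stack frame are avoided.
async function saveDirect(save: () => Promise<void>): Promise<void> {
	return save();
}

// `return await` is still meaningful inside try/catch, because only an
// awaited rejection is observable by the surrounding catch block.
async function saveGuarded(save: () => Promise<void>): Promise<void> {
	try {
		return await save();
	} catch (err) {
		console.warn('save failed, rethrowing', err);
		throw err;
	}
}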
BadModel: () => false, BadRecord: error => { const { message } = error; + return ( /^Cannot return \w+ for [\w-_]+ type/.test(message) || /^Variable '.+' has coerced Null value for NonNull type/.test(message) @@ -34,10 +35,12 @@ export const subscriptionErrorMap: ErrorMap = { ConfigError: () => false, Transient: observableError => { const error = unwrapObservableError(observableError); + return connectionTimeout(error) || serverError(error); }, Unauthorized: observableError => { const error = unwrapObservableError(observableError); + return /Connection failed.+Unauthorized/.test(error.message); }, }; @@ -60,6 +63,7 @@ function unwrapObservableError(observableError: any) { const { errors: [error], } = ({ + // eslint-disable-next-line no-empty-pattern errors: [], } = observableError); @@ -92,5 +96,6 @@ export function mapErrorToType(errorMap: ErrorMap, error: Error): ErrorType { return errorType; } } + return 'Unknown'; } diff --git a/packages/datastore/src/sync/processors/mutation.ts b/packages/datastore/src/sync/processors/mutation.ts index 18619d5f251..bc38e26e14e 100644 --- a/packages/datastore/src/sync/processors/mutation.ts +++ b/packages/datastore/src/sync/processors/mutation.ts @@ -3,61 +3,61 @@ import { GraphQLResult } from '@aws-amplify/api'; import { InternalAPI } from '@aws-amplify/api/internals'; import { + BackgroundProcessManager, Category, CustomUserAgentDetails, DataStoreAction, - jitteredBackoff, + GraphQLAuthMode, NonRetryableError, + jitteredBackoff, retry, - BackgroundProcessManager, - GraphQLAuthMode, - AmplifyError, } from '@aws-amplify/core/internals/utils'; - import { Observable, Observer } from 'rxjs'; +import { ConsoleLogger } from '@aws-amplify/core'; + import { MutationEvent } from '../'; import { ModelInstanceCreator } from '../../datastore/datastore'; import { ExclusiveStorage as Storage } from '../../storage/storage'; import { + AmplifyContext, AuthModeStrategy, ConflictHandler, DISCARD, ErrorHandler, GraphQLCondition, InternalSchema, - isModelFieldType, - isTargetNameAssociation, ModelInstanceMetadata, OpType, PersistentModel, PersistentModelConstructor, + ProcessName, SchemaModel, TypeConstructorMap, - ProcessName, - AmplifyContext, + isModelFieldType, + isTargetNameAssociation, } from '../../types'; -import { extractTargetNamesFromSrc, USER, ID } from '../../util'; +import { ID, USER, extractTargetNamesFromSrc } from '../../util'; import { MutationEventOutbox } from '../outbox'; import { + TransformerMutationType, buildGraphQLOperation, createMutationInstanceFromModelOperation, getModelAuthModes, - TransformerMutationType, getTokenForCustomAuth, } from '../utils'; + import { getMutationErrorType } from './errorMaps'; -import { ConsoleLogger } from '@aws-amplify/core'; const MAX_ATTEMPTS = 10; const logger = new ConsoleLogger('DataStore'); -type MutationProcessorEvent = { +interface MutationProcessorEvent { operation: TransformerMutationType; modelDefinition: SchemaModel; model: PersistentModel; hasMore: boolean; -}; +} class MutationProcessor { /** @@ -73,7 +73,8 @@ class MutationProcessor { SchemaModel, [TransformerMutationType, string, string][] >(); - private processing: boolean = false; + + private processing = false; private runningProcesses = new BackgroundProcessManager(); @@ -83,7 +84,7 @@ class MutationProcessor { private readonly userClasses: TypeConstructorMap, private readonly outbox: MutationEventOutbox, private readonly modelInstanceCreator: ModelInstanceCreator, - private readonly MutationEvent: PersistentModelConstructor, + private readonly 
_MutationEvent: PersistentModelConstructor, private readonly amplifyConfig: Record = {}, private readonly authModeStrategy: AuthModeStrategy, private readonly errorHandler: ErrorHandler, @@ -216,7 +217,7 @@ class MutationProcessor { data, condition, modelConstructor, - this.MutationEvent, + this._MutationEvent, head, operationAuthModes[authModeAttempts], onTerminate, @@ -236,6 +237,7 @@ class MutationProcessor { }`, ); try { + // eslint-disable-next-line @typescript-eslint/no-confusing-void-expression await this.errorHandler({ recoverySuggestion: 'Ensure app code is up to date, auth directives exist and are correct on each model, and that server-side data has not been invalidated by a schema change. If the problem persists, search for or create an issue: https://github.com/aws-amplify/amplify-js/issues', @@ -260,7 +262,8 @@ class MutationProcessor { operationAuthModes[authModeAttempts] }`, ); - return await authModeRetry(); + + return authModeRetry(); } }; @@ -313,30 +316,30 @@ class MutationProcessor { data: string, condition: string, modelConstructor: PersistentModelConstructor, - MutationEvent: PersistentModelConstructor, + MutationEventCtor: PersistentModelConstructor, mutationEvent: MutationEvent, authMode: GraphQLAuthMode, onTerminate: Promise, ): Promise< [GraphQLResult>, string, SchemaModel] > { - return await retry( + return retry( async ( - model: string, - operation: TransformerMutationType, - data: string, - condition: string, - modelConstructor: PersistentModelConstructor, - MutationEvent: PersistentModelConstructor, - mutationEvent: MutationEvent, + retriedModel: string, + retriedOperation: TransformerMutationType, + retriedData: string, + retriedCondition: string, + retriedModelConstructor: PersistentModelConstructor, + retiredMutationEventCtor: PersistentModelConstructor, + retiredMutationEvent: MutationEvent, ) => { const [query, variables, graphQLCondition, opName, modelDefinition] = this.createQueryVariables( namespaceName, - model, - operation, - data, - condition, + retriedModel, + retriedOperation, + retriedData, + retriedCondition, ); const authToken = await getTokenForCustomAuth( @@ -352,7 +355,7 @@ class MutationProcessor { }; let attempt = 0; - const opType = this.opTypeFromTransformerOperation(operation); + const opType = this.opTypeFromTransformerOperation(retriedOperation); const customUserAgentDetails: CustomUserAgentDetails = { category: Category.DataStore, @@ -361,13 +364,11 @@ class MutationProcessor { do { try { - const result = >>( - await this.amplifyContext.InternalAPI.graphql( - tryWith, - undefined, - customUserAgentDetails, - ) - ); + const result = (await this.amplifyContext.InternalAPI.graphql( + tryWith, + undefined, + customUserAgentDetails, + )) as GraphQLResult>; // Use `as any` because TypeScript doesn't seem to like passing tuples // through generic params. 
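The `retry(...)` call above renames its callback parameters (`retriedModel`, `retriedOperation`, and so on) so they no longer shadow the enclosing method's arguments, and it keeps the jittered backoff strategy passed in as `safeJitteredBackoff`. The real `retry`/`jitteredBackoff` helpers live in `@aws-amplify/core/internals/utils` and are not reproduced here; the sketch below is only a simplified stand-in showing the general retry-with-full-jitter shape:

// Hypothetical, simplified retry helper with full-jitter exponential backoff.
// Not the @aws-amplify/core implementation.
async function retryWithJitter<T>(
	fn: () => Promise<T>,
	maxAttempts = 10,
	baseDelayMs = 100,
): Promise<T> {
	let lastError: unknown;
	for (let attempt = 0; attempt < maxAttempts; attempt++) {
		try {
			return await fn();
		} catch (err) {
			lastError = err;
			// Full jitter: wait a random delay in [0, baseDelayMs * 2^attempt).
			const delayMs = Math.random() * baseDelayMs * 2 ** attempt;
			await new Promise(resolve => setTimeout(resolve, delayMs));
		}
	}
	throw lastError;
}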
@@ -402,20 +403,20 @@ class MutationProcessor { } else { try { retryWith = await this.conflictHandler!({ - modelConstructor, + modelConstructor: retriedModelConstructor, localModel: this.modelInstanceCreator( - modelConstructor, + retriedModelConstructor, variables.input, ), remoteModel: this.modelInstanceCreator( - modelConstructor, + retriedModelConstructor, error.data, ), operation: opType, attempts: attempt, }); - } catch (err) { - logger.warn('conflict trycatch', err); + } catch (caughtErr) { + logger.warn('conflict trycatch', caughtErr); continue; } } @@ -423,33 +424,32 @@ class MutationProcessor { if (retryWith === DISCARD) { // Query latest from server and notify merger - const [[, opName, query]] = buildGraphQLOperation( + const [[, builtOpName, builtQuery]] = buildGraphQLOperation( this.schema.namespaces[namespaceName], modelDefinition, 'GET', ); - const authToken = await getTokenForCustomAuth( + const newAuthToken = await getTokenForCustomAuth( authMode, this.amplifyConfig, ); - const serverData = < - GraphQLResult> - >await this.amplifyContext.InternalAPI.graphql( - { - query, - variables: { id: variables.input.id }, - authMode, - authToken, - }, - undefined, - customUserAgentDetails, - ); + const serverData = + (await this.amplifyContext.InternalAPI.graphql( + { + query: builtQuery, + variables: { id: variables.input.id }, + authMode, + authToken: newAuthToken, + }, + undefined, + customUserAgentDetails, + )) as GraphQLResult>; // onTerminate cancel graphql() - return [serverData, opName, modelDefinition]; + return [serverData, builtOpName, modelDefinition]; } const namespace = this.schema.namespaces[namespaceName]; @@ -460,12 +460,12 @@ class MutationProcessor { namespace.relationships!, modelDefinition, opType, - modelConstructor, + retriedModelConstructor, retryWith, graphQLCondition, - MutationEvent, + retiredMutationEventCtor, this.modelInstanceCreator, - mutationEvent.id, + retiredMutationEvent.id, ); await this.storage.save(updatedMutation); @@ -478,19 +478,23 @@ class MutationProcessor { 'Ensure app code is up to date, auth directives exist and are correct on each model, and that server-side data has not been invalidated by a schema change. If the problem persists, search for or create an issue: https://github.com/aws-amplify/amplify-js/issues', localModel: variables.input, message: error.message, - operation, + operation: retriedOperation, errorType: getMutationErrorType(error), errorInfo: error.errorInfo, process: ProcessName.mutate, cause: error, remoteModel: error.data - ? this.modelInstanceCreator(modelConstructor, error.data) + ? this.modelInstanceCreator( + retriedModelConstructor, + error.data, + ) : null!, }); - } catch (err) { - logger.warn('Mutation error handler failed with:', err); + } catch (caughtErr) { + logger.warn('Mutation error handler failed with:', caughtErr); } finally { // Return empty tuple, dequeues the mutation + // eslint-disable-next-line no-unsafe-finally return error.data ? 
[ { data: { [opName]: error.data } }, @@ -506,6 +510,7 @@ class MutationProcessor { throw new NonRetryableError(err); } } + // eslint-disable-next-line no-unmodified-loop-condition } while (tryWith); }, [ @@ -514,7 +519,7 @@ class MutationProcessor { data, condition, modelConstructor, - MutationEvent, + MutationEventCtor, mutationEvent, ], safeJitteredBackoff, @@ -543,7 +548,9 @@ class MutationProcessor { ([transformerMutationType]) => transformerMutationType === operation, )!; - const { _version, ...parsedData } = JSON.parse(data); + const { _version, ...parsedData } = JSON.parse( + data, + ) as ModelInstanceMetadata; // include all the fields that comprise a custom PK if one is specified const deleteInput = {}; @@ -552,14 +559,14 @@ class MutationProcessor { deleteInput[pkField] = parsedData[pkField]; } } else { - deleteInput[ID] = (parsedData).id; + deleteInput[ID] = (parsedData as any).id; } let mutationInput; if (operation === TransformerMutationType.DELETE) { // For DELETE mutations, only the key(s) are included in the input - mutationInput = deleteInput; + mutationInput = deleteInput as ModelInstanceMetadata; } else { // Otherwise, we construct the mutation input with the following logic mutationInput = {}; @@ -598,7 +605,7 @@ class MutationProcessor { // scalar fields / non-model types if (operation === TransformerMutationType.UPDATE) { - if (!parsedData.hasOwnProperty(name)) { + if (!Object.prototype.hasOwnProperty.call(parsedData, name)) { // for update mutations - strip out a field if it's unchanged continue; } @@ -615,7 +622,7 @@ class MutationProcessor { _version, }; - const graphQLCondition = JSON.parse(condition); + const graphQLCondition = JSON.parse(condition) as GraphQLCondition; const variables = { input, @@ -628,6 +635,7 @@ class MutationProcessor { : null, }), }; + return [query, variables, graphQLCondition, opName, modelDefinition]; } diff --git a/packages/datastore/src/sync/processors/subscription.ts b/packages/datastore/src/sync/processors/subscription.ts index 6e90ed7de32..ac3760255d0 100644 --- a/packages/datastore/src/sync/processors/subscription.ts +++ b/packages/datastore/src/sync/processors/subscription.ts @@ -3,53 +3,49 @@ import { GraphQLResult } from '@aws-amplify/api'; import { InternalAPI } from '@aws-amplify/api/internals'; import { + ConsoleLogger, Hub, HubCapsule, fetchAuthSession, - ConsoleLogger, } from '@aws-amplify/core'; import { + BackgroundProcessManager, Category, CustomUserAgentDetails, DataStoreAction, - BackgroundProcessManager, GraphQLAuthMode, - AmplifyError, JwtPayload, } from '@aws-amplify/core/internals/utils'; - import { Observable, Observer, SubscriptionLike } from 'rxjs'; +import { CONTROL_MSG as PUBSUB_CONTROL_MSG } from '@aws-amplify/api-graphql'; + import { + AmplifyContext, + AuthModeStrategy, + ErrorHandler, InternalSchema, + ModelPredicate, PersistentModel, - SchemaModel, - SchemaNamespace, PredicatesGroup, - ModelPredicate, - AuthModeStrategy, - ErrorHandler, ProcessName, - AmplifyContext, + SchemaModel, + SchemaNamespace, } from '../../types'; import { + RTFError, + TransformerMutationType, buildSubscriptionGraphQLOperation, + generateRTFRemediation, getAuthorizationRules, getModelAuthModes, - getUserGroupsFromToken, - TransformerMutationType, getTokenForCustomAuth, + getUserGroupsFromToken, predicateToGraphQLFilter, - dynamicAuthFields, - filterFields, - repeatedFieldInGroup, - countFilterCombinations, - RTFError, - generateRTFRemediation, } from '../utils'; import { ModelPredicateCreator } from '../../predicates'; import { 
validatePredicate } from '../../util'; + import { getSubscriptionErrorType } from './errorMaps'; -import { CONTROL_MSG as PUBSUB_CONTROL_MSG } from '@aws-amplify/api-graphql'; const logger = new ConsoleLogger('DataStore'); @@ -63,20 +59,22 @@ export enum USER_CREDENTIALS { 'auth', } -type AuthorizationInfo = { +interface AuthorizationInfo { authMode: GraphQLAuthMode; isOwner: boolean; ownerField?: string; ownerValue?: string; -}; +} class SubscriptionProcessor { private readonly typeQuery = new WeakMap< SchemaModel, [TransformerMutationType, string, string][] >(); + private buffer: [TransformerMutationType, SchemaModel, PersistentModel][] = []; + private dataObserver!: Observer; private runningProcesses = new BackgroundProcessManager(); @@ -102,7 +100,7 @@ class SubscriptionProcessor { userCredentials: USER_CREDENTIALS, oidcTokenPayload: JwtPayload | undefined, authMode: GraphQLAuthMode, - filterArg: boolean = false, + filterArg = false, ): { opType: TransformerMutationType; opName: string; @@ -130,6 +128,7 @@ class SubscriptionProcessor { ownerField!, filterArg, ); + return { authMode, opType, opName, query, isOwner, ownerField, ownerValue }; } @@ -164,6 +163,7 @@ class SubscriptionProcessor { const validGroup = (authMode === 'oidc' || authMode === 'userPool') && + // eslint-disable-next-line array-callback-return groupAuthRules.find(groupAuthRule => { // validate token against groupClaim if (oidcTokenPayload) { @@ -233,7 +233,7 @@ class SubscriptionProcessor { } private hubQueryCompletionListener( - completed: Function, + completed: () => void, capsule: HubCapsule<'datastore', { event: string }>, ) { const { @@ -257,13 +257,14 @@ class SubscriptionProcessor { // Creating subs for each model/operation combo so they can be unsubscribed // independently, since the auth retry behavior is asynchronous. - let subscriptions: { - [modelName: string]: { + let subscriptions: Record< + string, + { [TransformerMutationType.CREATE]: SubscriptionLike[]; [TransformerMutationType.UPDATE]: SubscriptionLike[]; [TransformerMutationType.DELETE]: SubscriptionLike[]; - }; - } = {}; + } + > = {}; let oidcTokenPayload: JwtPayload | undefined; let userCredentials = USER_CREDENTIALS.none; this.runningProcesses.add(async () => { @@ -369,7 +370,7 @@ class SubscriptionProcessor { }; if (addFilter && predicatesGroup) { - variables['filter'] = + (variables as any).filter = predicateToGraphQLFilter(predicatesGroup); } @@ -378,6 +379,7 @@ class SubscriptionProcessor { observer.error( 'Owner field required, sign in is needed in order to perform this operation', ); + return; } @@ -390,18 +392,19 @@ class SubscriptionProcessor { }`, ); - const queryObservable = < - Observable>> - >(this.amplifyContext.InternalAPI.graphql( - { - query, - variables, - ...{ authMode }, - authToken, - }, - undefined, - customUserAgentDetails, - )); + const queryObservable = + this.amplifyContext.InternalAPI.graphql( + { + query, + variables, + ...{ authMode }, + authToken, + }, + undefined, + customUserAgentDetails, + ) as unknown as Observable< + GraphQLResult> + >; let subscriptionReadyCallback: (param?: unknown) => void; @@ -414,11 +417,11 @@ class SubscriptionProcessor { next: result => { const { data, errors } = result; if (Array.isArray(errors) && errors.length > 0) { - const messages = (< - { + const messages = ( + errors as { message: string; }[] - >errors).map(({ message }) => message); + ).map(({ message }) => message); logger.warn( `Skipping incoming subscription. 
Messages: ${messages.join( @@ -427,16 +430,16 @@ class SubscriptionProcessor { ); this.drainBuffer(); + return; } - const predicatesGroup = + const resolvedPredicatesGroup = ModelPredicateCreator.getPredicates( this.syncPredicates.get(modelDefinition)!, false, ); - // @ts-ignore const { [opName]: record } = data; // checking incoming subscription against syncPredicate. @@ -446,7 +449,7 @@ class SubscriptionProcessor { if ( this.passesPredicateValidation( record, - predicatesGroup!, + resolvedPredicatesGroup!, ) ) { this.pushToBuffer( @@ -461,6 +464,7 @@ class SubscriptionProcessor { const { errors: [{ message = '' } = {}], } = ({ + // eslint-disable-next-line no-empty-pattern errors: [], } = subscriptionError); @@ -488,6 +492,7 @@ class SubscriptionProcessor { // retry subscription connection without filter subscriptionRetry(operation, false); + return; } @@ -537,6 +542,7 @@ class SubscriptionProcessor { }`, ); subscriptionRetry(operation); + return; } } @@ -544,6 +550,7 @@ class SubscriptionProcessor { logger.warn('subscriptionError', message); try { + // eslint-disable-next-line @typescript-eslint/no-confusing-void-expression await this.errorHandler({ recoverySuggestion: 'Ensure app code is up to date, auth directives exist and are correct on each model, and that server-side data has not been invalidated by a schema change. If the problem persists, search for or create an issue: https://github.com/aws-amplify/amplify-js/issues', @@ -583,11 +590,11 @@ class SubscriptionProcessor { (async () => { let boundFunction: any; let removeBoundFunctionListener: () => void; - await new Promise(res => { - subscriptionReadyCallback = res; + await new Promise(resolve => { + subscriptionReadyCallback = resolve; boundFunction = this.hubQueryCompletionListener.bind( this, - res, + resolve, ); removeBoundFunctionListener = Hub.listen( 'api', @@ -615,13 +622,19 @@ class SubscriptionProcessor { return this.runningProcesses.addCleaner(async () => { Object.keys(subscriptions).forEach(modelName => { subscriptions[modelName][TransformerMutationType.CREATE].forEach( - subscription => subscription.unsubscribe(), + subscription => { + subscription.unsubscribe(); + }, ); subscriptions[modelName][TransformerMutationType.UPDATE].forEach( - subscription => subscription.unsubscribe(), + subscription => { + subscription.unsubscribe(); + }, ); subscriptions[modelName][TransformerMutationType.DELETE].forEach( - subscription => subscription.unsubscribe(), + subscription => { + subscription.unsubscribe(); + }, ); }); }); @@ -669,7 +682,9 @@ class SubscriptionProcessor { private drainBuffer() { if (this.dataObserver) { - this.buffer.forEach(data => this.dataObserver.next!(data)); + this.buffer.forEach(data => { + this.dataObserver.next!(data); + }); this.buffer = []; } } @@ -711,6 +726,7 @@ class SubscriptionProcessor { ); logger.warn(`${header}\n${message}\n${remediationMessage}`); + return true; } diff --git a/packages/datastore/src/sync/processors/sync.ts b/packages/datastore/src/sync/processors/sync.ts index d11ca8d4b82..319e153cb50 100644 --- a/packages/datastore/src/sync/processors/sync.ts +++ b/packages/datastore/src/sync/processors/sync.ts @@ -4,40 +4,40 @@ import { GraphQLResult } from '@aws-amplify/api'; import { InternalAPI } from '@aws-amplify/api/internals'; import { Observable } from 'rxjs'; import { + BackgroundProcessManager, + Category, + CustomUserAgentDetails, + DataStoreAction, + GraphQLAuthMode, + NonRetryableError, + jitteredExponentialRetry, +} from '@aws-amplify/core/internals/utils'; +import { 
ConsoleLogger, Hub } from '@aws-amplify/core'; + +import { + AmplifyContext, + AuthModeStrategy, + ErrorHandler, + GraphQLFilter, InternalSchema, ModelInstanceMetadata, - SchemaModel, ModelPredicate, PredicatesGroup, - GraphQLFilter, - AuthModeStrategy, - ErrorHandler, ProcessName, - AmplifyContext, + SchemaModel, } from '../../types'; import { buildGraphQLOperation, - getModelAuthModes, getClientSideAuthError, getForbiddenError, - predicateToGraphQLFilter, + getModelAuthModes, getTokenForCustomAuth, + predicateToGraphQLFilter, } from '../utils'; -import { - jitteredExponentialRetry, - Category, - CustomUserAgentDetails, - DataStoreAction, - NonRetryableError, - BackgroundProcessManager, - GraphQLAuthMode, - AmplifyError, -} from '@aws-amplify/core/internals/utils'; - -import { Amplify, ConsoleLogger, Hub } from '@aws-amplify/core'; - import { ModelPredicateCreator } from '../../predicates'; + import { getSyncErrorType } from './errorMaps'; + const opResultDefaults = { items: [], nextToken: null, @@ -149,6 +149,7 @@ class SyncProcessor { logger.debug( `Sync successful with authMode: ${readAuthModes[authModeAttempts]}`, ); + return response; } catch (error) { authModeAttempts++; @@ -174,7 +175,8 @@ class SyncProcessor { readAuthModes[authModeAttempts - 1] }. Retrying with authMode: ${readAuthModes[authModeAttempts]}`, ); - return await authModeRetry(); + + return authModeRetry(); } }; @@ -206,16 +208,19 @@ class SyncProcessor { authMode: GraphQLAuthMode; onTerminate: Promise; }): Promise< - GraphQLResult<{ - [opName: string]: { - items: T[]; - nextToken: string; - startedAt: number; - }; - }> + GraphQLResult< + Record< + string, + { + items: T[]; + nextToken: string; + startedAt: number; + } + > + > > { - return await jitteredExponentialRetry( - async (query, variables) => { + return jitteredExponentialRetry( + async (retriedQuery, retriedVariables) => { try { const authToken = await getTokenForCustomAuth( authMode, @@ -229,8 +234,8 @@ class SyncProcessor { return await this.amplifyContext.InternalAPI.graphql( { - query, - variables, + query: retriedQuery, + variables: retriedVariables, authMode, authToken, }, @@ -275,6 +280,7 @@ class SyncProcessor { await Promise.all( otherErrors.map(async err => { try { + // eslint-disable-next-line @typescript-eslint/no-confusing-void-expression await this.errorHandler({ recoverySuggestion: 'Ensure app code is up to date, auth directives exist and are correct on each model, and that server-side data has not been invalidated by a schema change. 
If the problem persists, search for or create an issue: https://github.com/aws-amplify/amplify-js/issues', @@ -368,6 +374,7 @@ class SyncProcessor { const typeLastSync = typesLastSync.get(namespace.models[modelName]); map.set(namespace.models[modelName], typeLastSync!); } + return map; }, new Map(), @@ -394,7 +401,8 @@ class SyncProcessor { parentPromises.get(`${namespace}_${parent}`), ); - const promise = new Promise(async res => { + // eslint-disable-next-line no-async-promise-executor + const promise = new Promise(async resolve => { await Promise.all(promises); do { @@ -407,7 +415,10 @@ class SyncProcessor { logger.debug( `Sync processor has been stopped, terminating sync for ${modelDefinition.name}`, ); - return res(); + + resolve(); + + return; } const limit = Math.min( @@ -431,6 +442,7 @@ class SyncProcessor { )); } catch (error) { try { + // eslint-disable-next-line @typescript-eslint/no-confusing-void-expression await this.errorHandler({ recoverySuggestion: 'Ensure app code is up to date, auth directives exist and are correct on each model, and that server-side data has not been invalidated by a schema change. If the problem persists, search for or create an issue: https://github.com/aws-amplify/amplify-js/issues', @@ -472,7 +484,7 @@ class SyncProcessor { }); } while (!done); - res(); + resolve(); }); parentPromises.set( @@ -500,13 +512,13 @@ class SyncProcessor { } } -export type SyncModelPage = { +export interface SyncModelPage { namespace: string; modelDefinition: SchemaModel; items: ModelInstanceMetadata[]; startedAt: number; done: boolean; isFullSync: boolean; -}; +} export { SyncProcessor }; diff --git a/packages/datastore/src/sync/utils.ts b/packages/datastore/src/sync/utils.ts index ec6c4adf751..2e538ab0efa 100644 --- a/packages/datastore/src/sync/utils.ts +++ b/packages/datastore/src/sync/utils.ts @@ -3,53 +3,55 @@ import { GraphQLAuthError } from '@aws-amplify/api'; import type { GraphQLError } from 'graphql'; import { GraphQLAuthMode } from '@aws-amplify/core/internals/utils'; +import { ConsoleLogger } from '@aws-amplify/core'; + import { ModelInstanceCreator } from '../datastore/datastore'; import { + AuthModeStrategy, AuthorizationRule, GraphQLCondition, - GraphQLFilter, GraphQLField, - isEnumFieldType, - isGraphQLScalarType, - isPredicateObj, - isSchemaModel, - isSchemaModelWithAttributes, - isTargetNameAssociation, - isNonModelFieldType, + GraphQLFilter, + InternalSchema, + ModelAttributes, ModelFields, ModelInstanceMetadata, + ModelOperation, OpType, PersistentModel, PersistentModelConstructor, - PredicatesGroup, PredicateObject, + PredicatesGroup, RelationshipType, SchemaModel, SchemaNamespace, SchemaNonModel, - ModelOperation, - InternalSchema, - AuthModeStrategy, - ModelAttributes, + isEnumFieldType, + isGraphQLScalarType, + isNonModelFieldType, isPredicateGroup, + isPredicateObj, + isSchemaModel, + isSchemaModelWithAttributes, + isTargetNameAssociation, } from '../types'; import { - extractPrimaryKeyFieldNames, - establishRelationAndKeys, IDENTIFIER_KEY_SEPARATOR, + establishRelationAndKeys, + extractPrimaryKeyFieldNames, } from '../util'; + import { MutationEvent } from './'; -import { ConsoleLogger } from '@aws-amplify/core'; const logger = new ConsoleLogger('DataStore'); -enum GraphQLOperationType { - LIST = 'query', - CREATE = 'mutation', - UPDATE = 'mutation', - DELETE = 'mutation', - GET = 'query', -} +const GraphQLOperationType = { + LIST: 'query', + CREATE: 'mutation', + UPDATE: 'mutation', + DELETE: 'mutation', + GET: 'query', +}; export enum 
TransformerMutationType { CREATE = 'Create', @@ -64,10 +66,10 @@ const dummyMetadata: ModelInstanceMetadata = { _deleted: undefined!, }; -const metadataFields = <(keyof ModelInstanceMetadata)[]>( - Object.keys(dummyMetadata) -); -export function getMetadataFields(): ReadonlyArray { +const metadataFields = Object.keys( + dummyMetadata, +) as (keyof ModelInstanceMetadata)[]; +export function getMetadataFields(): readonly string[] { return metadataFields; } @@ -107,6 +109,7 @@ function getImplicitOwnerField( if (!scalarFields.owner && ownerFields.includes('owner')) { return ['owner']; } + return []; } @@ -117,13 +120,16 @@ function getOwnerFields( if (isSchemaModelWithAttributes(modelDefinition)) { modelDefinition.attributes!.forEach(attr => { if (attr.properties && attr.properties.rules) { - const rule = attr.properties.rules.find(rule => rule.allow === 'owner'); + const rule = attr.properties.rules.find( + currentRule => currentRule.allow === 'owner', + ); if (rule && rule.ownerField) { ownerFields.push(rule.ownerField); } } }); } + return ownerFields; } @@ -173,11 +179,12 @@ function getConnectionFields( // Need to retrieve relations in order to get connected model keys const [relations] = establishRelationAndKeys(namespace); - const connectedModelName = - modelDefinition.fields[name].type['model']; + const connectedModelName = ( + modelDefinition.fields[name].type as any + ).model; const byPkIndex = relations[connectedModelName].indexes.find( - ([name]) => name === 'byPk', + ([currentName]) => currentName === 'byPk', ); const keyFields = byPkIndex && byPkIndex[1]; const keyFieldSelectionSet = keyFields?.join(' '); @@ -208,17 +215,18 @@ function getNonModelFields( if (isNonModelFieldType(type)) { const typeDefinition = namespace.nonModels![type.nonModel]; const scalarFields = Object.values(getScalarFields(typeDefinition)).map( - ({ name }) => name, + ({ name: currentName }) => currentName, ); const nested: string[] = []; Object.values(typeDefinition.fields).forEach(field => { - const { type, name } = field; + const { type: fieldType, name: fieldName } = field; - if (isNonModelFieldType(type)) { - const typeDefinition = namespace.nonModels![type.nonModel]; + if (isNonModelFieldType(fieldType)) { + const nonModelTypeDefinition = + namespace.nonModels![fieldType.nonModel]; nested.push( - `${name} { ${generateSelectionSet(namespace, typeDefinition)} }`, + `${fieldName} { ${generateSelectionSet(namespace, nonModelTypeDefinition)} }`, ); } }); @@ -293,6 +301,7 @@ export function getAuthorizationRules( if (isOwnerAuth) { // owner rules has least priority resultRules.push(authRule); + return; } @@ -308,7 +317,7 @@ export function buildSubscriptionGraphQLOperation( transformerMutationType: TransformerMutationType, isOwnerAuthorization: boolean, ownerField: string, - filterArg: boolean = false, + filterArg = false, ): [TransformerMutationType, string, string] { const selectionSet = generateSelectionSet(namespace, modelDefinition); @@ -453,6 +462,7 @@ export function createMutationInstanceFromModelOperation< if (isAWSJSON) { return JSON.stringify(v); } + return v; }; @@ -491,12 +501,13 @@ export function predicateToGraphQLCondition( // key fields from the predicate/condition when ALL of the keyFields are present and using `eq` operators const keyFields = extractPrimaryKeyFieldNames(modelDefinition); + return predicateToGraphQLFilter(predicate, keyFields) as GraphQLCondition; } /** * @param predicatesGroup - Predicate Group @returns GQL Filter Expression from Predicate Group - + @remarks Flattens 
redundant list predicates @example @@ -537,6 +548,7 @@ export function predicateToGraphQLFilter( }; children.push(gqlField); + return; } @@ -557,6 +569,7 @@ export function predicateToGraphQLFilter( ) { delete result[type]; Object.assign(result, child); + return result; } } @@ -693,6 +706,7 @@ export function repeatedFieldInGroup( } seen[fieldName] = true; } + return null; }; @@ -779,6 +793,7 @@ export function generateRTFRemediation( `Dynamic auth modes, such as owner auth and dynamic group auth factor in to the number of combinations you're using.\n` + `You currently have ${dynamicAuthModeFields.size} dynamic auth mode(s) configured on this model: ${dynamicAuthFieldsStr}.`; } + return message; } @@ -796,7 +811,7 @@ export function generateRTFRemediation( } export function getUserGroupsFromToken( - token: { [field: string]: any }, + token: Record, rule: AuthorizationRule, ): string[] { // validate token against groupClaim @@ -861,6 +876,7 @@ export async function getModelAuthModes({ } catch (error) { logger.debug(`Error getting auth modes for model: ${modelName}`, error); } + return modelAuthModes; } @@ -883,12 +899,13 @@ export function getForbiddenError(error) { )}` ); } + return null; } export function resolveServiceErrorStatusCode(error: unknown): number | null { - if (error?.['$metadata']?.['httpStatusCode']) { - return Number(error?.['$metadata']?.['httpStatusCode']); + if ((error as any)?.$metadata?.httpStatusCode) { + return Number((error as any)?.$metadata?.httpStatusCode); } else if ((error as GraphQLError)?.originalError) { return resolveServiceErrorStatusCode( (error as GraphQLError)?.originalError, @@ -906,6 +923,7 @@ export function getClientSideAuthError(error) { clientSideAuthErrors.find(clientError => error.message.includes(clientError), ); + return clientSideError || null; } @@ -920,6 +938,7 @@ export async function getTokenForCustomAuth( if (functionAuthProvider && typeof functionAuthProvider === 'function') { try { const { token } = await functionAuthProvider(); + return token; } catch (error) { throw new Error( diff --git a/packages/datastore/src/types.ts b/packages/datastore/src/types.ts index e443726cdd7..75e0b9ff27e 100644 --- a/packages/datastore/src/types.ts +++ b/packages/datastore/src/types.ts @@ -1,27 +1,28 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 +import { InternalAPI } from '@aws-amplify/api/internals'; +import { GraphQLAuthMode } from '@aws-amplify/core/internals/utils'; + import { ModelInstanceCreator } from './datastore/datastore'; import { + NAMESPACES, + extractPrimaryKeyFieldNames, isAWSDate, - isAWSTime, isAWSDateTime, - isAWSTimestamp, isAWSEmail, + isAWSIPAddress, isAWSJSON, - isAWSURL, isAWSPhone, - isAWSIPAddress, - NAMESPACES, - extractPrimaryKeyFieldNames, + isAWSTime, + isAWSTimestamp, + isAWSURL, } from './util'; import { PredicateAll } from './predicates'; -import { InternalAPI } from '@aws-amplify/api/internals'; import { Adapter } from './storage/adapter'; -import { GraphQLAuthMode } from '@aws-amplify/core/internals/utils'; -export type Scalar = T extends Array ? InnerType : T; +export type Scalar = T extends (infer InnerType)[] ? InnerType : T; -//#region Schema types +// #region Schema types /** * @deprecated If you intended to use the Schema for `generateClient`, then you've imported the wrong Schema type. * Use `import { type Schema } from '../amplify/data/resource' instead. 
If you intended to import the type for DataStore @@ -34,25 +35,25 @@ export type DataStoreSchema = UserSchema & { codegenVersion: string; }; -export type UserSchema = { +export interface UserSchema { models: SchemaModels; nonModels?: SchemaNonModels; relationships?: RelationshipType; keys?: ModelKeys; enums: SchemaEnums; modelTopologicalOrdering?: Map; -}; -export type InternalSchema = { +} +export interface InternalSchema { namespaces: SchemaNamespaces; version: string; codegenVersion: string; -}; +} export type SchemaNamespaces = Record; export type SchemaNamespace = UserSchema & { name: string; }; export type SchemaModels = Record; -export type SchemaModel = { +export interface SchemaModel { name: string; pluralName: string; attributes?: ModelAttributes; @@ -68,10 +69,10 @@ export type SchemaModel = { allFields?: ModelFields; syncable?: boolean; -}; +} export function isSchemaModel(obj: any): obj is SchemaModel { - return obj && (obj).pluralName !== undefined; + return obj && (obj as SchemaModel).pluralName !== undefined; } export function isSchemaModelWithAttributes( @@ -81,37 +82,37 @@ export function isSchemaModelWithAttributes( } export type SchemaNonModels = Record; -export type SchemaNonModel = { +export interface SchemaNonModel { name: string; fields: ModelFields; -}; +} type SchemaEnums = Record; -type SchemaEnum = { +interface SchemaEnum { name: string; values: string[]; -}; -export type ModelMeta = { +} +export interface ModelMeta { builder: PersistentModelConstructor; schema: SchemaModel; pkField: string[]; -}; +} export type ModelAssociation = AssociatedWith | TargetNameAssociation; -type AssociatedWith = { +interface AssociatedWith { connectionType: 'HAS_MANY' | 'HAS_ONE'; associatedWith: string | string[]; targetName?: string; targetNames?: string[]; -}; +} export function isAssociatedWith(obj: any): obj is AssociatedWith { return obj && obj.associatedWith; } -type TargetNameAssociation = { +interface TargetNameAssociation { connectionType: 'BELONGS_TO'; targetName?: string; targetNames?: string[]; -}; +} export function isTargetNameAssociation( obj: any, @@ -119,9 +120,9 @@ export function isTargetNameAssociation( return obj?.targetName || obj?.targetNames; } -type FieldAssociation = { +interface FieldAssociation { connectionType: 'HAS_ONE' | 'BELONGS_TO' | 'HAS_MANY'; -}; +} export function isFieldAssociation( obj: any, fieldName: string, @@ -130,9 +131,12 @@ export function isFieldAssociation( } export type ModelAttributes = ModelAttribute[]; -export type ModelAttribute = { type: string; properties?: Record }; +export interface ModelAttribute { + type: string; + properties?: Record; +} -export type ModelAuthRule = { +export interface ModelAuthRule { allow: string; provider?: string; operations?: string[]; @@ -141,14 +145,14 @@ export type ModelAuthRule = { groups?: string[]; groupClaim?: string; groupsField?: string; -}; +} -export type ModelAttributeAuth = { +export interface ModelAttributeAuth { type: 'auth'; properties: { rules: ModelAuthRule[]; }; -}; +} export function isModelAttributeAuth( attr: ModelAttribute, @@ -161,29 +165,29 @@ export function isModelAttributeAuth( ); } -type ModelAttributeKey = { +interface ModelAttributeKey { type: 'key'; properties: { name?: string; fields: string[]; }; -}; +} -type ModelAttributePrimaryKey = { +interface ModelAttributePrimaryKey { type: 'key'; properties: { name: never; fields: string[]; }; -}; +} -type ModelAttributeCompositeKey = { +interface ModelAttributeCompositeKey { type: 'key'; properties: { name: string; fields: 
[string, string, string, string?, string?]; }; -}; +} export function isModelAttributeKey( attr: ModelAttribute, @@ -212,7 +216,7 @@ export function isModelAttributeCompositeKey( ); } -export type ModelAttributeAuthProperty = { +export interface ModelAttributeAuthProperty { allow: ModelAttributeAuthAllow; identityClaim?: string; groupClaim?: string; @@ -220,7 +224,7 @@ export type ModelAttributeAuthProperty = { operations?: string[]; ownerField?: string; provider?: ModelAttributeAuthProvider; -}; +} export enum ModelAttributeAuthAllow { CUSTOM = 'custom', @@ -256,6 +260,7 @@ export enum GraphQLScalarType { AWSIPAddress, } +// eslint-disable-next-line @typescript-eslint/no-namespace export namespace GraphQLScalarType { export function getJSType( scalar: keyof Omit< @@ -318,7 +323,7 @@ export namespace GraphQLScalarType { } } -export type AuthorizationRule = { +export interface AuthorizationRule { identityClaim: string; ownerField: string; provider: 'userPools' | 'oidc' | 'iam' | 'apiKey'; @@ -327,7 +332,7 @@ export type AuthorizationRule = { groupsField: string; authStrategy: 'owner' | 'groups' | 'private' | 'public'; areSubscriptionsPublic: boolean; -}; +} export function isGraphQLScalarType( obj: any, @@ -338,11 +343,11 @@ export function isGraphQLScalarType( return obj && GraphQLScalarType[obj] !== undefined; } -export type ModelFieldType = { +export interface ModelFieldType { model: string; modelConstructor?: ModelMeta; -}; -export function isModelFieldType( +} +export function isModelFieldType<_ extends PersistentModel>( obj: any, ): obj is ModelFieldType { const modelField: keyof ModelFieldType = 'model'; @@ -351,7 +356,9 @@ export function isModelFieldType( return false; } -export type NonModelFieldType = { nonModel: string }; +export interface NonModelFieldType { + nonModel: string; +} export function isNonModelFieldType(obj: any): obj is NonModelFieldType { const typeField: keyof NonModelFieldType = 'nonModel'; if (obj && obj[typeField]) return true; @@ -359,7 +366,9 @@ export function isNonModelFieldType(obj: any): obj is NonModelFieldType { return false; } -type EnumFieldType = { enum: string }; +interface EnumFieldType { + enum: string; +} export function isEnumFieldType(obj: any): obj is EnumFieldType { const modelField: keyof EnumFieldType = 'enum'; if (obj && obj[modelField]) return true; @@ -367,7 +376,7 @@ export function isEnumFieldType(obj: any): obj is EnumFieldType { return false; } -export type ModelField = { +export interface ModelField { name: string; type: | keyof Omit< @@ -383,22 +392,20 @@ export type ModelField = { isArrayNullable?: boolean; association?: ModelAssociation; attributes?: ModelAttributes[]; -}; -//#endregion +} +// #endregion -//#region Model definition -export type NonModelTypeConstructor = { - new (init: T): T; -}; +// #region Model definition +export type NonModelTypeConstructor = new (init: T) => T; // Class for model -export type PersistentModelConstructor = { +export interface PersistentModelConstructor { new (init: ModelInit>): T; copyOf( src: T, mutator: (draft: MutableModel>) => void, ): T; -}; +} /** * @private @@ -443,7 +450,7 @@ export type OptionallyManagedIdentifier = IdentifierBrand< >; // You provide the values -export type CompositeIdentifier> = IdentifierBrand< +export type CompositeIdentifier = IdentifierBrand< { fields: K; type: T }, 'CompositeIdentifier' >; @@ -494,10 +501,10 @@ export type IdentifierFieldsForInit< // Instance of model export declare const __modelMeta__: unique symbol; -export type PersistentModelMetaData = { 
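The types.ts hunks in this range convert object-shaped `type` aliases (such as `PersistentModelMetaData`, `UserSchema`, and `SchemaModel`) into `interface` declarations, which is what `@typescript-eslint/consistent-type-definitions` prefers; unions, mapped types, and conditional types necessarily remain `type` aliases. A minimal sketch of the conversion, with hypothetical names:

// Before: an object shape declared as a type alias.
type StatusAlias = {
	online: boolean;
};

// After: the equivalent interface; same shape, but it supports declaration
// merging and satisfies consistent-type-definitions set to "interface".
interface StatusShape {
	online: boolean;
}

// Non-object forms still require `type` aliases.
type MaybeStatus = StatusAlias | StatusShape | undefined;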
+export interface PersistentModelMetaData { identifier?: Identifier; readOnlyFields?: string; -}; +} export interface AsyncCollection extends AsyncIterable { toArray(options?: { max?: number }): Promise; @@ -538,19 +545,11 @@ type OptionalRelativesOf = type OmitOptionalRelatives = Omit>; type PickOptionalRelatives = Pick>; -type OmitOptionalFields = Omit< - T, - KeysOfSuperType | OptionalRelativesOf ->; -type PickOptionalFields = Pick< - T, - KeysOfSuperType | OptionalRelativesOf ->; -export type DefaultPersistentModelMetaData = { +export interface DefaultPersistentModelMetaData { identifier: ManagedIdentifier<{ id: string }, 'id'>; readOnlyFields: never; -}; +} export type MetadataOrDefault< T extends PersistentModel, @@ -578,6 +577,7 @@ export type MetadataReadOnlyFields< // This type makes optional some identifiers in the constructor init object (e.g. OptionallyManagedIdentifier) export type ModelInitBase< T extends PersistentModel, + // eslint-disable-next-line @typescript-eslint/ban-types M extends PersistentModelMetaData = {}, > = Omit< T, @@ -592,6 +592,7 @@ export type ModelInitBase< export type ModelInit< T extends PersistentModel, + // eslint-disable-next-line @typescript-eslint/ban-types M extends PersistentModelMetaData = {}, > = { [P in keyof OmitOptionalRelatives>]: SettableFieldType< @@ -617,6 +618,7 @@ type DeepWritable = { export type MutableModel< T extends PersistentModel, + // eslint-disable-next-line @typescript-eslint/ban-types M extends PersistentModelMetaData = {}, // This provides Intellisense with ALL of the properties, regardless of read-only // but will throw a linting error if trying to overwrite a read-only property @@ -625,11 +627,11 @@ export type MutableModel< > & Readonly | MetadataReadOnlyFields>>; -export type ModelInstanceMetadata = { +export interface ModelInstanceMetadata { _version: number; _lastChangedAt: number; _deleted: boolean; -}; +} export type IdentifierFieldValue< T extends PersistentModel, @@ -656,9 +658,9 @@ export function isIdentifierObject( typeof obj === 'object' && obj && keys.every(k => obj[k] !== undefined) ); } -//#endregion +// #endregion -//#region Subscription messages +// #region Subscription messages export enum OpType { INSERT = 'INSERT', UPDATE = 'UPDATE', @@ -670,21 +672,21 @@ export type SubscriptionMessage = Pick< 'opType' | 'element' | 'model' | 'condition' >; -export type InternalSubscriptionMessage = { +export interface InternalSubscriptionMessage { opType: OpType; element: T; model: PersistentModelConstructor; condition: PredicatesGroup | null; savedElement?: T; -}; +} -export type DataStoreSnapshot = { +export interface DataStoreSnapshot { items: T[]; isSynced: boolean; -}; -//#endregion +} +// #endregion -//#region Predicates +// #region Predicates export type PredicateExpression = TypeName extends keyof MapTypeToOperands @@ -695,10 +697,10 @@ export type PredicateExpression = ) => ModelPredicate : never; -type EqualityOperators = { +interface EqualityOperators { ne: T; eq: T; -}; +} type ScalarNumberOperators = EqualityOperators & { le: T; lt: T; @@ -714,22 +716,22 @@ type StringOperators = ScalarNumberOperators & { notContains: T; }; type BooleanOperators = EqualityOperators; -type ArrayOperators = { +interface ArrayOperators { contains: T; notContains: T; -}; +} export type AllOperators = NumberOperators & StringOperators & ArrayOperators; -type MapTypeToOperands = { +interface MapTypeToOperands { number: NumberOperators>; string: StringOperators>; boolean: BooleanOperators>; 'number[]': ArrayOperators; 
'string[]': ArrayOperators; 'boolean[]': ArrayOperators; -}; +} type TypeName = T extends string ? 'string' @@ -745,17 +747,17 @@ type TypeName = T extends string ? 'boolean[]' : never; -export type PredicateGroups = { - and: ( +export interface PredicateGroups { + and( predicate: (predicate: ModelPredicate) => ModelPredicate, - ) => ModelPredicate; - or: ( + ): ModelPredicate; + or( predicate: (predicate: ModelPredicate) => ModelPredicate, - ) => ModelPredicate; - not: ( + ): ModelPredicate; + not( predicate: (predicate: ModelPredicate) => ModelPredicate, - ) => ModelPredicate; -}; + ): ModelPredicate; +} export type ModelPredicate = { [K in keyof M]-?: PredicateExpression>; @@ -765,38 +767,37 @@ export type ProducerModelPredicate = ( condition: ModelPredicate, ) => ModelPredicate; -export type PredicatesGroup = { +export interface PredicatesGroup { type: keyof PredicateGroups; predicates: (PredicateObject | PredicatesGroup)[]; -}; +} export function isPredicateObj( obj: any, ): obj is PredicateObject { - return obj && (>obj).field !== undefined; + return obj && (obj as PredicateObject).field !== undefined; } export function isPredicateGroup( obj: any, ): obj is PredicatesGroup { - return obj && (>obj).type !== undefined; + return obj && (obj as PredicatesGroup).type !== undefined; } -export type PredicateObject = { +export interface PredicateObject { field: keyof T; operator: keyof AllOperators; operand: any; -}; +} export enum QueryOne { FIRST, LAST, } -export type GraphQLField = { - [field: string]: { - [operator: string]: string | number | [number, number]; - }; -}; +export type GraphQLField = Record< + string, + Record +>; export type GraphQLCondition = Partial< | GraphQLField @@ -820,26 +821,26 @@ export type GraphQLFilter = Partial< } >; -//#endregion +// #endregion -//#region Pagination +// #region Pagination -export type ProducerPaginationInput = { +export interface ProducerPaginationInput { sort?: ProducerSortPredicate; limit?: number; page?: number; -}; +} export type ObserveQueryOptions = Pick< ProducerPaginationInput, 'sort' >; -export type PaginationInput = { +export interface PaginationInput { sort?: SortPredicate; limit?: number; page?: number; -}; +} export type ProducerSortPredicate = ( condition: SortPredicate, @@ -862,16 +863,16 @@ export enum SortDirection { export type SortPredicatesGroup = SortPredicateObject[]; -export type SortPredicateObject = { +export interface SortPredicateObject { field: keyof T; sortDirection: keyof typeof SortDirection; -}; +} -//#endregion +// #endregion -//#region System Components +// #region System Components -export type SystemComponent = { +export interface SystemComponent { setUp( schema: InternalSchema, namespaceResolver: NamespaceResolver, @@ -882,62 +883,61 @@ export type SystemComponent = { ) => PersistentModelConstructor, appId?: string, ): Promise; -}; +} export type NamespaceResolver = ( modelConstructor: PersistentModelConstructor, ) => string; -export type ControlMessageType = { +export interface ControlMessageType { type: T; data?: any; -}; +} -//#endregion +// #endregion -//#region Relationship types -export type RelationType = { +// #region Relationship types +export interface RelationType { fieldName: string; modelName: string; relationType: 'HAS_ONE' | 'HAS_MANY' | 'BELONGS_TO'; targetName?: string; targetNames?: string[]; associatedWith?: string | string[]; -}; +} -type IndexOptions = { +interface IndexOptions { unique?: boolean; -}; +} -export type IndexesType = Array<[string, string[], IndexOptions?]>; +export type 
IndexesType = [string, string[], IndexOptions?][]; -export type RelationshipType = { - [modelName: string]: { +export type RelationshipType = Record< + string, + { indexes: IndexesType; relationTypes: RelationType[]; - }; -}; + } +>; -//#endregion +// #endregion -//#region Key type -export type KeyType = { +// #region Key type +export interface KeyType { primaryKey?: string[]; compositeKeys?: Set[]; -}; +} -export type ModelKeys = { - [modelName: string]: KeyType; -}; +export type ModelKeys = Record; -//#endregion +// #endregion -//#region DataStore config types -export type DataStoreConfig = { +// #region DataStore config types +export interface DataStoreConfig { DataStore?: { authModeStrategyType?: AuthModeStrategyType; conflictHandler?: ConflictHandler; // default : retry until client wins up to x times - errorHandler?: (error: SyncError) => void; // default : logger.warn + errorHandler?(error: SyncError): void; // default : logger.warn maxRecordsToSync?: number; // merge syncPageSize?: number; fullSyncInterval?: number; @@ -947,18 +947,18 @@ export type DataStoreConfig = { }; authModeStrategyType?: AuthModeStrategyType; conflictHandler?: ConflictHandler; // default : retry until client wins up to x times - errorHandler?: (error: SyncError) => void; // default : logger.warn + errorHandler?(error: SyncError): void; // default : logger.warn maxRecordsToSync?: number; // merge syncPageSize?: number; fullSyncInterval?: number; syncExpressions?: SyncExpression[]; authProviders?: AuthProviders; storageAdapter?: Adapter; -}; +} -export type AuthProviders = { - functionAuthProvider: () => { token: string } | Promise<{ token: string }>; -}; +export interface AuthProviders { + functionAuthProvider(): { token: string } | Promise<{ token: string }>; +} export enum AuthModeStrategyType { DEFAULT = 'DEFAULT', @@ -971,11 +971,11 @@ export type AuthModeStrategyReturn = | undefined | null; -export type AuthModeStrategyParams = { +export interface AuthModeStrategyParams { schema: InternalSchema; modelName: string; operation: ModelOperation; -}; +} export type AuthModeStrategy = ( authModeStrategyParams: AuthModeStrategyParams, @@ -997,7 +997,7 @@ export type ModelAuthModes = Record< export type SyncExpression = Promise<{ modelConstructor: any; - conditionProducer: (c?: any) => any; + conditionProducer(c?: any): any; }>; /* @@ -1019,14 +1019,14 @@ type Option0 = []; type Option1 = [V5ModelPredicate | undefined]; type Option = Option0 | Option1; -type Lookup = { +interface Lookup { 0: | ModelPredicateExtender | Promise> | typeof PredicateAll | Promise; 1: PredicateInternalsKey | undefined; -}; +} type ConditionProducer> = ( ...args: A @@ -1048,15 +1048,15 @@ export async function syncExpression< }; } -export type SyncConflict = { +export interface SyncConflict { modelConstructor: PersistentModelConstructor; localModel: PersistentModel; remoteModel: PersistentModel; operation: OpType; attempts: number; -}; +} -export type SyncError = { +export interface SyncError { message: string; errorType: ErrorType; errorInfo?: string; @@ -1067,7 +1067,7 @@ export type SyncError = { process: ProcessName; operation: string; cause?: Error; -}; +} export type ErrorType = | 'ConfigError' @@ -1093,21 +1093,21 @@ export type ConflictHandler = ( | typeof DISCARD; export type ErrorHandler = (error: SyncError) => void; -export type DeferredCallbackResolverOptions = { - callback: () => void; +export interface DeferredCallbackResolverOptions { + callback(): void; maxInterval?: number; - errorHandler?: (error: string) => void; -}; 
+ errorHandler?(error: string): void; +} export enum LimitTimerRaceResolvedValues { LIMIT = 'LIMIT', TIMER = 'TIMER', } -//#endregion +// #endregion -export type AmplifyContext = { +export interface AmplifyContext { InternalAPI: typeof InternalAPI; -}; +} // #region V5 predicate types @@ -1206,7 +1206,7 @@ export type ModelPredicateAggregateExtender = ( ) => PredicateInternalsKey[]; export type ValuePredicate< - RT extends PersistentModel, + _RT extends PersistentModel, MT extends MatchableTypes, > = { [K in AllFieldOperators]: K extends 'between' @@ -1240,7 +1240,7 @@ export type ModelPredicateNegation = ( * that should not be exposed on public customer interfaces. */ export class PredicateInternalsKey { - private __isPredicateInternalsKeySentinel: boolean = true; + private __isPredicateInternalsKeySentinel = true; } // #endregion diff --git a/packages/datastore/src/util.ts b/packages/datastore/src/util.ts index 684bda822c1..cd33533ebef 100644 --- a/packages/datastore/src/util.ts +++ b/packages/datastore/src/util.ts @@ -1,40 +1,41 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -import { monotonicFactory, ULID } from 'ulid'; +import { ULID, monotonicFactory } from 'ulid'; import { - amplifyUuid, AmplifyUrl, WordArray, + amplifyUuid, } from '@aws-amplify/core/internals/utils'; -import { produce, applyPatches, Patch } from 'immer'; +import { Patch, applyPatches, produce } from 'immer'; + import { ModelInstanceCreator } from './datastore/datastore'; import { AllOperators, - isPredicateGroup, - isPredicateObj, + DeferredCallbackResolverOptions, + IndexesType, + LimitTimerRaceResolvedValues, + ModelAssociation, + ModelAttribute, + ModelAttributes, + ModelKeys, + NonModelTypeConstructor, + PaginationInput, PersistentModel, PersistentModelConstructor, PredicateGroups, PredicateObject, PredicatesGroup, - RelationshipType, RelationType, - ModelKeys, - ModelAttributes, + RelationshipType, + SchemaModel, SchemaNamespace, - SortPredicatesGroup, SortDirection, + SortPredicatesGroup, + isModelAttributeCompositeKey, isModelAttributeKey, isModelAttributePrimaryKey, - isModelAttributeCompositeKey, - NonModelTypeConstructor, - PaginationInput, - DeferredCallbackResolverOptions, - LimitTimerRaceResolvedValues, - SchemaModel, - ModelAttribute, - IndexesType, - ModelAssociation, + isPredicateGroup, + isPredicateObj, } from './types'; import { ModelSortPredicateCreator } from './predicates'; @@ -73,14 +74,14 @@ export enum NAMESPACES { STORAGE = 'storage', } -const DATASTORE = NAMESPACES.DATASTORE; -const USER = NAMESPACES.USER; -const SYNC = NAMESPACES.SYNC; -const STORAGE = NAMESPACES.STORAGE; +const { DATASTORE } = NAMESPACES; +const { USER } = NAMESPACES; +const { SYNC } = NAMESPACES; +const { STORAGE } = NAMESPACES; export { USER, SYNC, STORAGE, DATASTORE }; -export const exhaustiveCheck = (obj: never, throwOnError: boolean = true) => { +export const exhaustiveCheck = (obj: never, throwOnError = true) => { if (throwOnError) { throw new Error(`Invalid ${obj}`); } @@ -127,6 +128,7 @@ export const validatePredicate = ( if (isPredicateGroup(predicateOrGroup)) { const { type, predicates } = predicateOrGroup; + return validatePredicate(model, type, predicates); } @@ -154,23 +156,26 @@ export const validatePredicateField = ( return value >= operand; case 'gt': return value > operand; - case 'between': - const [min, max] = <[T, T]>operand; + case 'between': { + const [min, max] = operand as [T, T]; + return value >= min && value <= max; + } case 
'beginsWith': return ( !isNullOrUndefined(value) && - ((value)).startsWith((operand)) + (value as unknown as string).startsWith(operand as unknown as string) ); case 'contains': return ( !isNullOrUndefined(value) && - ((value)).indexOf((operand)) > -1 + (value as unknown as string).indexOf(operand as unknown as string) > -1 ); case 'notContains': return ( isNullOrUndefined(value) || - ((value)).indexOf((operand)) === -1 + (value as unknown as string).indexOf(operand as unknown as string) === + -1 ); default: return false; @@ -181,7 +186,7 @@ export const isModelConstructor = ( obj: any, ): obj is PersistentModelConstructor => { return ( - obj && typeof (>obj).copyOf === 'function' + obj && typeof (obj as PersistentModelConstructor).copyOf === 'function' ); }; @@ -220,7 +225,9 @@ export const traverseModel = ( instance: T; }[] = []; - const newInstance = modelConstructor.copyOf(instance, () => {}); + const newInstance = modelConstructor.copyOf(instance, () => { + // no-op + }); result.unshift({ modelName: srcModelName, @@ -251,6 +258,7 @@ let privateModeCheckResult; export const isPrivateMode = () => { return new Promise(resolve => { const dbname = amplifyUuid(); + // eslint-disable-next-line prefer-const let db; const isPrivate = () => { @@ -268,7 +276,7 @@ export const isPrivateMode = () => { privateModeCheckResult = true; - return resolve(false); + resolve(false); }; if (privateModeCheckResult === true) { @@ -276,10 +284,16 @@ export const isPrivateMode = () => { } if (privateModeCheckResult === false) { - return isPrivate(); + isPrivate(); + + return; } - if (indexedDB === null) return isPrivate(); + if (indexedDB === null) { + isPrivate(); + + return; + } db = indexedDB.open(dbname); db.onerror = isPrivate; @@ -313,19 +327,23 @@ export const isSafariCompatabilityMode: () => Promise = async () => { const db: IDBDatabase | false = await new Promise(resolve => { const dbOpenRequest = indexedDB.open(dbName); - dbOpenRequest.onerror = () => resolve(false); + dbOpenRequest.onerror = () => { + resolve(false); + }; dbOpenRequest.onsuccess = () => { - const db = dbOpenRequest.result; - resolve(db); + const openedDb = dbOpenRequest.result; + resolve(openedDb); }; dbOpenRequest.onupgradeneeded = (event: any) => { - const db = event?.target?.result; + const upgradedDb = event?.target?.result; - db.onerror = () => resolve(false); + upgradedDb.onerror = () => { + resolve(false); + }; - const store = db.createObjectStore(storeName, { + const store = upgradedDb.createObjectStore(storeName, { autoIncrement: true, }); @@ -352,7 +370,9 @@ export const isSafariCompatabilityMode: () => Promise = async () => { const getRequest = index.get([1]); - getRequest.onerror = () => resolve(false); + getRequest.onerror = () => { + resolve(false); + }; getRequest.onsuccess = (event: any) => { resolve(event?.target?.result); @@ -360,6 +380,7 @@ export const isSafariCompatabilityMode: () => Promise = async () => { }); if (db && typeof db.close === 'function') { + // eslint-disable-next-line @typescript-eslint/no-confusing-void-expression await db.close(); } @@ -486,7 +507,7 @@ export function sortCompareFunction( export function directedValueEquality( fromObject: object, againstObject: object, - nullish: boolean = false, + nullish = false, ) { const aKeys = Object.keys(fromObject); @@ -507,11 +528,7 @@ export function directedValueEquality( // returns true if equal by value // if nullish is true, treat undefined and null values as equal // to normalize for GQL response values for undefined fields -export function 
valuesEqual( - valA: any, - valB: any, - nullish: boolean = false, -): boolean { +export function valuesEqual(valA: any, valB: any, nullish = false): boolean { let a = valA; let b = valB; @@ -610,6 +627,7 @@ export function inMemoryPagination( return records.slice(start, end); } + return records; } @@ -629,6 +647,7 @@ export async function asyncSome( return true; } } + return false; } @@ -648,6 +667,7 @@ export async function asyncEvery( return false; } } + return true; } @@ -669,6 +689,7 @@ export async function asyncFilter( results.push(item); } } + return results; } @@ -701,6 +722,7 @@ export const isAWSEmail = (val: string): boolean => { export const isAWSJSON = (val: string): boolean => { try { JSON.parse(val); + return true; } catch { return false; @@ -730,6 +752,7 @@ export class DeferredPromise { public resolve: (value: string | PromiseLike) => void; public reject: () => void; constructor() { + // eslint-disable-next-line @typescript-eslint/no-this-alias const self = this; this.promise = new Promise( (resolve: (value: string | PromiseLike) => void, reject) => { @@ -746,7 +769,10 @@ export class DeferredCallbackResolver { private maxInterval: number; private timer: ReturnType; private raceInFlight = false; - private callback = () => {}; + private callback = () => { + // no-op + }; + private errorHandler: (error: string) => void; private defaultErrorHandler = ( msg = 'DeferredCallbackResolver error', @@ -761,7 +787,7 @@ export class DeferredCallbackResolver { } private startTimer(): void { - this.timerPromise = new Promise((resolve, reject) => { + this.timerPromise = new Promise((resolve, _reject) => { this.timer = setTimeout(() => { resolve(LimitTimerRaceResolvedValues.TIMER); }, this.maxInterval); @@ -786,6 +812,7 @@ export class DeferredCallbackResolver { this.raceInFlight = false; this.limitPromise = new DeferredPromise(); + // eslint-disable-next-line no-unsafe-finally return winner!; } } @@ -836,6 +863,7 @@ export function mergePatches( patches = p; }, ); + return patches!; } @@ -845,7 +873,7 @@ export const getStorename = (namespace: string, modelName: string) => { return storeName; }; -//#region Key Utils +// #region Key Utils /* When we have GSI(s) with composite sort keys defined on a model @@ -903,6 +931,7 @@ export const processCompositeKeys = ( if (combined.length === 0) { combined.push(sortKeyFieldsSet); + return combined; } @@ -966,6 +995,7 @@ export const extractPrimaryKeysAndValues = ( ): any => { const primaryKeysAndValues = {}; keyFields.forEach(key => (primaryKeysAndValues[key] = model[key])); + return primaryKeysAndValues; }; @@ -1012,13 +1042,14 @@ export const establishRelationAndKeys = ( typeof fieldAttribute.type === 'object' && 'model' in fieldAttribute.type ) { - const connectionType = fieldAttribute.association!.connectionType; + const { connectionType } = fieldAttribute.association!; relationship[mKey].relationTypes.push({ fieldName: fieldAttribute.name, modelName: fieldAttribute.type.model, relationType: connectionType, - targetName: fieldAttribute.association!['targetName'], - targetNames: fieldAttribute.association!['targetNames'], + targetName: fieldAttribute.association!.targetName, + targetNames: fieldAttribute.association!.targetNames, + // eslint-disable-next-line dot-notation associatedWith: fieldAttribute.association!['associatedWith'], }); @@ -1089,13 +1120,16 @@ export const getIndex = ( src: string, ): string | undefined => { let indexName; + // eslint-disable-next-line array-callback-return rel.some((relItem: RelationType) => { if 
(relItem.modelName === src) { const targetNames = extractTargetNamesFromSrc(relItem); indexName = targetNames && indexNameFromKeys(targetNames); + return true; } }); + return indexName; }; @@ -1112,6 +1146,7 @@ export const getIndexFromAssociation = ( } const associationIndex = indexes.find(([idxName]) => idxName === indexName); + return associationIndex && associationIndex[0]; }; @@ -1144,6 +1179,7 @@ export const indexNameFromKeys = (keys: string[]): string => { if (idx === 0) { return cur; } + return `${prev}${IDENTIFIER_KEY_SEPARATOR}${cur}`; }, ''); }; @@ -1170,7 +1206,7 @@ export const getIndexKeys = ( return [ID]; }; -//#endregion +// #endregion /** * Determine what the managed timestamp field names are for the given model definition diff --git a/packages/datastore/tslint.json b/packages/datastore/tslint.json deleted file mode 100644 index 081fa33ae8f..00000000000 --- a/packages/datastore/tslint.json +++ /dev/null @@ -1,49 +0,0 @@ -{ - "defaultSeverity": "error", - "plugins": ["prettier"], - "extends": [], - "jsRules": {}, - "rules": { - "prefer-const": true, - "no-empty-interface": true, - "no-var-keyword": true, - "object-literal-shorthand": true, - "no-eval": true, - "space-before-function-paren": [ - true, - { - "anonymous": "never", - "named": "never" - } - ], - "no-parameter-reassignment": true, - "align": [true, "parameters"], - "no-duplicate-imports": true, - "one-variable-per-declaration": [false, "ignore-for-loop"], - "triple-equals": [true, "allow-null-check"], - "comment-format": [true, "check-space"], - "indent": [false], - "whitespace": [ - false, - "check-branch", - "check-decl", - "check-operator", - "check-preblock" - ], - "eofline": true, - "variable-name": [ - true, - "check-format", - "allow-pascal-case", - "allow-snake-case", - "allow-leading-underscore" - ], - "semicolon": [ - true, - "always", - "ignore-interfaces", - "ignore-bound-class-methods" - ] - }, - "rulesDirectory": [] -} diff --git a/packages/notifications/__tests__/pushNotifications/providers/pinpoint/apis/identifyUser.native.test.ts b/packages/notifications/__tests__/pushNotifications/providers/pinpoint/apis/identifyUser.native.test.ts index d759f418ed6..5d373a8fe73 100644 --- a/packages/notifications/__tests__/pushNotifications/providers/pinpoint/apis/identifyUser.native.test.ts +++ b/packages/notifications/__tests__/pushNotifications/providers/pinpoint/apis/identifyUser.native.test.ts @@ -1,7 +1,10 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
// SPDX-License-Identifier: Apache-2.0 -import { updateEndpoint } from '@aws-amplify/core/internals/providers/pinpoint'; +import { + getEndpointId, + updateEndpoint, +} from '@aws-amplify/core/internals/providers/pinpoint'; import { assertIsInitialized } from '../../../../../src/pushNotifications/errors/errorHelpers'; import { identifyUser } from '../../../../../src/pushNotifications/providers/pinpoint/apis/identifyUser.native'; import { IdentifyUserInput } from '../../../../../src/pushNotifications/providers/pinpoint/types'; @@ -11,6 +14,7 @@ import { } from '../../../../../src/pushNotifications/utils'; import { getChannelType, + getInflightDeviceRegistration, resolveConfig, } from '../../../../../src/pushNotifications/providers/pinpoint/utils'; import { @@ -32,11 +36,14 @@ describe('identifyUser (native)', () => { // assert mocks const mockAssertIsInitialized = assertIsInitialized as jest.Mock; const mockGetChannelType = getChannelType as jest.Mock; - const mockUpdateEndpoint = updateEndpoint as jest.Mock; + const mockGetEndpointId = getEndpointId as jest.Mock; + const mockGetInflightDeviceRegistration = + getInflightDeviceRegistration as jest.Mock; const mockGetPushNotificationUserAgentString = getPushNotificationUserAgentString as jest.Mock; const mockResolveConfig = resolveConfig as jest.Mock; const mockResolveCredentials = resolveCredentials as jest.Mock; + const mockUpdateEndpoint = updateEndpoint as jest.Mock; beforeAll(() => { mockGetChannelType.mockReturnValue(channelType); @@ -47,7 +54,9 @@ describe('identifyUser (native)', () => { afterEach(() => { mockAssertIsInitialized.mockReset(); + mockGetEndpointId.mockReset(); mockUpdateEndpoint.mockReset(); + mockGetInflightDeviceRegistration.mockClear(); }); it('must be initialized', async () => { @@ -111,4 +120,24 @@ describe('identifyUser (native)', () => { }; await expect(identifyUser(input)).rejects.toBeDefined(); }); + + it('awaits device registration promise when endpoint is not present', async () => { + const input: IdentifyUserInput = { + userId: 'user-id', + userProfile: {}, + }; + mockGetEndpointId.mockResolvedValue(undefined); + await identifyUser(input); + expect(mockGetInflightDeviceRegistration).toHaveBeenCalled(); + }); + + it('does not await device registration promise when endpoint is present', async () => { + const input: IdentifyUserInput = { + userId: 'user-id', + userProfile: {}, + }; + mockGetEndpointId.mockResolvedValue('endpoint-id'); + await identifyUser(input); + expect(mockGetInflightDeviceRegistration).not.toHaveBeenCalled(); + }); }); diff --git a/packages/notifications/__tests__/pushNotifications/providers/pinpoint/apis/initializePushNotifications.native.test.ts b/packages/notifications/__tests__/pushNotifications/providers/pinpoint/apis/initializePushNotifications.native.test.ts index 42011ab0893..404e572b659 100644 --- a/packages/notifications/__tests__/pushNotifications/providers/pinpoint/apis/initializePushNotifications.native.test.ts +++ b/packages/notifications/__tests__/pushNotifications/providers/pinpoint/apis/initializePushNotifications.native.test.ts @@ -13,7 +13,11 @@ import { resolveCredentials, setToken, } from '../../../../../src/pushNotifications/utils'; -import { resolveConfig } from '../../../../../src/pushNotifications//providers/pinpoint/utils'; +import { + rejectInflightDeviceRegistration, + resolveConfig, + resolveInflightDeviceRegistration, +} from '../../../../../src/pushNotifications//providers/pinpoint/utils'; import { completionHandlerId, credentials, @@ -56,8 +60,12 @@ 
describe('initializePushNotifications (native)', () => { const mockGetToken = getToken as jest.Mock; const mockInitialize = initialize as jest.Mock; const mockIsInitialized = isInitialized as jest.Mock; + const mockRejectInflightDeviceRegistration = + rejectInflightDeviceRegistration as jest.Mock; const mockResolveCredentials = resolveCredentials as jest.Mock; const mockResolveConfig = resolveConfig as jest.Mock; + const mockResolveInflightDeviceRegistration = + resolveInflightDeviceRegistration as jest.Mock; const mockSetToken = setToken as jest.Mock; const mockNotifyEventListeners = notifyEventListeners as jest.Mock; const mockNotifyEventListenersAndAwaitHandlers = @@ -114,6 +122,8 @@ describe('initializePushNotifications (native)', () => { mockEventListenerRemover.remove.mockClear(); mockNotifyEventListeners.mockClear(); mockNotifyEventListenersAndAwaitHandlers.mockClear(); + mockRejectInflightDeviceRegistration.mockClear(); + mockResolveInflightDeviceRegistration.mockClear(); }); it('only enables once', () => { @@ -236,29 +246,29 @@ describe('initializePushNotifications (native)', () => { describe('token received', () => { it('registers and calls token received listener', done => { + expect.assertions(6); mockGetToken.mockReturnValue(undefined); mockAddTokenEventListener.mockImplementation( async (heardEvent, handler) => { if (heardEvent === NativeEvent.TOKEN_RECEIVED) { await handler(pushToken); + expect(mockAddTokenEventListener).toHaveBeenCalledWith( + NativeEvent.TOKEN_RECEIVED, + expect.any(Function), + ); + expect(mockSetToken).toHaveBeenCalledWith(pushToken); + expect(mockNotifyEventListeners).toHaveBeenCalledWith( + 'tokenReceived', + pushToken, + ); + expect(mockUpdateEndpoint).toHaveBeenCalled(); + expect(mockResolveInflightDeviceRegistration).toHaveBeenCalled(); + expect(mockRejectInflightDeviceRegistration).not.toHaveBeenCalled(); + done(); } }, ); - mockUpdateEndpoint.mockImplementation(() => { - expect(mockUpdateEndpoint).toHaveBeenCalled(); - done(); - }); initializePushNotifications(); - - expect(mockAddTokenEventListener).toHaveBeenCalledWith( - NativeEvent.TOKEN_RECEIVED, - expect.any(Function), - ); - expect(mockSetToken).toHaveBeenCalledWith(pushToken); - expect(mockNotifyEventListeners).toHaveBeenCalledWith( - 'tokenReceived', - pushToken, - ); }); it('should not be invoke token received listener with the same token twice', () => { @@ -292,6 +302,7 @@ describe('initializePushNotifications (native)', () => { }); it('throws if device registration fails', done => { + expect.assertions(3); mockUpdateEndpoint.mockImplementation(() => { throw new Error(); }); @@ -299,6 +310,10 @@ describe('initializePushNotifications (native)', () => { async (heardEvent, handler) => { if (heardEvent === NativeEvent.TOKEN_RECEIVED) { await expect(handler(pushToken)).rejects.toThrow(); + expect( + mockResolveInflightDeviceRegistration, + ).not.toHaveBeenCalled(); + expect(mockRejectInflightDeviceRegistration).toHaveBeenCalled(); done(); } }, diff --git a/packages/notifications/__tests__/pushNotifications/providers/pinpoint/utils/inflightDeviceRegistration.test.ts b/packages/notifications/__tests__/pushNotifications/providers/pinpoint/utils/inflightDeviceRegistration.test.ts new file mode 100644 index 00000000000..adead4e916f --- /dev/null +++ b/packages/notifications/__tests__/pushNotifications/providers/pinpoint/utils/inflightDeviceRegistration.test.ts @@ -0,0 +1,73 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 + +import { + getInflightDeviceRegistration, + rejectInflightDeviceRegistration, + resolveInflightDeviceRegistration, +} from '../../../../../src/pushNotifications/providers/pinpoint/utils/inflightDeviceRegistration'; +import { InflightDeviceRegistration } from '../../../../../src/pushNotifications/providers/pinpoint/types'; + +describe('inflightDeviceRegistration', () => { + describe('resolveInflightDeviceRegistration', () => { + let getInflightDeviceRegistration: () => InflightDeviceRegistration; + let resolveInflightDeviceRegistration: () => void; + jest.isolateModules(() => { + ({ + getInflightDeviceRegistration, + resolveInflightDeviceRegistration, + } = require('../../../../../src/pushNotifications/providers/pinpoint/utils/inflightDeviceRegistration')); + }); + + it('creates a pending promise on module load', () => { + expect(getInflightDeviceRegistration()).toBeDefined(); + }); + + it('should resolve the promise', async () => { + const blockedFunction = jest.fn(); + const promise = getInflightDeviceRegistration()?.then(() => { + blockedFunction(); + }); + + expect(blockedFunction).not.toHaveBeenCalled(); + resolveInflightDeviceRegistration(); + await promise; + expect(blockedFunction).toHaveBeenCalled(); + }); + + it('should have released the promise from memory', () => { + expect(getInflightDeviceRegistration()).toBeUndefined(); + }); + }); + + describe('rejectInflightDeviceRegistration', () => { + let getInflightDeviceRegistration: () => InflightDeviceRegistration; + let rejectInflightDeviceRegistration: (underlyingError: unknown) => void; + jest.isolateModules(() => { + ({ + getInflightDeviceRegistration, + rejectInflightDeviceRegistration, + } = require('../../../../../src/pushNotifications/providers/pinpoint/utils/inflightDeviceRegistration')); + }); + + it('creates a pending promise on module load', () => { + expect(getInflightDeviceRegistration()).toBeDefined(); + }); + + it('should reject the promise', async () => { + const underlyingError = new Error('underlying-error'); + const blockedFunction = jest.fn(); + const promise = getInflightDeviceRegistration()?.then(() => { + blockedFunction(); + }); + + expect(blockedFunction).not.toHaveBeenCalled(); + rejectInflightDeviceRegistration(underlyingError); + await expect(promise).rejects.toMatchObject({ + name: 'DeviceRegistrationFailed', + underlyingError, + }); + expect(blockedFunction).not.toHaveBeenCalled(); + }); + }); +}); diff --git a/packages/notifications/package.json b/packages/notifications/package.json index dd462cfd3aa..a9b4c7649c4 100644 --- a/packages/notifications/package.json +++ b/packages/notifications/package.json @@ -12,7 +12,7 @@ }, "scripts": { "test": "npm run lint && jest -w 1 --coverage --logHeapUsage", - "test:watch": "tslint 'src/**/*.ts' && jest -w 1 --watch", + "test:watch": "jest -w 1 --watch", "build-with-test": "npm run clean && npm run build", "build:umd": "webpack && webpack --config ./webpack.config.dev.js", "build:esm-cjs": "rollup --forceExit -c rollup.config.mjs", diff --git a/packages/notifications/src/pushNotifications/providers/pinpoint/apis/identifyUser.native.ts b/packages/notifications/src/pushNotifications/providers/pinpoint/apis/identifyUser.native.ts index ee0581d6368..d953797a6a5 100644 --- a/packages/notifications/src/pushNotifications/providers/pinpoint/apis/identifyUser.native.ts +++ b/packages/notifications/src/pushNotifications/providers/pinpoint/apis/identifyUser.native.ts @@ -2,14 +2,21 @@ // SPDX-License-Identifier: Apache-2.0 import { 
PushNotificationAction } from '@aws-amplify/core/internals/utils'; -import { updateEndpoint } from '@aws-amplify/core/internals/providers/pinpoint'; +import { + getEndpointId, + updateEndpoint, +} from '@aws-amplify/core/internals/providers/pinpoint'; import { assertIsInitialized } from '../../../errors/errorHelpers'; import { getPushNotificationUserAgentString, resolveCredentials, } from '../../../utils'; -import { getChannelType, resolveConfig } from '../utils'; +import { + getChannelType, + getInflightDeviceRegistration, + resolveConfig, +} from '../utils'; import { IdentifyUser } from '../types'; export const identifyUser: IdentifyUser = async ({ @@ -21,6 +28,10 @@ export const identifyUser: IdentifyUser = async ({ const { credentials, identityId } = await resolveCredentials(); const { appId, region } = resolveConfig(); const { address, optOut, userAttributes } = options ?? {}; + if (!(await getEndpointId(appId, 'PushNotification'))) { + // if there is no cached endpoint id, wait for successful endpoint creation before continuing + await getInflightDeviceRegistration(); + } await updateEndpoint({ address, channelType: getChannelType(), diff --git a/packages/notifications/src/pushNotifications/providers/pinpoint/apis/initializePushNotifications.native.ts b/packages/notifications/src/pushNotifications/providers/pinpoint/apis/initializePushNotifications.native.ts index 0e6de6f4212..2582adab22f 100644 --- a/packages/notifications/src/pushNotifications/providers/pinpoint/apis/initializePushNotifications.native.ts +++ b/packages/notifications/src/pushNotifications/providers/pinpoint/apis/initializePushNotifications.native.ts @@ -23,7 +23,9 @@ import { import { createMessageEventRecorder, getChannelType, + rejectInflightDeviceRegistration, resolveConfig, + resolveInflightDeviceRegistration, } from '../utils'; const { @@ -203,16 +205,24 @@ const addAnalyticsListeners = (): void => { const registerDevice = async (address: string): Promise => { const { credentials, identityId } = await resolveCredentials(); const { appId, region } = resolveConfig(); - await updateEndpoint({ - address, - appId, - category: 'PushNotification', - credentials, - region, - channelType: getChannelType(), - identityId, - userAgentValue: getPushNotificationUserAgentString( - PushNotificationAction.InitializePushNotifications, - ), - }); + try { + await updateEndpoint({ + address, + appId, + category: 'PushNotification', + credentials, + region, + channelType: getChannelType(), + identityId, + userAgentValue: getPushNotificationUserAgentString( + PushNotificationAction.InitializePushNotifications, + ), + }); + // always resolve inflight device registration promise here even though the promise is only awaited on by + // `identifyUser` when no endpoint is found in the cache + resolveInflightDeviceRegistration(); + } catch (underlyingError) { + rejectInflightDeviceRegistration(underlyingError); + throw underlyingError; + } }; diff --git a/packages/notifications/src/pushNotifications/providers/pinpoint/types/index.ts b/packages/notifications/src/pushNotifications/providers/pinpoint/types/index.ts index 6593be71f55..1e9fc1c5245 100644 --- a/packages/notifications/src/pushNotifications/providers/pinpoint/types/index.ts +++ b/packages/notifications/src/pushNotifications/providers/pinpoint/types/index.ts @@ -37,4 +37,8 @@ export { OnTokenReceivedOutput, } from './outputs'; export { IdentifyUserOptions } from './options'; -export { ChannelType } from './pushNotifications'; +export { + ChannelType, + InflightDeviceRegistration, + 
InflightDeviceRegistrationResolver, +} from './pushNotifications'; diff --git a/packages/notifications/src/pushNotifications/providers/pinpoint/types/pushNotifications.ts b/packages/notifications/src/pushNotifications/providers/pinpoint/types/pushNotifications.ts index 8bebe07135a..bc4590edc00 100644 --- a/packages/notifications/src/pushNotifications/providers/pinpoint/types/pushNotifications.ts +++ b/packages/notifications/src/pushNotifications/providers/pinpoint/types/pushNotifications.ts @@ -3,4 +3,13 @@ import { updateEndpoint } from '@aws-amplify/core/internals/providers/pinpoint'; +import { PushNotificationError } from '../../../errors'; + export type ChannelType = Parameters<typeof updateEndpoint>[0]['channelType']; + +export type InflightDeviceRegistration = Promise<void> | undefined; + +export interface InflightDeviceRegistrationResolver { + resolve?(): void; + reject?(error: PushNotificationError): void; +} diff --git a/packages/notifications/src/pushNotifications/providers/pinpoint/utils/index.ts b/packages/notifications/src/pushNotifications/providers/pinpoint/utils/index.ts index bc677bceb47..8551b1e1e02 100644 --- a/packages/notifications/src/pushNotifications/providers/pinpoint/utils/index.ts +++ b/packages/notifications/src/pushNotifications/providers/pinpoint/utils/index.ts @@ -4,4 +4,9 @@ export { createMessageEventRecorder } from './createMessageEventRecorder'; export { getAnalyticsEvent } from './getAnalyticsEvent'; export { getChannelType } from './getChannelType'; +export { + getInflightDeviceRegistration, + rejectInflightDeviceRegistration, + resolveInflightDeviceRegistration, +} from './inflightDeviceRegistration'; export { resolveConfig } from './resolveConfig'; diff --git a/packages/notifications/src/pushNotifications/providers/pinpoint/utils/inflightDeviceRegistration.ts b/packages/notifications/src/pushNotifications/providers/pinpoint/utils/inflightDeviceRegistration.ts new file mode 100644 index 00000000000..39d9b212e63 --- /dev/null +++ b/packages/notifications/src/pushNotifications/providers/pinpoint/utils/inflightDeviceRegistration.ts @@ -0,0 +1,38 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0 + +import { PushNotificationError } from '../../../errors'; +import { + InflightDeviceRegistration, + InflightDeviceRegistrationResolver, +} from '../types'; + +const inflightDeviceRegistrationResolver: InflightDeviceRegistrationResolver = + {}; + +let inflightDeviceRegistration: InflightDeviceRegistration = new Promise( + (resolve, reject) => { + inflightDeviceRegistrationResolver.resolve = resolve; + inflightDeviceRegistrationResolver.reject = reject; + }, +); + +export const getInflightDeviceRegistration = () => inflightDeviceRegistration; + +export const resolveInflightDeviceRegistration = () => { + inflightDeviceRegistrationResolver.resolve?.(); + // release promise from memory + inflightDeviceRegistration = undefined; +}; + +export const rejectInflightDeviceRegistration = (underlyingError: unknown) => { + inflightDeviceRegistrationResolver.reject?.( + new PushNotificationError({ + name: 'DeviceRegistrationFailed', + message: 'Failed to register device for push notifications.', + underlyingError, + }), + ); + // release promise from memory + inflightDeviceRegistration = undefined; +}; diff --git a/packages/predictions/__tests__/providers/AWSAIIdentifyPredictionsProvider.test.ts b/packages/predictions/__tests__/providers/AWSAIIdentifyPredictionsProvider.test.ts index 77d44ee8a7f..042ca5c7114 100644 --- a/packages/predictions/__tests__/providers/AWSAIIdentifyPredictionsProvider.test.ts +++ b/packages/predictions/__tests__/providers/AWSAIIdentifyPredictionsProvider.test.ts @@ -291,7 +291,6 @@ mockGetUrl.mockImplementation(({ key, options }) => { ); } else { const identityId = options?.targetIdentityId || 'identityId'; - // tslint:disable-next-line: max-line-length url = new URL( `https://bucket-name.s3.us-west-2.amazonaws.com/${level}/${identityId}/key.png?X-Amz-Algorithm=AWS4-HMAC-SHA256`, ); diff --git a/packages/react-native/package.json b/packages/react-native/package.json index cd45c993734..20ff2a87470 100644 --- a/packages/react-native/package.json +++ b/packages/react-native/package.json @@ -11,7 +11,7 @@ "access": "public" }, "scripts": { - "test": "tslint 'src/**/*.ts'", + "test": "echo 'no-op'", "test:android": "./android/gradlew test -p ./android", "build-with-test": "npm run clean && npm test && tsc", "build:esm-cjs": "rollup --forceExit -c rollup.config.mjs", diff --git a/packages/rtn-web-browser/package.json b/packages/rtn-web-browser/package.json index 857f2c3985a..a08b657d875 100644 --- a/packages/rtn-web-browser/package.json +++ b/packages/rtn-web-browser/package.json @@ -11,7 +11,7 @@ "access": "public" }, "scripts": { - "test": "tslint 'src/**/*.ts'", + "test": "echo 'no-op'", "test:android": "./android/gradlew test -p ./android", "build-with-test": "npm run clean && npm test && tsc", "build:esm-cjs": "rollup --forceExit -c rollup.config.mjs", diff --git a/packages/storage/__tests__/providers/s3/apis/copy.test.ts b/packages/storage/__tests__/providers/s3/apis/copy.test.ts index 52eaf7c902f..260e38d4863 100644 --- a/packages/storage/__tests__/providers/s3/apis/copy.test.ts +++ b/packages/storage/__tests__/providers/s3/apis/copy.test.ts @@ -3,14 +3,15 @@ import { AWSCredentials } from '@aws-amplify/core/internals/utils'; import { Amplify, StorageAccessLevel } from '@aws-amplify/core'; + import { StorageError } from '../../../../src/errors/StorageError'; import { StorageValidationErrorCode } from '../../../../src/errors/types/validation'; import { copyObject } from '../../../../src/providers/s3/utils/client'; import { copy } from 
'../../../../src/providers/s3/apis'; import { CopyInput, - CopyWithPathInput, CopyOutput, + CopyWithPathInput, CopyWithPathOutput, } from '../../../../src/providers/s3/types'; @@ -81,14 +82,14 @@ describe('copy API', () => { afterEach(() => { jest.clearAllMocks(); }); - const testCases: Array<{ + const testCases: { source: { accessLevel?: StorageAccessLevel; targetIdentityId?: string }; destination: { accessLevel?: StorageAccessLevel; }; expectedSourceKey: string; expectedDestinationKey: string; - }> = [ + }[] = [ { source: { accessLevel: 'guest' }, destination: { accessLevel: 'guest' }, @@ -260,18 +261,17 @@ describe('copy API', () => { }), ); expect.assertions(3); - const sourceKey = 'SourceKeyNotFound'; - const destinationKey = 'destinationKey'; + const missingSourceKey = 'SourceKeyNotFound'; try { await copy({ - source: { key: sourceKey }, + source: { key: missingSourceKey }, destination: { key: destinationKey }, }); } catch (error: any) { expect(copyObject).toHaveBeenCalledTimes(1); expect(copyObject).toHaveBeenCalledWith(copyObjectClientConfig, { ...copyObjectClientBaseParams, - CopySource: `${bucket}/public/${sourceKey}`, + CopySource: `${bucket}/public/${missingSourceKey}`, Key: `public/${destinationKey}`, }); expect(error.$metadata.httpStatusCode).toBe(404); @@ -281,7 +281,7 @@ describe('copy API', () => { it('should return a path not found error when source uses path and destination uses key', async () => { expect.assertions(2); try { - // @ts-expect-error + // @ts-expect-error mismatch copy input not allowed await copy({ source: { path: 'sourcePath' }, destination: { key: 'destinationKey' }, @@ -296,7 +296,7 @@ describe('copy API', () => { it('should return a key not found error when source uses key and destination uses path', async () => { expect.assertions(2); try { - // @ts-expect-error + // @ts-expect-error mismatch copy input not allowed await copy({ source: { key: 'sourcePath' }, destination: { path: 'destinationKey' }, diff --git a/packages/storage/__tests__/providers/s3/apis/downloadData.test.ts b/packages/storage/__tests__/providers/s3/apis/downloadData.test.ts index 0c9c4a3d007..721720679c0 100644 --- a/packages/storage/__tests__/providers/s3/apis/downloadData.test.ts +++ b/packages/storage/__tests__/providers/s3/apis/downloadData.test.ts @@ -3,6 +3,7 @@ import { AWSCredentials } from '@aws-amplify/core/internals/utils'; import { Amplify, StorageAccessLevel } from '@aws-amplify/core'; + import { getObject } from '../../../../src/providers/s3/utils/client'; import { downloadData } from '../../../../src/providers/s3'; import { @@ -91,15 +92,15 @@ describe('downloadData with key', () => { it('should return a download task with key', async () => { const mockDownloadInput: DownloadDataInput = { key: inputKey, - options: { accessLevel: 'protected', targetIdentityId: targetIdentityId }, + options: { accessLevel: 'protected', targetIdentityId }, }; expect(downloadData(mockDownloadInput)).toBe('downloadTask'); }); - const testCases: Array<{ + const testCases: { expectedKey: string; options?: { accessLevel?: StorageAccessLevel; targetIdentityId?: string }; - }> = [ + }[] = [ { expectedKey: `public/${inputKey}`, }, @@ -134,7 +135,7 @@ describe('downloadData with key', () => { onProgress, }, }); - const job = mockCreateDownloadTask.mock.calls[0][0].job; + const { job } = mockCreateDownloadTask.mock.calls[0][0]; const { key, body }: StorageDownloadDataOutput = await job(); expect({ key, body }).toEqual({ key: inputKey, @@ -169,7 +170,7 @@ describe('downloadData with key', () => { 
ContentType: 'contentType', }); downloadData({ key: inputKey }); - const job = mockCreateDownloadTask.mock.calls[0][0].job; + const { job } = mockCreateDownloadTask.mock.calls[0][0]; const { key, body, @@ -208,7 +209,7 @@ describe('downloadData with key', () => { }, }); - const job = mockCreateDownloadTask.mock.calls[0][0].job; + const { job } = mockCreateDownloadTask.mock.calls[0][0]; await job(); expect(getObject).toHaveBeenCalledWith( @@ -274,7 +275,7 @@ describe('downloadData with path', () => { onProgress, }, }); - const job = mockCreateDownloadTask.mock.calls[0][0].job; + const { job } = mockCreateDownloadTask.mock.calls[0][0]; const { path: resultPath, body, @@ -315,7 +316,7 @@ describe('downloadData with path', () => { ContentType: 'contentType', }); downloadData({ path: inputPath }); - const job = mockCreateDownloadTask.mock.calls[0][0].job; + const { job } = mockCreateDownloadTask.mock.calls[0][0]; const { path, body, @@ -354,7 +355,7 @@ describe('downloadData with path', () => { }, }); - const job = mockCreateDownloadTask.mock.calls[0][0].job; + const { job } = mockCreateDownloadTask.mock.calls[0][0]; await job(); expect(getObject).toHaveBeenCalledWith( diff --git a/packages/storage/__tests__/providers/s3/apis/getProperties.test.ts b/packages/storage/__tests__/providers/s3/apis/getProperties.test.ts index 191802f04f9..3b2ca3cae58 100644 --- a/packages/storage/__tests__/providers/s3/apis/getProperties.test.ts +++ b/packages/storage/__tests__/providers/s3/apis/getProperties.test.ts @@ -1,14 +1,15 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -import { headObject } from '../../../../src/providers/s3/utils/client'; -import { getProperties } from '../../../../src/providers/s3'; import { AWSCredentials } from '@aws-amplify/core/internals/utils'; import { Amplify, StorageAccessLevel } from '@aws-amplify/core'; + +import { headObject } from '../../../../src/providers/s3/utils/client'; +import { getProperties } from '../../../../src/providers/s3'; import { GetPropertiesInput, - GetPropertiesWithPathInput, GetPropertiesOutput, + GetPropertiesWithPathInput, GetPropertiesWithPathOutput, } from '../../../../src/providers/s3/types'; @@ -88,10 +89,10 @@ describe('getProperties with key', () => { jest.clearAllMocks(); }); - const testCases: Array<{ + const testCases: { expectedKey: string; options?: { accessLevel?: StorageAccessLevel; targetIdentityId?: string }; - }> = [ + }[] = [ { expectedKey: `public/${inputKey}`, }, diff --git a/packages/storage/__tests__/providers/s3/apis/getUrl.test.ts b/packages/storage/__tests__/providers/s3/apis/getUrl.test.ts index 8f56299d943..428f8f2034c 100644 --- a/packages/storage/__tests__/providers/s3/apis/getUrl.test.ts +++ b/packages/storage/__tests__/providers/s3/apis/getUrl.test.ts @@ -1,17 +1,18 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
// SPDX-License-Identifier: Apache-2.0 -import { getUrl } from '../../../../src/providers/s3/apis'; import { AWSCredentials } from '@aws-amplify/core/internals/utils'; import { Amplify, StorageAccessLevel } from '@aws-amplify/core'; + +import { getUrl } from '../../../../src/providers/s3/apis'; import { getPresignedGetObjectUrl, headObject, } from '../../../../src/providers/s3/utils/client'; import { GetUrlInput, - GetUrlWithPathInput, GetUrlOutput, + GetUrlWithPathInput, GetUrlWithPathOutput, } from '../../../../src/providers/s3/types'; @@ -85,10 +86,10 @@ describe('getUrl test with key', () => { jest.clearAllMocks(); }); - const testCases: Array<{ + const testCases: { options?: { accessLevel?: StorageAccessLevel; targetIdentityId?: string }; expectedKey: string; - }> = [ + }[] = [ { expectedKey: `public/${key}`, }, diff --git a/packages/storage/__tests__/providers/s3/apis/list.test.ts b/packages/storage/__tests__/providers/s3/apis/list.test.ts index 21ad76cdc33..76f4d3a7881 100644 --- a/packages/storage/__tests__/providers/s3/apis/list.test.ts +++ b/packages/storage/__tests__/providers/s3/apis/list.test.ts @@ -3,16 +3,17 @@ import { AWSCredentials } from '@aws-amplify/core/internals/utils'; import { Amplify, StorageAccessLevel } from '@aws-amplify/core'; + import { listObjectsV2 } from '../../../../src/providers/s3/utils/client'; import { list } from '../../../../src/providers/s3'; import { ListAllInput, - ListAllWithPathInput, ListAllOutput, + ListAllWithPathInput, ListAllWithPathOutput, ListPaginateInput, - ListPaginateWithPathInput, ListPaginateOutput, + ListPaginateWithPathInput, ListPaginateWithPathOutput, } from '../../../../src/providers/s3/types'; @@ -31,15 +32,15 @@ jest.mock('@aws-amplify/core', () => ({ const mockFetchAuthSession = Amplify.Auth.fetchAuthSession as jest.Mock; const mockGetConfig = Amplify.getConfig as jest.Mock; const mockListObject = listObjectsV2 as jest.Mock; -const key = 'path/itemsKey'; +const inputKey = 'path/itemsKey'; const bucket = 'bucket'; const region = 'region'; const nextToken = 'nextToken'; const targetIdentityId = 'targetIdentityId'; const defaultIdentityId = 'defaultIdentityId'; -const eTag = 'eTag'; -const lastModified = 'lastModified'; -const size = 'size'; +const etagValue = 'eTag'; +const lastModifiedValue = 'lastModified'; +const sizeValue = 'size'; const credentials: AWSCredentials = { accessKeyId: 'accessKeyId', sessionToken: 'sessionToken', @@ -51,20 +52,20 @@ const listObjectClientConfig = { userAgentValue: expect.any(String), }; const listObjectClientBaseResultItem = { - ETag: eTag, - LastModified: lastModified, - Size: size, + ETag: etagValue, + LastModified: lastModifiedValue, + Size: sizeValue, }; const listResultItem = { - eTag, - lastModified, - size, + eTag: etagValue, + lastModified: lastModifiedValue, + size: sizeValue, }; const mockListObjectsV2ApiWithPages = (pages: number) => { let methodCalls = 0; mockListObject.mockClear(); mockListObject.mockImplementation(async (_, input) => { - let token: string | undefined = undefined; + let token: string | undefined; methodCalls++; if (methodCalls > pages) { fail(`listObjectsV2 calls are more than expected. 
Expected ${pages}`); @@ -72,6 +73,7 @@ const mockListObjectsV2ApiWithPages = (pages: number) => { if (input.ContinuationToken === undefined || methodCalls < pages) { token = nextToken; } + return { Contents: [{ ...listObjectClientBaseResultItem, Key: input.Prefix }], NextContinuationToken: token, @@ -104,14 +106,14 @@ describe('list API', () => { jest.clearAllMocks(); }); - const accessLevelTests: Array<{ + const accessLevelTests: { prefix?: string; expectedKey: string; options?: { accessLevel?: StorageAccessLevel; targetIdentityId?: string; }; - }> = [ + }[] = [ { expectedKey: `public/`, }, @@ -120,28 +122,28 @@ describe('list API', () => { expectedKey: `public/`, }, { - prefix: key, - expectedKey: `public/${key}`, + prefix: inputKey, + expectedKey: `public/${inputKey}`, }, { - prefix: key, + prefix: inputKey, options: { accessLevel: 'guest' }, - expectedKey: `public/${key}`, + expectedKey: `public/${inputKey}`, }, { - prefix: key, + prefix: inputKey, options: { accessLevel: 'private' }, - expectedKey: `private/${defaultIdentityId}/${key}`, + expectedKey: `private/${defaultIdentityId}/${inputKey}`, }, { - prefix: key, + prefix: inputKey, options: { accessLevel: 'protected' }, - expectedKey: `protected/${defaultIdentityId}/${key}`, + expectedKey: `protected/${defaultIdentityId}/${inputKey}`, }, { - prefix: key, + prefix: inputKey, options: { accessLevel: 'protected', targetIdentityId }, - expectedKey: `protected/${targetIdentityId}/${key}`, + expectedKey: `protected/${targetIdentityId}/${inputKey}`, }, ]; @@ -160,7 +162,7 @@ describe('list API', () => { }); const response = await listPaginatedWrapper({ prefix, - options: options, + options, }); const { key, eTag, size, lastModified } = response.items[0]; expect(response.items).toHaveLength(1); @@ -197,7 +199,7 @@ describe('list API', () => { options: { ...options, pageSize: customPageSize, - nextToken: nextToken, + nextToken, }, }); const { key, eTag, size, lastModified } = response.items[0]; @@ -227,7 +229,7 @@ describe('list API', () => { mockListObject.mockImplementationOnce(() => { return {}; }); - let response = await listPaginatedWrapper({ + const response = await listPaginatedWrapper({ prefix, options, }); @@ -242,53 +244,55 @@ describe('list API', () => { }); }); - accessLevelTests.forEach(({ prefix: inputKey, options, expectedKey }) => { - const pathMsg = inputKey ? 'custom' : 'default'; - const accessLevelMsg = options?.accessLevel ?? 'default'; - const targetIdentityIdMsg = options?.targetIdentityId - ? `with targetIdentityId` - : ''; - it(`should list all objects having three pages with ${pathMsg} path, ${accessLevelMsg} accessLevel ${targetIdentityIdMsg}`, async () => { - mockListObjectsV2ApiWithPages(3); - const result = await listAllWrapper({ - prefix: inputKey, - options: { ...options, listAll: true }, - }); - const { key, eTag, lastModified, size } = result.items[0]; - expect(result.items).toHaveLength(3); - expect({ key, eTag, lastModified, size }).toEqual({ - ...listResultItem, - key: inputKey ?? '', - }); - expect(result).not.toHaveProperty(nextToken); + accessLevelTests.forEach( + ({ prefix: inputPrefix, options, expectedKey }) => { + const pathMsg = inputPrefix ? 'custom' : 'default'; + const accessLevelMsg = options?.accessLevel ?? 'default'; + const targetIdentityIdMsg = options?.targetIdentityId + ? 
`with targetIdentityId` + : ''; + it(`should list all objects having three pages with ${pathMsg} path, ${accessLevelMsg} accessLevel ${targetIdentityIdMsg}`, async () => { + mockListObjectsV2ApiWithPages(3); + const result = await listAllWrapper({ + prefix: inputPrefix, + options: { ...options, listAll: true }, + }); + const { key, eTag, lastModified, size } = result.items[0]; + expect(result.items).toHaveLength(3); + expect({ key, eTag, lastModified, size }).toEqual({ + ...listResultItem, + key: inputPrefix ?? '', + }); + expect(result).not.toHaveProperty(nextToken); - // listing three times for three pages - expect(listObjectsV2).toHaveBeenCalledTimes(3); + // listing three times for three pages + expect(listObjectsV2).toHaveBeenCalledTimes(3); - // first input recieves undefined as the Continuation Token - expect(listObjectsV2).toHaveBeenNthCalledWith( - 1, - listObjectClientConfig, - { - Bucket: bucket, - Prefix: expectedKey, - MaxKeys: 1000, - ContinuationToken: undefined, - }, - ); - // last input recieves TEST_TOKEN as the Continuation Token - expect(listObjectsV2).toHaveBeenNthCalledWith( - 3, - listObjectClientConfig, - { - Bucket: bucket, - Prefix: expectedKey, - MaxKeys: 1000, - ContinuationToken: nextToken, - }, - ); - }); - }); + // first input recieves undefined as the Continuation Token + expect(listObjectsV2).toHaveBeenNthCalledWith( + 1, + listObjectClientConfig, + { + Bucket: bucket, + Prefix: expectedKey, + MaxKeys: 1000, + ContinuationToken: undefined, + }, + ); + // last input recieves TEST_TOKEN as the Continuation Token + expect(listObjectsV2).toHaveBeenNthCalledWith( + 3, + listObjectClientConfig, + { + Bucket: bucket, + Prefix: expectedKey, + MaxKeys: 1000, + ContinuationToken: nextToken, + }, + ); + }); + }, + ); }); describe('Path: Happy Cases:', () => { @@ -298,7 +302,9 @@ describe('list API', () => { const listPaginatedWrapper = ( input: ListPaginateWithPathInput, ): Promise => list(input); - const resolvePath = (path: string | Function) => + const resolvePath = ( + path: string | (({ identityId }: { identityId: string }) => string), + ) => typeof path === 'string' ? 
path : path({ identityId: defaultIdentityId }); afterEach(() => { jest.clearAllMocks(); @@ -306,11 +312,11 @@ describe('list API', () => { }); const pathTestCases = [ { - path: `public/${key}`, + path: `public/${inputKey}`, }, { path: ({ identityId }: { identityId: string }) => - `protected/${identityId}/${key}`, + `protected/${identityId}/${inputKey}`, }, ]; @@ -349,7 +355,7 @@ describe('list API', () => { ); it.each(pathTestCases)( - 'should list objects with pagination using custom pageSize, nextToken and custom path: ${path}', + 'should list objects with pagination using custom pageSize, nextToken and custom path: $path', async ({ path: inputPath }) => { const resolvedPath = resolvePath(inputPath); mockListObject.mockImplementationOnce(() => { @@ -368,7 +374,7 @@ describe('list API', () => { path: resolvedPath, options: { pageSize: customPageSize, - nextToken: nextToken, + nextToken, }, }); const { path, eTag, lastModified, size } = response.items[0]; @@ -389,12 +395,12 @@ describe('list API', () => { ); it.each(pathTestCases)( - 'should list objects with zero results with custom path: ${path}', + 'should list objects with zero results with custom path: $path', async ({ path }) => { mockListObject.mockImplementationOnce(() => { return {}; }); - let response = await listPaginatedWrapper({ + const response = await listPaginatedWrapper({ path: resolvePath(path), }); expect(response.items).toEqual([]); @@ -409,7 +415,7 @@ describe('list API', () => { ); it.each(pathTestCases)( - 'should list all objects having three pages with custom path: ${path}', + 'should list all objects having three pages with custom path: $path', async ({ path: inputPath }) => { const resolvedPath = resolvePath(inputPath); mockListObjectsV2ApiWithPages(3); diff --git a/packages/storage/__tests__/providers/s3/apis/remove.test.ts b/packages/storage/__tests__/providers/s3/apis/remove.test.ts index 0c8662492ac..61745b54455 100644 --- a/packages/storage/__tests__/providers/s3/apis/remove.test.ts +++ b/packages/storage/__tests__/providers/s3/apis/remove.test.ts @@ -3,13 +3,14 @@ import { AWSCredentials } from '@aws-amplify/core/internals/utils'; import { Amplify, StorageAccessLevel } from '@aws-amplify/core'; + import { deleteObject } from '../../../../src/providers/s3/utils/client'; import { remove } from '../../../../src/providers/s3/apis'; import { StorageValidationErrorCode } from '../../../../src/errors/types/validation'; import { RemoveInput, - RemoveWithPathInput, RemoveOutput, + RemoveWithPathInput, RemoveWithPathOutput, } from '../../../../src/providers/s3/types'; @@ -73,10 +74,10 @@ describe('remove API', () => { afterEach(() => { jest.clearAllMocks(); }); - const testCases: Array<{ + const testCases: { expectedKey: string; options?: { accessLevel?: StorageAccessLevel }; - }> = [ + }[] = [ { expectedKey: `public/${inputKey}`, }, @@ -100,7 +101,7 @@ describe('remove API', () => { it(`should remove object with ${accessLevel} accessLevel`, async () => { const { key } = await removeWrapper({ key: inputKey, - options: options, + options, }); expect(key).toEqual(inputKey); expect(deleteObject).toHaveBeenCalledTimes(1); diff --git a/packages/storage/__tests__/providers/s3/apis/uploadData/index.test.ts b/packages/storage/__tests__/providers/s3/apis/uploadData/index.test.ts index 211d3238a35..938ca8863ee 100644 --- a/packages/storage/__tests__/providers/s3/apis/uploadData/index.test.ts +++ b/packages/storage/__tests__/providers/s3/apis/uploadData/index.test.ts @@ -5,8 +5,8 @@ import { uploadData } from 
'../../../../../src/providers/s3/apis'; import { MAX_OBJECT_SIZE } from '../../../../../src/providers/s3/utils/constants'; import { createUploadTask } from '../../../../../src/providers/s3/utils'; import { - validationErrorMap, StorageValidationErrorCode, + validationErrorMap, } from '../../../../../src/errors/types/validation'; import { putObjectJob } from '../../../../../src/providers/s3/apis/uploadData/putObjectJob'; import { getMultipartUploadHandlers } from '../../../../../src/providers/s3/apis/uploadData/multipart'; diff --git a/packages/storage/__tests__/providers/s3/apis/uploadData/multipartHandlers.test.ts b/packages/storage/__tests__/providers/s3/apis/uploadData/multipartHandlers.test.ts index 302d76beaa8..d8300dde305 100644 --- a/packages/storage/__tests__/providers/s3/apis/uploadData/multipartHandlers.test.ts +++ b/packages/storage/__tests__/providers/s3/apis/uploadData/multipartHandlers.test.ts @@ -3,18 +3,19 @@ import { AWSCredentials } from '@aws-amplify/core/internals/utils'; import { Amplify, defaultStorage } from '@aws-amplify/core'; + import { - createMultipartUpload, - uploadPart, - completeMultipartUpload, abortMultipartUpload, - listParts, + completeMultipartUpload, + createMultipartUpload, headObject, + listParts, + uploadPart, } from '../../../../../src/providers/s3/utils/client'; import { getMultipartUploadHandlers } from '../../../../../src/providers/s3/apis/uploadData/multipart'; import { - validationErrorMap, StorageValidationErrorCode, + validationErrorMap, } from '../../../../../src/errors/types/validation'; import { UPLOADS_STORAGE_KEY } from '../../../../../src/providers/s3/utils/constants'; import { byteLength } from '../../../../../src/providers/s3/apis/uploadData/byteLength'; @@ -46,7 +47,7 @@ const mockAbortMultipartUpload = abortMultipartUpload as jest.Mock; const mockListParts = listParts as jest.Mock; const mockHeadObject = headObject as jest.Mock; -const disableAssertion = true; +const disableAssertionFlag = true; const MB = 1024 * 1024; @@ -88,7 +89,7 @@ const mockMultipartUploadSuccess = (disableAssertion?: boolean) => { const mockMultipartUploadCancellation = ( beforeUploadPartResponseCallback?: () => void, ) => { - mockCreateMultipartUpload.mockImplementation(async ({ abortSignal }) => ({ + mockCreateMultipartUpload.mockImplementation(async () => ({ UploadId: 'uploadId', })); @@ -97,6 +98,7 @@ const mockMultipartUploadCancellation = ( if (abortSignal?.aborted) { throw new Error('AbortError'); } + return { ETag: `etag-${PartNumber}`, PartNumber, @@ -239,6 +241,7 @@ describe('getMultipartUploadHandlers with key', () => { if (end - start !== buffer?.byteLength) { buffer = new ArrayBuffer(end - start); } + return buffer; }), } as any as File; @@ -260,7 +263,7 @@ describe('getMultipartUploadHandlers with key', () => { it('should throw error when remote and local file sizes do not match upon completed upload', async () => { expect.assertions(1); - mockMultipartUploadSuccess(disableAssertion); + mockMultipartUploadSuccess(disableAssertionFlag); mockHeadObject.mockReset(); mockHeadObject.mockResolvedValue({ ContentLength: 1, @@ -298,7 +301,7 @@ describe('getMultipartUploadHandlers with key', () => { it('should handle error case: finish multipart upload failed', async () => { expect.assertions(1); - mockMultipartUploadSuccess(disableAssertion); + mockMultipartUploadSuccess(disableAssertionFlag); mockCompleteMultipartUpload.mockReset(); mockCompleteMultipartUpload.mockRejectedValueOnce(new Error('error')); @@ -311,7 +314,7 @@ 
describe('getMultipartUploadHandlers with key', () => { it('should handle error case: upload a body that splits in two parts but second part fails', async () => { expect.assertions(3); - mockMultipartUploadSuccess(disableAssertion); + mockMultipartUploadSuccess(disableAssertionFlag); mockUploadPart.mockReset(); mockUploadPart.mockResolvedValueOnce({ ETag: `etag-1`, @@ -486,10 +489,10 @@ describe('getMultipartUploadHandlers with key', () => { it('should remove from cache if upload task is canceled', async () => { expect.assertions(2); - mockMultipartUploadSuccess(disableAssertion); + mockMultipartUploadSuccess(disableAssertionFlag); mockListParts.mockResolvedValueOnce({ Parts: [] }); const size = 8 * MB; - const { multipartUploadJob, onCancel } = getMultipartUploadHandlers( + const { multipartUploadJob } = getMultipartUploadHandlers( { key: defaultKey, data: new ArrayBuffer(size), @@ -745,6 +748,7 @@ describe('getMultipartUploadHandlers with path', () => { if (end - start !== buffer?.byteLength) { buffer = new ArrayBuffer(end - start); } + return buffer; }), } as any as File; @@ -766,7 +770,7 @@ describe('getMultipartUploadHandlers with path', () => { it('should throw error when remote and local file sizes do not match upon completed upload', async () => { expect.assertions(1); - mockMultipartUploadSuccess(disableAssertion); + mockMultipartUploadSuccess(disableAssertionFlag); mockHeadObject.mockReset(); mockHeadObject.mockResolvedValue({ ContentLength: 1, @@ -804,7 +808,7 @@ describe('getMultipartUploadHandlers with path', () => { it('should handle error case: finish multipart upload failed', async () => { expect.assertions(1); - mockMultipartUploadSuccess(disableAssertion); + mockMultipartUploadSuccess(disableAssertionFlag); mockCompleteMultipartUpload.mockReset(); mockCompleteMultipartUpload.mockRejectedValueOnce(new Error('error')); @@ -817,7 +821,7 @@ describe('getMultipartUploadHandlers with path', () => { it('should handle error case: upload a body that splits in two parts but second part fails', async () => { expect.assertions(3); - mockMultipartUploadSuccess(disableAssertion); + mockMultipartUploadSuccess(disableAssertionFlag); mockUploadPart.mockReset(); mockUploadPart.mockResolvedValueOnce({ ETag: `etag-1`, @@ -993,10 +997,10 @@ describe('getMultipartUploadHandlers with path', () => { it('should remove from cache if upload task is canceled', async () => { expect.assertions(2); - mockMultipartUploadSuccess(disableAssertion); + mockMultipartUploadSuccess(disableAssertionFlag); mockListParts.mockResolvedValueOnce({ Parts: [] }); const size = 8 * MB; - const { multipartUploadJob, onCancel } = getMultipartUploadHandlers( + const { multipartUploadJob } = getMultipartUploadHandlers( { path: testPath, data: new ArrayBuffer(size), diff --git a/packages/storage/__tests__/providers/s3/apis/uploadData/putObjectJob.test.ts b/packages/storage/__tests__/providers/s3/apis/uploadData/putObjectJob.test.ts index b03822946da..3d76fe3776e 100644 --- a/packages/storage/__tests__/providers/s3/apis/uploadData/putObjectJob.test.ts +++ b/packages/storage/__tests__/providers/s3/apis/uploadData/putObjectJob.test.ts @@ -3,6 +3,7 @@ import { AWSCredentials } from '@aws-amplify/core/internals/utils'; import { Amplify } from '@aws-amplify/core'; + import { putObject } from '../../../../../src/providers/s3/utils/client'; import { calculateContentMd5 } from '../../../../../src/providers/s3/utils'; import { putObjectJob } from '../../../../../src/providers/s3/apis/uploadData/putObjectJob'; @@ -10,6 +11,7 @@ import 
{ putObjectJob } from '../../../../../src/providers/s3/apis/uploadData/pu jest.mock('../../../../../src/providers/s3/utils/client'); jest.mock('../../../../../src/providers/s3/utils', () => { const utils = jest.requireActual('../../../../../src/providers/s3/utils'); + return { ...utils, calculateContentMd5: jest.fn(), diff --git a/packages/storage/__tests__/providers/s3/apis/utils/downloadTask.test.ts b/packages/storage/__tests__/providers/s3/apis/utils/downloadTask.test.ts index 4220759724f..494747bda35 100644 --- a/packages/storage/__tests__/providers/s3/apis/utils/downloadTask.test.ts +++ b/packages/storage/__tests__/providers/s3/apis/utils/downloadTask.test.ts @@ -66,8 +66,7 @@ describe('createDownloadTask', () => { job: jest.fn(), onCancel, }); - // TODO[AllanZhengYP]: Use ts-expect-error instead after upgrading Jest. - // @ts-ignore + // @ts-expect-error assign to read-only task.state = state; task.cancel(); expect(onCancel).not.toHaveBeenCalled(); diff --git a/packages/storage/__tests__/providers/s3/apis/utils/resolvePrefix.test.ts b/packages/storage/__tests__/providers/s3/apis/utils/resolvePrefix.test.ts index 0d728e4e4d2..2e31d345ac7 100644 --- a/packages/storage/__tests__/providers/s3/apis/utils/resolvePrefix.test.ts +++ b/packages/storage/__tests__/providers/s3/apis/utils/resolvePrefix.test.ts @@ -3,8 +3,8 @@ import { resolvePrefix } from '../../../../../src/utils/resolvePrefix'; import { - validationErrorMap, StorageValidationErrorCode, + validationErrorMap, } from '../../../../../src/errors/types/validation'; describe('resolvePrefix', () => { diff --git a/packages/storage/__tests__/providers/s3/apis/utils/uploadTask.test.ts b/packages/storage/__tests__/providers/s3/apis/utils/uploadTask.test.ts index 1eeacb61f85..ca3c50861d6 100644 --- a/packages/storage/__tests__/providers/s3/apis/utils/uploadTask.test.ts +++ b/packages/storage/__tests__/providers/s3/apis/utils/uploadTask.test.ts @@ -67,8 +67,7 @@ describe('createUploadTask', () => { job: jest.fn(), onCancel, }); - // TODO[AllanZhengYP]: Use ts-expect-error instead after upgrading Jest. - // @ts-ignore + // @ts-expect-error assign to read-only task.state = state; task.cancel(); expect(onCancel).not.toHaveBeenCalled(); @@ -100,8 +99,7 @@ describe('createUploadTask', () => { onPause, isMultipartUpload: true, }); - // TODO[AllanZhengYP]: Use ts-expect-error instead after upgrading Jest. - // @ts-ignore + // @ts-expect-error assign to read-only task.state = state; task.pause(); expect(onPause).not.toHaveBeenCalled(); @@ -134,8 +132,7 @@ describe('createUploadTask', () => { onResume, isMultipartUpload: true, }); - // TODO[AllanZhengYP]: Use ts-expect-error instead after upgrading Jest. 
- // @ts-ignore + // @ts-expect-error assign to read-only task.state = state; task.resume(); expect(onResume).not.toHaveBeenCalled(); diff --git a/packages/storage/__tests__/providers/s3/apis/utils/validateStorageOperationInput.test.ts b/packages/storage/__tests__/providers/s3/apis/utils/validateStorageOperationInput.test.ts index 14b1f6204f0..d86cb9c6c40 100644 --- a/packages/storage/__tests__/providers/s3/apis/utils/validateStorageOperationInput.test.ts +++ b/packages/storage/__tests__/providers/s3/apis/utils/validateStorageOperationInput.test.ts @@ -45,9 +45,8 @@ describe('validateStorageOperationInput', () => { it('should throw an error when input path starts with a /', () => { const input = { path: '/leading-slash-path' }; expect(() => validateStorageOperationInput(input)).toThrow( - validationErrorMap[ - StorageValidationErrorCode.InvalidStoragePathInput - ].message, + validationErrorMap[StorageValidationErrorCode.InvalidStoragePathInput] + .message, ); }); diff --git a/packages/storage/__tests__/providers/s3/utils/client/S3/cases/abortMultipartUpload.ts b/packages/storage/__tests__/providers/s3/utils/client/S3/cases/abortMultipartUpload.ts index 68144f636e0..4628c433e51 100644 --- a/packages/storage/__tests__/providers/s3/utils/client/S3/cases/abortMultipartUpload.ts +++ b/packages/storage/__tests__/providers/s3/utils/client/S3/cases/abortMultipartUpload.ts @@ -3,9 +3,10 @@ import { abortMultipartUpload } from '../../../../../../../src/providers/s3/utils/client'; import { ApiFunctionalTestCase } from '../../testUtils/types'; + import { - defaultConfig, DEFAULT_RESPONSE_HEADERS, + defaultConfig, expectedMetadata, } from './shared'; diff --git a/packages/storage/__tests__/providers/s3/utils/client/S3/cases/completeMultipartUpload.ts b/packages/storage/__tests__/providers/s3/utils/client/S3/cases/completeMultipartUpload.ts index 2adf7e3ef6f..125cb505e4c 100644 --- a/packages/storage/__tests__/providers/s3/utils/client/S3/cases/completeMultipartUpload.ts +++ b/packages/storage/__tests__/providers/s3/utils/client/S3/cases/completeMultipartUpload.ts @@ -3,9 +3,10 @@ import { completeMultipartUpload } from '../../../../../../../src/providers/s3/utils/client'; import { ApiFunctionalTestCase } from '../../testUtils/types'; + import { - defaultConfig, DEFAULT_RESPONSE_HEADERS, + defaultConfig, expectedMetadata, } from './shared'; diff --git a/packages/storage/__tests__/providers/s3/utils/client/S3/cases/copyObject.ts b/packages/storage/__tests__/providers/s3/utils/client/S3/cases/copyObject.ts index b5f027fb4b9..746ca373057 100644 --- a/packages/storage/__tests__/providers/s3/utils/client/S3/cases/copyObject.ts +++ b/packages/storage/__tests__/providers/s3/utils/client/S3/cases/copyObject.ts @@ -3,9 +3,10 @@ import { copyObject } from '../../../../../../../src/providers/s3/utils/client'; import { ApiFunctionalTestCase } from '../../testUtils/types'; + import { - defaultConfig, DEFAULT_RESPONSE_HEADERS, + defaultConfig, expectedMetadata, } from './shared'; diff --git a/packages/storage/__tests__/providers/s3/utils/client/S3/cases/createMultipartUpload.ts b/packages/storage/__tests__/providers/s3/utils/client/S3/cases/createMultipartUpload.ts index 098caa849f8..df13908e715 100644 --- a/packages/storage/__tests__/providers/s3/utils/client/S3/cases/createMultipartUpload.ts +++ b/packages/storage/__tests__/providers/s3/utils/client/S3/cases/createMultipartUpload.ts @@ -3,12 +3,13 @@ import { createMultipartUpload } from '../../../../../../../src/providers/s3/utils/client'; import { 
ApiFunctionalTestCase } from '../../testUtils/types'; + import { - defaultConfig, DEFAULT_RESPONSE_HEADERS, + defaultConfig, expectedMetadata, } from './shared'; -import { putObjectRequest, expectedPutObjectRequestHeaders } from './putObject'; +import { expectedPutObjectRequestHeaders, putObjectRequest } from './putObject'; // API reference: https://docs.aws.amazon.com/AmazonS3/latest/API/API_CreateMultipartUpload.html const createMultiPartUploadHappyCase: ApiFunctionalTestCase< diff --git a/packages/storage/__tests__/providers/s3/utils/client/S3/cases/deleteObject.ts b/packages/storage/__tests__/providers/s3/utils/client/S3/cases/deleteObject.ts index 5591e3a8800..f0a4439e13f 100644 --- a/packages/storage/__tests__/providers/s3/utils/client/S3/cases/deleteObject.ts +++ b/packages/storage/__tests__/providers/s3/utils/client/S3/cases/deleteObject.ts @@ -3,9 +3,10 @@ import { deleteObject } from '../../../../../../../src/providers/s3/utils/client'; import { ApiFunctionalTestCase } from '../../testUtils/types'; + import { - defaultConfig, DEFAULT_RESPONSE_HEADERS, + defaultConfig, expectedMetadata, } from './shared'; diff --git a/packages/storage/__tests__/providers/s3/utils/client/S3/cases/getObject.ts b/packages/storage/__tests__/providers/s3/utils/client/S3/cases/getObject.ts index 7893c045924..c6b1e038926 100644 --- a/packages/storage/__tests__/providers/s3/utils/client/S3/cases/getObject.ts +++ b/packages/storage/__tests__/providers/s3/utils/client/S3/cases/getObject.ts @@ -2,13 +2,12 @@ // SPDX-License-Identifier: Apache-2.0 import { getObject } from '../../../../../../../src/providers/s3/utils/client'; -import { toBase64 } from '../../../../../../../src/providers/s3/utils/client/utils'; import { ApiFunctionalTestCase } from '../../testUtils/types'; + import { - defaultConfig, DEFAULT_RESPONSE_HEADERS, + defaultConfig, expectedMetadata, - EMPTY_SHA256, } from './shared'; const getObjectResponseHeaders = { diff --git a/packages/storage/__tests__/providers/s3/utils/client/S3/cases/headObject.ts b/packages/storage/__tests__/providers/s3/utils/client/S3/cases/headObject.ts index c919e560fef..2275d7ac850 100644 --- a/packages/storage/__tests__/providers/s3/utils/client/S3/cases/headObject.ts +++ b/packages/storage/__tests__/providers/s3/utils/client/S3/cases/headObject.ts @@ -2,11 +2,11 @@ // SPDX-License-Identifier: Apache-2.0 import { headObject } from '../../../../../../../src/providers/s3/utils/client'; -import { toBase64 } from '../../../../../../../src/providers/s3/utils/client/utils'; import { ApiFunctionalTestCase } from '../../testUtils/types'; + import { - defaultConfig, DEFAULT_RESPONSE_HEADERS, + defaultConfig, expectedMetadata, } from './shared'; diff --git a/packages/storage/__tests__/providers/s3/utils/client/S3/cases/listObjectsV2.ts b/packages/storage/__tests__/providers/s3/utils/client/S3/cases/listObjectsV2.ts index fed49cd8c3b..7524a8daeb6 100644 --- a/packages/storage/__tests__/providers/s3/utils/client/S3/cases/listObjectsV2.ts +++ b/packages/storage/__tests__/providers/s3/utils/client/S3/cases/listObjectsV2.ts @@ -3,9 +3,10 @@ import { listObjectsV2 } from '../../../../../../../src/providers/s3/utils/client'; import { ApiFunctionalTestCase } from '../../testUtils/types'; + import { - defaultConfig, DEFAULT_RESPONSE_HEADERS, + defaultConfig, expectedMetadata, } from './shared'; diff --git a/packages/storage/__tests__/providers/s3/utils/client/S3/cases/listParts.ts b/packages/storage/__tests__/providers/s3/utils/client/S3/cases/listParts.ts index 58beae4fcd4..3e809d12bdc 
100644 --- a/packages/storage/__tests__/providers/s3/utils/client/S3/cases/listParts.ts +++ b/packages/storage/__tests__/providers/s3/utils/client/S3/cases/listParts.ts @@ -3,13 +3,14 @@ import { listParts } from '../../../../../../../src/providers/s3/utils/client'; import { ApiFunctionalTestCase } from '../../testUtils/types'; + import { - defaultConfig, DEFAULT_RESPONSE_HEADERS, + defaultConfig, expectedMetadata, } from './shared'; -//API Reference: https://docs.aws.amazon.com/AmazonS3/latest/API/API_ListParts.html +// API Reference: https://docs.aws.amazon.com/AmazonS3/latest/API/API_ListParts.html const listPartsHappyCase: ApiFunctionalTestCase = [ 'happy case', 'listParts', diff --git a/packages/storage/__tests__/providers/s3/utils/client/S3/cases/putObject.ts b/packages/storage/__tests__/providers/s3/utils/client/S3/cases/putObject.ts index 36714b7d831..930870a7c15 100644 --- a/packages/storage/__tests__/providers/s3/utils/client/S3/cases/putObject.ts +++ b/packages/storage/__tests__/providers/s3/utils/client/S3/cases/putObject.ts @@ -3,9 +3,10 @@ import { putObject } from '../../../../../../../src/providers/s3/utils/client'; import { ApiFunctionalTestCase } from '../../testUtils/types'; + import { - defaultConfig, DEFAULT_RESPONSE_HEADERS, + defaultConfig, expectedMetadata, } from './shared'; diff --git a/packages/storage/__tests__/providers/s3/utils/client/S3/cases/uploadPart.ts b/packages/storage/__tests__/providers/s3/utils/client/S3/cases/uploadPart.ts index b419317acfb..b4906b223c2 100644 --- a/packages/storage/__tests__/providers/s3/utils/client/S3/cases/uploadPart.ts +++ b/packages/storage/__tests__/providers/s3/utils/client/S3/cases/uploadPart.ts @@ -3,9 +3,10 @@ import { uploadPart } from '../../../../../../../src/providers/s3/utils/client'; import { ApiFunctionalTestCase } from '../../testUtils/types'; + import { - defaultConfig, DEFAULT_RESPONSE_HEADERS, + defaultConfig, expectedMetadata, } from './shared'; diff --git a/packages/storage/__tests__/providers/s3/utils/client/S3/functional-apis.test.ts b/packages/storage/__tests__/providers/s3/utils/client/S3/functional-apis.test.ts index d625218a6fc..62b4aff0cf5 100644 --- a/packages/storage/__tests__/providers/s3/utils/client/S3/functional-apis.test.ts +++ b/packages/storage/__tests__/providers/s3/utils/client/S3/functional-apis.test.ts @@ -2,10 +2,11 @@ // SPDX-License-Identifier: Apache-2.0 import { HttpResponse } from '@aws-amplify/core/internals/aws-client-utils'; + import { s3TransferHandler } from '../../../../../../src/providers/s3/utils/client/runtime/s3TransferHandler/fetch'; +import { StorageError } from '../../../../../../src/errors/StorageError'; import cases from './cases'; -import { StorageError } from '../../../../../../src/errors/StorageError'; jest.mock( '../../../../../../src/providers/s3/utils/client/runtime/s3TransferHandler/fetch', @@ -30,6 +31,7 @@ const mockBinaryResponse = ({ blob: async () => new Blob([body], { type: 'plain/text' }), text: async () => body, } as HttpResponse['body']; + return { statusCode: status, headers, diff --git a/packages/storage/__tests__/providers/s3/utils/client/S3/getPresignedGetObjectUrl.test.ts b/packages/storage/__tests__/providers/s3/utils/client/S3/getPresignedGetObjectUrl.test.ts index 0108bd37084..7fbfcdeb3a1 100644 --- a/packages/storage/__tests__/providers/s3/utils/client/S3/getPresignedGetObjectUrl.test.ts +++ b/packages/storage/__tests__/providers/s3/utils/client/S3/getPresignedGetObjectUrl.test.ts @@ -2,18 +2,20 @@ // SPDX-License-Identifier: Apache-2.0 import 
{ presignUrl } from '@aws-amplify/core/internals/aws-client-utils'; + import { getPresignedGetObjectUrl } from '../../../../../../src/providers/s3/utils/client'; + import { defaultConfig } from './cases/shared'; jest.mock('@aws-amplify/core/internals/aws-client-utils', () => { const original = jest.requireActual( '@aws-amplify/core/internals/aws-client-utils', ); - const presignUrl = original.presignUrl; + const { presignUrl: getPresignedUrl } = original; return { ...original, - presignUrl: jest.fn((...args) => presignUrl.apply(null, args)), + presignUrl: jest.fn((...args) => getPresignedUrl(...args)), }; }); diff --git a/packages/storage/__tests__/providers/s3/utils/client/base64/base64-browser.test.ts b/packages/storage/__tests__/providers/s3/utils/client/base64/base64-browser.test.ts index a9e89bd8500..37a7134fd69 100644 --- a/packages/storage/__tests__/providers/s3/utils/client/base64/base64-browser.test.ts +++ b/packages/storage/__tests__/providers/s3/utils/client/base64/base64-browser.test.ts @@ -1,16 +1,17 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 +import { TextDecoder, TextEncoder } from 'util'; + import { toBase64 } from '../../../../../../src/providers/s3/utils/client/runtime/base64/index.browser'; -import { TextEncoder, TextDecoder } from 'util'; import { toBase64TestCases } from './cases'; Object.assign(global, { TextDecoder, TextEncoder }); describe('base64 until for browser', () => { describe('toBase64()', () => { - for (let { input, expected } of toBase64TestCases) { + for (const { input, expected } of toBase64TestCases) { it(`it should base64 encode ${input}`, () => { expect(toBase64(input)).toStrictEqual(expected); }); diff --git a/packages/storage/__tests__/providers/s3/utils/client/base64/base64-native.test.ts b/packages/storage/__tests__/providers/s3/utils/client/base64/base64-native.test.ts index 173bf4942a5..3385429c2e6 100644 --- a/packages/storage/__tests__/providers/s3/utils/client/base64/base64-native.test.ts +++ b/packages/storage/__tests__/providers/s3/utils/client/base64/base64-native.test.ts @@ -2,11 +2,12 @@ // SPDX-License-Identifier: Apache-2.0 import { toBase64 } from '../../../../../../src/providers/s3/utils/client/runtime/base64/index.native'; + import { toBase64TestCases } from './cases'; describe('base64 until for browser', () => { describe('toBase64()', () => { - for (let { input, expected } of toBase64TestCases) { + for (const { input, expected } of toBase64TestCases) { it(`it should base64 encode ${input}`, () => { expect(toBase64(input)).toStrictEqual(expected); }); diff --git a/packages/storage/__tests__/providers/s3/utils/client/testUtils/mocks.ts b/packages/storage/__tests__/providers/s3/utils/client/testUtils/mocks.ts index cec66b98cce..a7cf3bf55ec 100644 --- a/packages/storage/__tests__/providers/s3/utils/client/testUtils/mocks.ts +++ b/packages/storage/__tests__/providers/s3/utils/client/testUtils/mocks.ts @@ -1,7 +1,7 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -import { XhrSpy, XhrProgressEvent } from './types'; +import { XhrProgressEvent, XhrSpy } from './types'; /** * Mock XMLHttpRequest instance so we can spy on the methods and listeners. 
@@ -29,7 +29,8 @@ export const spyOnXhr = (): XhrSpy => { }), abort: jest.fn(), }; - window['XMLHttpRequest'] = jest.fn(() => mockRequest) as any; + window.XMLHttpRequest = jest.fn(() => mockRequest) as any; + return Object.assign(mockRequest, { uploadListeners, listeners, @@ -79,8 +80,8 @@ export const mockXhrResponse = ( */ export const mockProgressEvents = (options: { mockXhr: XhrSpy; - uploadEvents?: Array; - downloadEvents?: Array; + uploadEvents?: XhrProgressEvent[]; + downloadEvents?: XhrProgressEvent[]; }) => { const { mockXhr, uploadEvents, downloadEvents } = options; uploadEvents?.forEach(event => { diff --git a/packages/storage/__tests__/providers/s3/utils/client/testUtils/types.ts b/packages/storage/__tests__/providers/s3/utils/client/testUtils/types.ts index 237508ee9af..b47d2ec7695 100644 --- a/packages/storage/__tests__/providers/s3/utils/client/testUtils/types.ts +++ b/packages/storage/__tests__/providers/s3/utils/client/testUtils/types.ts @@ -3,11 +3,11 @@ import { HttpRequest } from '@aws-amplify/core/internals/aws-client-utils'; -type MockFetchResponse = { +interface MockFetchResponse { body: BodyInit; headers: HeadersInit; status: number; -}; +} // TODO: remove this after upgrading ts-jest type Awaited = T extends PromiseLike ? U : never; @@ -31,7 +31,7 @@ type ApiFunctionalTestErrorCase any> = [ Parameters[1], // input HttpRequest, // expected request MockFetchResponse, // response - {}, // error + { name: string; message: string }, // error ]; /** @@ -62,13 +62,13 @@ export type XhrProgressEvent = Pick< */ export type XhrSpy = Writeable & { uploadListeners: Partial<{ - [name in keyof XMLHttpRequestEventTargetEventMap]: Array< - (event: XMLHttpRequestEventTargetEventMap[name]) => void - >; + [name in keyof XMLHttpRequestEventTargetEventMap]: (( + event: XMLHttpRequestEventTargetEventMap[name], + ) => void)[]; }>; listeners: Partial<{ - [name in keyof XMLHttpRequestEventMap]: Array< - (event: XMLHttpRequestEventMap[name]) => void - >; + [name in keyof XMLHttpRequestEventMap]: (( + event: XMLHttpRequestEventMap[name], + ) => void)[]; }>; }; diff --git a/packages/storage/__tests__/providers/s3/utils/client/xhrTransferHandler-util.test.ts b/packages/storage/__tests__/providers/s3/utils/client/xhrTransferHandler-util.test.ts index ab28eae4a5b..0bf3439c1ab 100644 --- a/packages/storage/__tests__/providers/s3/utils/client/xhrTransferHandler-util.test.ts +++ b/packages/storage/__tests__/providers/s3/utils/client/xhrTransferHandler-util.test.ts @@ -3,13 +3,14 @@ import { xhrTransferHandler } from '../../../../../src/providers/s3/utils/client/runtime/xhrTransferHandler'; import { isCancelError } from '../../../../../src/errors/CanceledError'; + import { - spyOnXhr, + mockProgressEvents, + mockXhrReadyState, mockXhrResponse, + spyOnXhr, triggerNetWorkError, triggerServerSideAbort, - mockXhrReadyState, - mockProgressEvents, } from './testUtils/mocks'; jest.mock('@aws-amplify/core'); @@ -33,22 +34,22 @@ const mockReadablStreamCtor = jest.fn(); describe('xhrTransferHandler', () => { const originalXhr = window.XMLHttpRequest; const originalReadableStream = window.ReadableStream; - const originalFileReaderCtor = window.FileReader; + const OriginalFileReaderCtor = window.FileReader; beforeEach(() => { jest.resetAllMocks(); window.ReadableStream = mockReadablStreamCtor; window.FileReader = Object.assign( jest.fn().mockImplementation(() => { - return new originalFileReaderCtor(); + return new OriginalFileReaderCtor(); }), - { ...originalFileReaderCtor }, + { ...OriginalFileReaderCtor }, 
); }); afterEach(() => { window.XMLHttpRequest = originalXhr; window.ReadableStream = originalReadableStream; - window.FileReader = originalFileReaderCtor; + window.FileReader = OriginalFileReaderCtor; }); it('should call xhr.open with the correct arguments', async () => { diff --git a/packages/storage/__tests__/providers/s3/utils/client/xmlParser-util.test.ts b/packages/storage/__tests__/providers/s3/utils/client/xmlParser-util.test.ts index fe818e42777..a2432cdfd61 100644 --- a/packages/storage/__tests__/providers/s3/utils/client/xmlParser-util.test.ts +++ b/packages/storage/__tests__/providers/s3/utils/client/xmlParser-util.test.ts @@ -1,10 +1,11 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -import cases from './xmlParser-fixture'; import { parser as browserParser } from '../../../../../src/providers/s3/utils/client/runtime/index.browser'; import { parser as nodeParser } from '../../../../../src/providers/s3/utils/client/runtime/index'; +import cases from './xmlParser-fixture'; + describe('xmlParser for browsers', () => { cases.forEach(({ spec, xml, expected }) => { it(`should parse ${spec} correctly`, () => { diff --git a/yarn.lock b/yarn.lock index 3ba05bc1428..88b518e61d7 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2261,25 +2261,6 @@ pouchdb-collections "^1.0.1" tiny-queue "^0.2.1" -"@fimbul/bifrost@^0.21.0": - version "0.21.0" - resolved "https://registry.yarnpkg.com/@fimbul/bifrost/-/bifrost-0.21.0.tgz#d0fafa25938fda475657a6a1e407a21bbe02c74e" - integrity sha512-ou8VU+nTmOW1jeg+FT+sn+an/M0Xb9G16RucrfhjXGWv1Q97kCoM5CG9Qj7GYOSdu7km72k7nY83Eyr53Bkakg== - dependencies: - "@fimbul/ymir" "^0.21.0" - get-caller-file "^2.0.0" - tslib "^1.8.1" - tsutils "^3.5.0" - -"@fimbul/ymir@^0.21.0": - version "0.21.0" - resolved "https://registry.yarnpkg.com/@fimbul/ymir/-/ymir-0.21.0.tgz#8525726787aceeafd4e199472c0d795160b5d4a1" - integrity sha512-T/y7WqPsm4n3zhT08EpB5sfdm2Kvw3gurAxr2Lr5dQeLi8ZsMlNT/Jby+ZmuuAAd1PnXYzKp+2SXgIkQIIMCUg== - dependencies: - inversify "^5.0.0" - reflect-metadata "^0.1.12" - tslib "^1.8.1" - "@gar/promisify@^1.0.1", "@gar/promisify@^1.1.3": version "1.1.3" resolved "https://registry.yarnpkg.com/@gar/promisify/-/promisify-1.1.3.tgz#555193ab2e3bb3b6adc3d551c9c030d9e860daf6" @@ -2777,55 +2758,55 @@ write-pkg "4.0.0" yargs "16.2.0" -"@next/env@14.1.0": - version "14.1.0" - resolved "https://registry.yarnpkg.com/@next/env/-/env-14.1.0.tgz#43d92ebb53bc0ae43dcc64fb4d418f8f17d7a341" - integrity sha512-Py8zIo+02ht82brwwhTg36iogzFqGLPXlRGKQw5s+qP/kMNc4MAyDeEwBKDijk6zTIbegEgu8Qy7C1LboslQAw== - -"@next/swc-darwin-arm64@14.1.0": - version "14.1.0" - resolved "https://registry.yarnpkg.com/@next/swc-darwin-arm64/-/swc-darwin-arm64-14.1.0.tgz#70a57c87ab1ae5aa963a3ba0f4e59e18f4ecea39" - integrity sha512-nUDn7TOGcIeyQni6lZHfzNoo9S0euXnu0jhsbMOmMJUBfgsnESdjN97kM7cBqQxZa8L/bM9om/S5/1dzCrW6wQ== - -"@next/swc-darwin-x64@14.1.0": - version "14.1.0" - resolved "https://registry.yarnpkg.com/@next/swc-darwin-x64/-/swc-darwin-x64-14.1.0.tgz#0863a22feae1540e83c249384b539069fef054e9" - integrity sha512-1jgudN5haWxiAl3O1ljUS2GfupPmcftu2RYJqZiMJmmbBT5M1XDffjUtRUzP4W3cBHsrvkfOFdQ71hAreNQP6g== - -"@next/swc-linux-arm64-gnu@14.1.0": - version "14.1.0" - resolved "https://registry.yarnpkg.com/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-14.1.0.tgz#893da533d3fce4aec7116fe772d4f9b95232423c" - integrity sha512-RHo7Tcj+jllXUbK7xk2NyIDod3YcCPDZxj1WLIYxd709BQ7WuRYl3OWUNG+WUfqeQBds6kvZYlc42NJJTNi4tQ== - -"@next/swc-linux-arm64-musl@14.1.0": - 
version "14.1.0" - resolved "https://registry.yarnpkg.com/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-14.1.0.tgz#d81ddcf95916310b8b0e4ad32b637406564244c0" - integrity sha512-v6kP8sHYxjO8RwHmWMJSq7VZP2nYCkRVQ0qolh2l6xroe9QjbgV8siTbduED4u0hlk0+tjS6/Tuy4n5XCp+l6g== - -"@next/swc-linux-x64-gnu@14.1.0": - version "14.1.0" - resolved "https://registry.yarnpkg.com/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-14.1.0.tgz#18967f100ec19938354332dcb0268393cbacf581" - integrity sha512-zJ2pnoFYB1F4vmEVlb/eSe+VH679zT1VdXlZKX+pE66grOgjmKJHKacf82g/sWE4MQ4Rk2FMBCRnX+l6/TVYzQ== - -"@next/swc-linux-x64-musl@14.1.0": - version "14.1.0" - resolved "https://registry.yarnpkg.com/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-14.1.0.tgz#77077cd4ba8dda8f349dc7ceb6230e68ee3293cf" - integrity sha512-rbaIYFt2X9YZBSbH/CwGAjbBG2/MrACCVu2X0+kSykHzHnYH5FjHxwXLkcoJ10cX0aWCEynpu+rP76x0914atg== - -"@next/swc-win32-arm64-msvc@14.1.0": - version "14.1.0" - resolved "https://registry.yarnpkg.com/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-14.1.0.tgz#5f0b8cf955644104621e6d7cc923cad3a4c5365a" - integrity sha512-o1N5TsYc8f/HpGt39OUQpQ9AKIGApd3QLueu7hXk//2xq5Z9OxmV6sQfNp8C7qYmiOlHYODOGqNNa0e9jvchGQ== - -"@next/swc-win32-ia32-msvc@14.1.0": - version "14.1.0" - resolved "https://registry.yarnpkg.com/@next/swc-win32-ia32-msvc/-/swc-win32-ia32-msvc-14.1.0.tgz#21f4de1293ac5e5a168a412b139db5d3420a89d0" - integrity sha512-XXIuB1DBRCFwNO6EEzCTMHT5pauwaSj4SWs7CYnME57eaReAKBXCnkUE80p/pAZcewm7hs+vGvNqDPacEXHVkw== - -"@next/swc-win32-x64-msvc@14.1.0": - version "14.1.0" - resolved "https://registry.yarnpkg.com/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-14.1.0.tgz#e561fb330466d41807123d932b365cf3d33ceba2" - integrity sha512-9WEbVRRAqJ3YFVqEZIxUqkiO8l1nool1LmNxygr5HWF8AcSYsEpneUDhmjUVJEzO2A04+oPtZdombzzPPkTtgg== +"@next/env@14.1.1": + version "14.1.1" + resolved "https://registry.yarnpkg.com/@next/env/-/env-14.1.1.tgz#80150a8440eb0022a73ba353c6088d419b908bac" + integrity sha512-7CnQyD5G8shHxQIIg3c7/pSeYFeMhsNbpU/bmvH7ZnDql7mNRgg8O2JZrhrc/soFnfBnKP4/xXNiiSIPn2w8gA== + +"@next/swc-darwin-arm64@14.1.1": + version "14.1.1" + resolved "https://registry.yarnpkg.com/@next/swc-darwin-arm64/-/swc-darwin-arm64-14.1.1.tgz#b74ba7c14af7d05fa2848bdeb8ee87716c939b64" + integrity sha512-yDjSFKQKTIjyT7cFv+DqQfW5jsD+tVxXTckSe1KIouKk75t1qZmj/mV3wzdmFb0XHVGtyRjDMulfVG8uCKemOQ== + +"@next/swc-darwin-x64@14.1.1": + version "14.1.1" + resolved "https://registry.yarnpkg.com/@next/swc-darwin-x64/-/swc-darwin-x64-14.1.1.tgz#82c3e67775e40094c66e76845d1a36cc29c9e78b" + integrity sha512-KCQmBL0CmFmN8D64FHIZVD9I4ugQsDBBEJKiblXGgwn7wBCSe8N4Dx47sdzl4JAg39IkSN5NNrr8AniXLMb3aw== + +"@next/swc-linux-arm64-gnu@14.1.1": + version "14.1.1" + resolved "https://registry.yarnpkg.com/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-14.1.1.tgz#4f4134457b90adc5c3d167d07dfb713c632c0caa" + integrity sha512-YDQfbWyW0JMKhJf/T4eyFr4b3tceTorQ5w2n7I0mNVTFOvu6CGEzfwT3RSAQGTi/FFMTFcuspPec/7dFHuP7Eg== + +"@next/swc-linux-arm64-musl@14.1.1": + version "14.1.1" + resolved "https://registry.yarnpkg.com/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-14.1.1.tgz#594bedafaeba4a56db23a48ffed2cef7cd09c31a" + integrity sha512-fiuN/OG6sNGRN/bRFxRvV5LyzLB8gaL8cbDH5o3mEiVwfcMzyE5T//ilMmaTrnA8HLMS6hoz4cHOu6Qcp9vxgQ== + +"@next/swc-linux-x64-gnu@14.1.1": + version "14.1.1" + resolved "https://registry.yarnpkg.com/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-14.1.1.tgz#cb4e75f1ff2b9bcadf2a50684605928ddfc58528" + integrity 
sha512-rv6AAdEXoezjbdfp3ouMuVqeLjE1Bin0AuE6qxE6V9g3Giz5/R3xpocHoAi7CufRR+lnkuUjRBn05SYJ83oKNQ== + +"@next/swc-linux-x64-musl@14.1.1": + version "14.1.1" + resolved "https://registry.yarnpkg.com/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-14.1.1.tgz#15f26800df941b94d06327f674819ab64b272e25" + integrity sha512-YAZLGsaNeChSrpz/G7MxO3TIBLaMN8QWMr3X8bt6rCvKovwU7GqQlDu99WdvF33kI8ZahvcdbFsy4jAFzFX7og== + +"@next/swc-win32-arm64-msvc@14.1.1": + version "14.1.1" + resolved "https://registry.yarnpkg.com/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-14.1.1.tgz#060c134fa7fa843666e3e8574972b2b723773dd9" + integrity sha512-1L4mUYPBMvVDMZg1inUYyPvFSduot0g73hgfD9CODgbr4xiTYe0VOMTZzaRqYJYBA9mana0x4eaAaypmWo1r5A== + +"@next/swc-win32-ia32-msvc@14.1.1": + version "14.1.1" + resolved "https://registry.yarnpkg.com/@next/swc-win32-ia32-msvc/-/swc-win32-ia32-msvc-14.1.1.tgz#5c06889352b1f77e3807834a0d0afd7e2d2d1da2" + integrity sha512-jvIE9tsuj9vpbbXlR5YxrghRfMuG0Qm/nZ/1KDHc+y6FpnZ/apsgh+G6t15vefU0zp3WSpTMIdXRUsNl/7RSuw== + +"@next/swc-win32-x64-msvc@14.1.1": + version "14.1.1" + resolved "https://registry.yarnpkg.com/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-14.1.1.tgz#d38c63a8f9b7f36c1470872797d3735b4a9c5c52" + integrity sha512-S6K6EHDU5+1KrBDLko7/c1MNy/Ya73pIAmvKeFwsF4RmBFJSO7/7YeD4FnZ4iBdzE69PpQ4sOMU9ORKeNuxe8A== "@nicolo-ribaudo/chokidar-2@2.1.8-no-fsevents.3": version "2.1.8-no-fsevents.3" @@ -5561,6 +5542,11 @@ ansi-escapes@^4.2.1: dependencies: type-fest "^0.21.3" +ansi-escapes@^6.2.0: + version "6.2.1" + resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-6.2.1.tgz#76c54ce9b081dad39acec4b5d53377913825fb0f" + integrity sha512-4nJ3yixlEthEJ9Rk4vPcdBRkZvQZlYyu8j4/Mqz5sgIkddmEnH2Yj2ZrnP9S3tQOvSNRUIgVNF/1yPpRAGNRig== + ansi-fragments@^0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/ansi-fragments/-/ansi-fragments-0.2.1.tgz#24409c56c4cc37817c3d7caa99d8969e2de5a05e" @@ -5609,7 +5595,7 @@ ansi-styles@^5.0.0: resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-5.2.0.tgz#07449690ad45777d1924ac2abb2fc8895dba836b" integrity sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA== -ansi-styles@^6.1.0: +ansi-styles@^6.0.0, ansi-styles@^6.1.0, ansi-styles@^6.2.1: version "6.2.1" resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-6.2.1.tgz#0e62320cf99c21afff3b3012192546aacbfb05c5" integrity sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug== @@ -6134,11 +6120,6 @@ buffer@^6.0.3: base64-js "^1.3.1" ieee754 "^1.2.1" -builtin-modules@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/builtin-modules/-/builtin-modules-1.1.1.tgz#270f076c5a72c02f5b65a47df94c5fe3a278892f" - integrity sha512-wxXCdllwGhI2kCC0MnvTGYTMvnVZTvqgypkiTI8Pa5tcz2i6VqsqwYGgqwXji+4RgCzms6EajE4IxiUH6HH8nQ== - builtin-modules@^3.3.0: version "3.3.0" resolved "https://registry.yarnpkg.com/builtin-modules/-/builtin-modules-3.3.0.tgz#cae62812b89801e9656336e46223e030386be7b6" @@ -6311,7 +6292,12 @@ chalk@4.1.0: ansi-styles "^4.1.0" supports-color "^7.1.0" -chalk@^2.3.0, chalk@^2.4.2: +chalk@5.3.0: + version "5.3.0" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-5.3.0.tgz#67c20a7ebef70e7f3970a01f90fa210cb6860385" + integrity sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w== + +chalk@^2.4.2: version "2.4.2" resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" integrity 
sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== @@ -6437,6 +6423,13 @@ cli-cursor@3.1.0, cli-cursor@^3.1.0: dependencies: restore-cursor "^3.1.0" +cli-cursor@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/cli-cursor/-/cli-cursor-4.0.0.tgz#3cecfe3734bf4fe02a8361cbdc0f6fe28c6a57ea" + integrity sha512-VGtlMu3x/4DOtIUwEkRezxUZ2lBacNJCHash0N0WeZDBS+7Ux1dm3XWAgWYxLJFMMdOeXMHXorshEFhbMSGelg== + dependencies: + restore-cursor "^4.0.0" + cli-spinners@2.6.1: version "2.6.1" resolved "https://registry.yarnpkg.com/cli-spinners/-/cli-spinners-2.6.1.tgz#adc954ebe281c37a6319bfa401e6dd2488ffb70d" @@ -6456,6 +6449,14 @@ cli-table3@^0.6.1: optionalDependencies: "@colors/colors" "1.5.0" +cli-truncate@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/cli-truncate/-/cli-truncate-4.0.0.tgz#6cc28a2924fee9e25ce91e973db56c7066e6172a" + integrity sha512-nPdaFdQ0h/GEigbPClz11D0v/ZJEwxmeVZGeMo3Z5StPtUTkA9o1lD6QwoirYiSDzbcwn2XcjwmCp68W1IS4TA== + dependencies: + slice-ansi "^5.0.0" + string-width "^7.0.0" + cli-width@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/cli-width/-/cli-width-3.0.0.tgz#a2f48437a2caa9a22436e794bf071ec9e61cedf6" @@ -6570,7 +6571,7 @@ colorette@^1.0.7: resolved "https://registry.yarnpkg.com/colorette/-/colorette-1.4.0.tgz#5190fbb87276259a86ad700bff2c6d6faa3fca40" integrity sha512-Y2oEozpomLn7Q3HFP7dpww7AtMJplbM9lGZP6RDfHqmbeRjiwRg4n6VM6j4KLmRke85uWEI7JqF17f3pqdRA0g== -colorette@^2.0.14: +colorette@^2.0.14, colorette@^2.0.20: version "2.0.20" resolved "https://registry.yarnpkg.com/colorette/-/colorette-2.0.20.tgz#9eb793e6833067f7235902fcd3b09917a000a95a" integrity sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w== @@ -6595,12 +6596,17 @@ command-exists@^1.2.8: resolved "https://registry.yarnpkg.com/command-exists/-/command-exists-1.2.9.tgz#c50725af3808c8ab0260fd60b01fbfa25b954f69" integrity sha512-LTQ/SGc+s0Xc0Fu5WaKnR0YiygZkm9eKFvyS+fRsU7/ZWFF8ykFM6Pc9aCVf1+xasOOZpO3BAVgVrKvsqKHV7w== +commander@11.1.0: + version "11.1.0" + resolved "https://registry.yarnpkg.com/commander/-/commander-11.1.0.tgz#62fdce76006a68e5c1ab3314dc92e800eb83d906" + integrity sha512-yPVavfyCcRhmorC7rWlkHn15b4wDVgVmBA7kV4QVBsF7kv/9TKJAbAXVTxvTnwP8HHKjRCJDClKbciiYS7p0DQ== + commander@^10.0.1: version "10.0.1" resolved "https://registry.yarnpkg.com/commander/-/commander-10.0.1.tgz#881ee46b4f77d1c1dccc5823433aa39b022cbe06" integrity sha512-y4Mg2tXshplEbSGzx7amzPwKKOCGuoSRP/CjEdwwk0FOGlUbq6lKuoyDZTNZkmxHdJtp54hdfY/JUrdL7Xfdug== -commander@^2.11.0, commander@^2.12.1, commander@^2.20.0: +commander@^2.11.0, commander@^2.20.0: version "2.20.3" resolved "https://registry.yarnpkg.com/commander/-/commander-2.20.3.tgz#fd485e84c03eb4881c20722ba48035e8531aeb33" integrity sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ== @@ -6936,7 +6942,7 @@ debug@2.6.9, debug@^2.2.0, debug@^2.6.9: dependencies: ms "2.0.0" -debug@4, debug@^4.1.0, debug@^4.1.1, debug@^4.3.1, debug@^4.3.2, debug@^4.3.3, debug@^4.3.4: +debug@4, debug@4.3.4, debug@^4.1.0, debug@^4.1.1, debug@^4.3.1, debug@^4.3.2, debug@^4.3.3, debug@^4.3.4: version "4.3.4" resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865" integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== @@ -7148,11 +7154,6 @@ diff-sequences@^29.6.3: resolved 
"https://registry.yarnpkg.com/diff-sequences/-/diff-sequences-29.6.3.tgz#4deaf894d11407c51efc8418012f9e70b84ea921" integrity sha512-EjePK1srD3P08o2j4f0ExnylqRs5B9tJjcp9t1krH2qRi8CCdsYfwe9JgSLurFBWwq4uOlipzfk5fHNvwFKr8Q== -diff@^4.0.1: - version "4.0.2" - resolved "https://registry.yarnpkg.com/diff/-/diff-4.0.2.tgz#60f3aecb89d5fae520c11aa19efc2bb982aade7d" - integrity sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A== - dir-glob@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/dir-glob/-/dir-glob-3.0.1.tgz#56dbf73d992a4a93ba1584f4534063fd2e41717f" @@ -7160,14 +7161,6 @@ dir-glob@^3.0.1: dependencies: path-type "^4.0.0" -doctrine@0.7.2: - version "0.7.2" - resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-0.7.2.tgz#7cb860359ba3be90e040b26b729ce4bfa654c523" - integrity sha512-qiB/Rir6Un6Ad/TIgTRzsremsTGWzs8j7woXvp14jgq00676uBiBT5eUOi+FgRywZFVy5Us/c04ISRpZhRbS6w== - dependencies: - esutils "^1.1.6" - isarray "0.0.1" - doctrine@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-2.1.0.tgz#5cd01fc101621b42c4cd7f5d1a66243716d3f39d" @@ -7250,6 +7243,11 @@ emittery@^0.13.1: resolved "https://registry.yarnpkg.com/emittery/-/emittery-0.13.1.tgz#c04b8c3457490e0847ae51fced3af52d338e3dad" integrity sha512-DeWwawk6r5yR9jFgnDKYt4sLS0LmHJJi3ZOnb5/JdbYwj3nW+FxQnHIjhBKz8YLC7oRNPVM9NQ47I3CVx34eqQ== +emoji-regex@^10.3.0: + version "10.3.0" + resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-10.3.0.tgz#76998b9268409eb3dae3de989254d456e70cfe23" + integrity sha512-QpLs9D9v9kArv4lfDEgg1X/gN5XLnf/A6l9cs8SPZLRZR3ZkY9+kwIQTxm+fsSej5UMYGE8fdoaZVIBlqG0XTw== + emoji-regex@^7.0.1: version "7.0.3" resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-7.0.3.tgz#933a04052860c85e83c122479c4748a8e4c72156" @@ -7722,11 +7720,6 @@ estree-walker@^2.0.2: resolved "https://registry.yarnpkg.com/estree-walker/-/estree-walker-2.0.2.tgz#52f010178c2a4c117a7757cfe942adb7d2da4cac" integrity sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w== -esutils@^1.1.6: - version "1.1.6" - resolved "https://registry.yarnpkg.com/esutils/-/esutils-1.1.6.tgz#c01ccaa9ae4b897c6d0c3e210ae52f3c7a844375" - integrity sha512-RG1ZkUT7iFJG9LSHr7KDuuMSlujfeTtMNIcInURxKAxhMtwQhI3NrQhz26gZQYlsYZQKzsnwtpKrFKj9K9Qu1A== - esutils@^2.0.2: version "2.0.3" resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" @@ -7747,6 +7740,11 @@ eventemitter3@^4.0.4: resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-4.0.7.tgz#2de9b68f6528d5644ef5c59526a1b4a07306169f" integrity sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw== +eventemitter3@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-5.0.1.tgz#53f5ffd0a492ac800721bb42c66b841de96423c4" + integrity sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA== + events@^3.2.0: version "3.3.0" resolved "https://registry.yarnpkg.com/events/-/events-3.3.0.tgz#31a95ad0a924e2d2c419a813aeb2c4e878ea7400" @@ -7767,6 +7765,21 @@ execa@5.0.0: signal-exit "^3.0.3" strip-final-newline "^2.0.0" +execa@8.0.1: + version "8.0.1" + resolved "https://registry.yarnpkg.com/execa/-/execa-8.0.1.tgz#51f6a5943b580f963c3ca9c6321796db8cc39b8c" + integrity sha512-VyhnebXciFV2DESc+p6B+y0LjSm0krU4OgJN44qFAhBY0TJ+1V61tYD2+wHusZ6F9n5K+vl8k0sTy7PEfV4qpg== + dependencies: + cross-spawn "^7.0.3" + 
get-stream "^8.0.1" + human-signals "^5.0.0" + is-stream "^3.0.0" + merge-stream "^2.0.0" + npm-run-path "^5.1.0" + onetime "^6.0.0" + signal-exit "^4.1.0" + strip-final-newline "^3.0.0" + execa@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/execa/-/execa-1.0.0.tgz#c6236a5bb4df6d6f15e88e7f017798216749ddd8" @@ -8248,11 +8261,16 @@ genversion@^2.2.0: find-package "^1.0.0" mkdirp "^0.5.1" -get-caller-file@^2.0.0, get-caller-file@^2.0.1, get-caller-file@^2.0.5: +get-caller-file@^2.0.1, get-caller-file@^2.0.5: version "2.0.5" resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== +get-east-asian-width@^1.0.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/get-east-asian-width/-/get-east-asian-width-1.2.0.tgz#5e6ebd9baee6fb8b7b6bd505221065f0cd91f64e" + integrity sha512-2nk+7SIVb14QrgXFHcm84tD4bKQz0RxPuMT8Ag5KPOq7J5fEmAg0UbXdTOSHqNuHSU28k55qnceesxXRZGzKWA== + get-intrinsic@^1.1.3, get-intrinsic@^1.2.1, get-intrinsic@^1.2.2, get-intrinsic@^1.2.3, get-intrinsic@^1.2.4: version "1.2.4" resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.2.4.tgz#e385f5a4b5227d449c3eabbad05494ef0abbeadd" @@ -8301,6 +8319,11 @@ get-stream@^6.0.0: resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-6.0.1.tgz#a262d8eef67aced57c2852ad6167526a43cbf7b7" integrity sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg== +get-stream@^8.0.1: + version "8.0.1" + resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-8.0.1.tgz#def9dfd71742cd7754a7761ed43749a27d02eca2" + integrity sha512-VaUJspBffn/LMCJVoMvSAdmscJyS1auj5Zulnn5UoYcY531UWmdwhRWkcGKnGU93m5HSXP9LP2usOryrBtQowA== + get-symbol-description@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/get-symbol-description/-/get-symbol-description-1.0.2.tgz#533744d5aa20aca4e079c8e5daf7fd44202821f5" @@ -8753,6 +8776,11 @@ human-signals@^2.1.0: resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-2.1.0.tgz#dc91fcba42e4d06e4abaed33b3e7a3c02f514ea0" integrity sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw== +human-signals@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-5.0.0.tgz#42665a284f9ae0dade3ba41ebc37eb4b852f3a28" + integrity sha512-AXcZb6vzzrFAUE61HnN4mpLqd/cSIwNQjtNWR0euPm6y0iqx3G4gOXaIDdtdDwZmhwe82LA6+zinmW4UBWVePQ== + humanize-ms@^1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/humanize-ms/-/humanize-ms-1.2.1.tgz#c46e3159a293f6b896da29316d8b6fe8bb79bbed" @@ -8760,6 +8788,11 @@ humanize-ms@^1.2.1: dependencies: ms "^2.0.0" +husky@^9.0.11: + version "9.0.11" + resolved "https://registry.yarnpkg.com/husky/-/husky-9.0.11.tgz#fc91df4c756050de41b3e478b2158b87c1e79af9" + integrity sha512-AB6lFlbwwyIqMdHYhwPe+kjOC3Oc5P3nThEoW/AaO2BX3vJDjWPFxYLxokUZOo6RNX20He3AaT8sESs9NJcmEw== + iconv-lite@0.6.3, iconv-lite@^0.6.2: version "0.6.3" resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.6.3.tgz#a52f80bf38da1952eb5c681790719871a1a72501" @@ -8958,11 +8991,6 @@ invariant@*, invariant@^2.2.4: dependencies: loose-envify "^1.0.0" -inversify@^5.0.0: - version "5.1.1" - resolved "https://registry.yarnpkg.com/inversify/-/inversify-5.1.1.tgz#6fbd668c591337404e005a1946bfe0d802c08730" - integrity 
sha512-j8grHGDzv1v+8T1sAQ+3boTCntFPfvxLCkNcxB1J8qA0lUN+fAlSyYd+RXKvaPRL4AGyPxViutBEJHNXOyUdFQ== - ip-address@^9.0.5: version "9.0.5" resolved "https://registry.yarnpkg.com/ip-address/-/ip-address-9.0.5.tgz#117a960819b08780c3bd1f14ef3c1cc1d3f3ea5a" @@ -9082,6 +9110,18 @@ is-fullwidth-code-point@^3.0.0: resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d" integrity sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg== +is-fullwidth-code-point@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-4.0.0.tgz#fae3167c729e7463f8461ce512b080a49268aa88" + integrity sha512-O4L094N2/dZ7xqVdrXhh9r1KODPJpFms8B5sGdJLPy664AgvXsreZUyCQQNItZRDlYug4xStLjNp/sz3HvBowQ== + +is-fullwidth-code-point@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-5.0.0.tgz#9609efced7c2f97da7b60145ef481c787c7ba704" + integrity sha512-OVa3u9kkBbw7b8Xw5F9P+D/T9X+Z4+JruYVNapTjPYZYUznQ5YfWeFkOj606XYYW8yugTfC8Pj0hYqvi4ryAhA== + dependencies: + get-east-asian-width "^1.0.0" + is-generator-fn@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/is-generator-fn/-/is-generator-fn-2.1.0.tgz#7d140adc389aaf3011a8f2a2a4cfa6faadffb118" @@ -9200,6 +9240,11 @@ is-stream@^2.0.0: resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-2.0.1.tgz#fac1e3d53b97ad5a9d0ae9cef2389f5810a5c077" integrity sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg== +is-stream@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-3.0.0.tgz#e6bfd7aa6bef69f4f472ce9bb681e3e57b4319ac" + integrity sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA== + is-string@^1.0.5, is-string@^1.0.7: version "1.0.7" resolved "https://registry.yarnpkg.com/is-string/-/is-string-1.0.7.tgz#0dd12bf2006f255bb58f695110eff7491eebc0fd" @@ -9252,11 +9297,6 @@ is-wsl@^2.1.1, is-wsl@^2.2.0: dependencies: is-docker "^2.0.0" -isarray@0.0.1: - version "0.0.1" - resolved "https://registry.yarnpkg.com/isarray/-/isarray-0.0.1.tgz#8a18acfca9a8f4177e09abfc6038939b05d1eedf" - integrity sha512-D2S+3GLxWH+uhrNEcoh/fnmYeP8E8/zHl644d/jdA0g2uyXvy3sb0qxotE+ne0LtccHknQzWwZEzhak7oJ0COQ== - isarray@1.0.0, isarray@^1.0.0, isarray@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" @@ -10177,6 +10217,11 @@ lighthouse-logger@^1.0.0: debug "^2.6.9" marky "^1.2.2" +lilconfig@3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/lilconfig/-/lilconfig-3.0.0.tgz#f8067feb033b5b74dab4602a5f5029420be749bc" + integrity sha512-K2U4W2Ff5ibV7j7ydLr+zLAkIg5JJ4lPn1Ltsdt+Tz/IjQ8buJ55pZAxoP34lqIiwtF9iAvtLv3JGv7CAyAg+g== + lilconfig@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/lilconfig/-/lilconfig-2.1.0.tgz#78e23ac89ebb7e1bfbf25b18043de756548e7f52" @@ -10200,6 +10245,34 @@ lines-and-columns@~2.0.3: resolved "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-2.0.4.tgz#d00318855905d2660d8c0822e3f5a4715855fc42" integrity sha512-wM1+Z03eypVAVUCE7QdSqpVIvelbOakn1M0bPDoA4SGWPx3sNDVUiMo3L6To6WWGClB7VyXnhQ4Sn7gxiJbE6A== +lint-staged@^15.2.2: + version "15.2.2" + resolved "https://registry.yarnpkg.com/lint-staged/-/lint-staged-15.2.2.tgz#ad7cbb5b3ab70e043fa05bff82a09ed286bc4c5f" + integrity 
sha512-TiTt93OPh1OZOsb5B7k96A/ATl2AjIZo+vnzFZ6oHK5FuTk63ByDtxGQpHm+kFETjEWqgkF95M8FRXKR/LEBcw== + dependencies: + chalk "5.3.0" + commander "11.1.0" + debug "4.3.4" + execa "8.0.1" + lilconfig "3.0.0" + listr2 "8.0.1" + micromatch "4.0.5" + pidtree "0.6.0" + string-argv "0.3.2" + yaml "2.3.4" + +listr2@8.0.1: + version "8.0.1" + resolved "https://registry.yarnpkg.com/listr2/-/listr2-8.0.1.tgz#4d3f50ae6cec3c62bdf0e94f5c2c9edebd4b9c34" + integrity sha512-ovJXBXkKGfq+CwmKTjluEqFi3p4h8xvkxGQQAQan22YCgef4KZ1mKGjzfGh6PL6AW5Csw0QiQPNuQyH+6Xk3hA== + dependencies: + cli-truncate "^4.0.0" + colorette "^2.0.20" + eventemitter3 "^5.0.1" + log-update "^6.0.0" + rfdc "^1.3.0" + wrap-ansi "^9.0.0" + load-json-file@6.2.0: version "6.2.0" resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-6.2.0.tgz#5c7770b42cafa97074ca2848707c61662f4251a1" @@ -10302,6 +10375,17 @@ log-symbols@^4.1.0: chalk "^4.1.0" is-unicode-supported "^0.1.0" +log-update@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/log-update/-/log-update-6.0.0.tgz#0ddeb7ac6ad658c944c1de902993fce7c33f5e59" + integrity sha512-niTvB4gqvtof056rRIrTZvjNYE4rCUzO6X/X+kYjd7WFxXeJ0NwEFnRxX6ehkvv3jTwrXnNdtAak5XYZuIyPFw== + dependencies: + ansi-escapes "^6.2.0" + cli-cursor "^4.0.0" + slice-ansi "^7.0.0" + strip-ansi "^7.1.0" + wrap-ansi "^9.0.0" + logkitty@^0.7.1: version "0.7.1" resolved "https://registry.yarnpkg.com/logkitty/-/logkitty-0.7.1.tgz#8e8d62f4085a826e8d38987722570234e33c6aa7" @@ -11609,7 +11693,7 @@ metro@0.80.6, metro@^0.80.3: ws "^7.5.1" yargs "^17.6.2" -micromatch@^4.0.0, micromatch@^4.0.4, micromatch@^4.0.5: +micromatch@4.0.5, micromatch@^4.0.0, micromatch@^4.0.4, micromatch@^4.0.5: version "4.0.5" resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.5.tgz#bc8999a7cbbf77cdc89f132f6e467051b49090c6" integrity sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA== @@ -11644,6 +11728,11 @@ mimic-fn@^2.1.0: resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg== +mimic-fn@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-4.0.0.tgz#60a90550d5cb0b239cca65d893b1a53b29871ecc" + integrity sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw== + mimic-response@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/mimic-response/-/mimic-response-3.1.0.tgz#2d1d59af9c1b129815accc2c46a022a5ce1fa3c9" @@ -11908,11 +11997,11 @@ neo-async@^2.5.0, neo-async@^2.6.2: integrity sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw== "next@>= 13.5.0 < 15.0.0": - version "14.1.0" - resolved "https://registry.yarnpkg.com/next/-/next-14.1.0.tgz#b31c0261ff9caa6b4a17c5af019ed77387174b69" - integrity sha512-wlzrsbfeSU48YQBjZhDzOwhWhGsy+uQycR8bHAOt1LY1bn3zZEcDyHQOEoN3aWzQ8LHCAJ1nqrWCc9XF2+O45Q== + version "14.1.1" + resolved "https://registry.yarnpkg.com/next/-/next-14.1.1.tgz#92bd603996c050422a738e90362dff758459a171" + integrity sha512-McrGJqlGSHeaz2yTRPkEucxQKe5Zq7uPwyeHNmJaZNY4wx9E9QdxmTp310agFRoMuIYgQrCrT3petg13fSVOww== dependencies: - "@next/env" "14.1.0" + "@next/env" "14.1.1" "@swc/helpers" "0.5.2" busboy "1.6.0" caniuse-lite "^1.0.30001579" @@ -11920,15 +12009,15 @@ neo-async@^2.5.0, neo-async@^2.6.2: postcss "8.4.31" styled-jsx "5.1.1" optionalDependencies: - "@next/swc-darwin-arm64" 
"14.1.0" - "@next/swc-darwin-x64" "14.1.0" - "@next/swc-linux-arm64-gnu" "14.1.0" - "@next/swc-linux-arm64-musl" "14.1.0" - "@next/swc-linux-x64-gnu" "14.1.0" - "@next/swc-linux-x64-musl" "14.1.0" - "@next/swc-win32-arm64-msvc" "14.1.0" - "@next/swc-win32-ia32-msvc" "14.1.0" - "@next/swc-win32-x64-msvc" "14.1.0" + "@next/swc-darwin-arm64" "14.1.1" + "@next/swc-darwin-x64" "14.1.1" + "@next/swc-linux-arm64-gnu" "14.1.1" + "@next/swc-linux-arm64-musl" "14.1.1" + "@next/swc-linux-x64-gnu" "14.1.1" + "@next/swc-linux-x64-musl" "14.1.1" + "@next/swc-win32-arm64-msvc" "14.1.1" + "@next/swc-win32-ia32-msvc" "14.1.1" + "@next/swc-win32-x64-msvc" "14.1.1" nice-try@^1.0.4: version "1.0.5" @@ -12206,6 +12295,13 @@ npm-run-path@^4.0.1: dependencies: path-key "^3.0.0" +npm-run-path@^5.1.0: + version "5.3.0" + resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-5.3.0.tgz#e23353d0ebb9317f174e93417e4a4d82d0249e9f" + integrity sha512-ppwTtiJZq0O/ai0z7yfudtBpWIoxM8yE6nHi1X47eFR2EWORqfbu6CnPlNsjeN683eT0qG6H/Pyf9fCcvjnnnQ== + dependencies: + path-key "^4.0.0" + npmlog@^6.0.0, npmlog@^6.0.2: version "6.0.2" resolved "https://registry.yarnpkg.com/npmlog/-/npmlog-6.0.2.tgz#c8166017a42f2dea92d6453168dd865186a70830" @@ -12408,6 +12504,13 @@ onetime@^5.1.0, onetime@^5.1.2: dependencies: mimic-fn "^2.1.0" +onetime@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/onetime/-/onetime-6.0.0.tgz#7c24c18ed1fd2e9bca4bd26806a33613c77d34b4" + integrity sha512-1FlR+gjXK7X+AsAHso35MnyN5KqGwJRi/31ft6x0M194ht7S+rWAvd7PHss9xSKMzE0asv1pyIHaJYq+BbacAQ== + dependencies: + mimic-fn "^4.0.0" + open@^6.2.0: version "6.4.0" resolved "https://registry.yarnpkg.com/open/-/open-6.4.0.tgz#5c13e96d0dc894686164f18965ecfe889ecfc8a9" @@ -12703,6 +12806,11 @@ path-key@^3.0.0, path-key@^3.1.0: resolved "https://registry.yarnpkg.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375" integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== +path-key@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/path-key/-/path-key-4.0.0.tgz#295588dc3aee64154f877adb9d780b81c554bf18" + integrity sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ== + path-parse@^1.0.7: version "1.0.7" resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" @@ -12748,6 +12856,11 @@ picomatch@^4.0.1: resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-4.0.1.tgz#68c26c8837399e5819edce48590412ea07f17a07" integrity sha512-xUXwsxNjwTQ8K3GnT4pCJm+xq3RUPQbmkYJTP5aFIfNIvbcc/4MUxgBaaRSZJ6yGJZiGSyYlM6MzwTsRk8SYCg== +pidtree@0.6.0: + version "0.6.0" + resolved "https://registry.yarnpkg.com/pidtree/-/pidtree-0.6.0.tgz#90ad7b6d42d5841e69e0a2419ef38f8883aa057c" + integrity sha512-eG2dWTVw5bzqGRztnHExczNxt5VGsE6OwTeCG3fdUf9KBsZzO3R5OIIIzWR+iZA0NtZ+RDVdaoE2dK1cn6jH4g== + pify@5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/pify/-/pify-5.0.0.tgz#1f5eca3f5e87ebec28cc6d54a0e4aaf00acc127f" @@ -13423,11 +13536,6 @@ redent@^3.0.0: indent-string "^4.0.0" strip-indent "^3.0.0" -reflect-metadata@^0.1.12: - version "0.1.14" - resolved "https://registry.yarnpkg.com/reflect-metadata/-/reflect-metadata-0.1.14.tgz#24cf721fe60677146bb77eeb0e1f9dece3d65859" - integrity sha512-ZhYeb6nRaXCfhnndflDK8qI6ZQ/YcWZCISRAWICW9XYqMUwjZM9Z0DveWX/ABN01oxSHwVxKQmxeYZSsm0jh5A== - regenerate-unicode-properties@^10.1.0: version "10.1.1" resolved 
"https://registry.yarnpkg.com/regenerate-unicode-properties/-/regenerate-unicode-properties-10.1.1.tgz#6b0e05489d9076b04c436f318d9b067bba459480" @@ -13538,7 +13646,7 @@ resolve.exports@^2.0.0: resolved "https://registry.yarnpkg.com/resolve.exports/-/resolve.exports-2.0.2.tgz#f8c934b8e6a13f539e38b7098e2e36134f01e800" integrity sha512-X2UW6Nw3n/aMgDVy+0rSqgHlv39WZAlZrXCdnbyEiKm17DSqHX4MmQMaST3FbeWR5FTuRcUwYAziZajji0Y7mg== -resolve@^1.10.0, resolve@^1.14.2, resolve@^1.20.0, resolve@^1.22.1, resolve@^1.22.2, resolve@^1.22.4, resolve@^1.3.2: +resolve@^1.10.0, resolve@^1.14.2, resolve@^1.20.0, resolve@^1.22.1, resolve@^1.22.2, resolve@^1.22.4: version "1.22.8" resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.8.tgz#b6c87a9f2aa06dfab52e3d70ac8cde321fa5a48d" integrity sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw== @@ -13555,6 +13663,14 @@ restore-cursor@^3.1.0: onetime "^5.1.0" signal-exit "^3.0.2" +restore-cursor@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/restore-cursor/-/restore-cursor-4.0.0.tgz#519560a4318975096def6e609d44100edaa4ccb9" + integrity sha512-I9fPXU9geO9bHOt9pHHOhOkYerIMsmVaWB0rA2AI9ERh/+x/i7MV5HKBNrg+ljO5eoPVgCcnFuRjJ9uH6I/3eg== + dependencies: + onetime "^5.1.0" + signal-exit "^3.0.2" + retry@^0.12.0: version "0.12.0" resolved "https://registry.yarnpkg.com/retry/-/retry-0.12.0.tgz#1b42a6266a21f07421d1b0b54b7dc167b01c013b" @@ -13565,6 +13681,11 @@ reusify@^1.0.4: resolved "https://registry.yarnpkg.com/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76" integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw== +rfdc@^1.3.0: + version "1.3.1" + resolved "https://registry.yarnpkg.com/rfdc/-/rfdc-1.3.1.tgz#2b6d4df52dffe8bb346992a10ea9451f24373a8f" + integrity sha512-r5a3l5HzYlIC68TpmYKlxWjmOP6wiPJ1vWv2HeLhNsRZMrCkxeqxiHlQ21oXmQ4F3SiryXBHhAD7JZqvOJjFmg== + rimraf@^2.6.2: version "2.7.1" resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.7.1.tgz#35797f13a7fdadc566142c29d4f07ccad483e3ec" @@ -13742,7 +13863,7 @@ semantic-ui-react@^0.88.2: react-popper "^1.3.4" shallowequal "^1.1.0" -"semver@2 || 3 || 4 || 5", semver@^5.3.0, semver@^5.5.0, semver@^5.6.0: +"semver@2 || 3 || 4 || 5", semver@^5.5.0, semver@^5.6.0: version "5.7.2" resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.2.tgz#48d55db737c3287cd4835e17fa13feace1c41ef8" integrity sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g== @@ -13911,7 +14032,7 @@ signal-exit@3.0.7, signal-exit@^3.0.0, signal-exit@^3.0.2, signal-exit@^3.0.3, s resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.7.tgz#a9a1767f8af84155114eaabd73f99273c8f59ad9" integrity sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ== -signal-exit@^4.0.1: +signal-exit@^4.0.1, signal-exit@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-4.1.0.tgz#952188c1cbd546070e2dd20d0f41c0ae0530cb04" integrity sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw== @@ -13995,6 +14116,22 @@ slice-ansi@^2.0.0: astral-regex "^1.0.0" is-fullwidth-code-point "^2.0.0" +slice-ansi@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/slice-ansi/-/slice-ansi-5.0.0.tgz#b73063c57aa96f9cd881654b15294d95d285c42a" + integrity sha512-FC+lgizVPfie0kkhqUScwRu1O/lF6NOgJmlCgK+/LYxDCTk8sGelYaHDhFcDN+Sn3Cv+3VSa4Byeo+IMCzpMgQ== + dependencies: + ansi-styles 
"^6.0.0" + is-fullwidth-code-point "^4.0.0" + +slice-ansi@^7.0.0: + version "7.1.0" + resolved "https://registry.yarnpkg.com/slice-ansi/-/slice-ansi-7.1.0.tgz#cd6b4655e298a8d1bdeb04250a433094b347b9a9" + integrity sha512-bSiSngZ/jWeX93BqeIAbImyTbEihizcwNjFoRUIY/T1wWQsfsm2Vw1agPKylXvQTU7iASGdHhyqRlqQzfz+Htg== + dependencies: + ansi-styles "^6.2.1" + is-fullwidth-code-point "^5.0.0" + smart-buffer@^4.2.0: version "4.2.0" resolved "https://registry.yarnpkg.com/smart-buffer/-/smart-buffer-4.2.0.tgz#6e1d71fa4f18c05f7d0ff216dd16a481d0e8d9ae" @@ -14199,6 +14336,11 @@ streamsearch@^1.1.0: resolved "https://registry.yarnpkg.com/streamsearch/-/streamsearch-1.1.0.tgz#404dd1e2247ca94af554e841a8ef0eaa238da764" integrity sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg== +string-argv@0.3.2: + version "0.3.2" + resolved "https://registry.yarnpkg.com/string-argv/-/string-argv-0.3.2.tgz#2b6d0ef24b656274d957d54e0a4bbf6153dc02b6" + integrity sha512-aqD2Q0144Z+/RqG52NeHEkZauTAUWJO8c6yTftGJKO3Tja5tUgIfmIl6kExvhtxSDP7fXB6DvzkfMpCd/F3G+Q== + string-length@^4.0.1: version "4.0.2" resolved "https://registry.yarnpkg.com/string-length/-/string-length-4.0.2.tgz#a8a8dc7bd5c1a82b9b3c8b87e125f66871b6e57a" @@ -14243,6 +14385,15 @@ string-width@^5.0.1, string-width@^5.1.2: emoji-regex "^9.2.2" strip-ansi "^7.0.1" +string-width@^7.0.0: + version "7.1.0" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-7.1.0.tgz#d994252935224729ea3719c49f7206dc9c46550a" + integrity sha512-SEIJCWiX7Kg4c129n48aDRwLbFb2LJmXXFrWBG4NGaRtMQ3myKPKbwrD1BKqQn74oCoNMBVrfDEr5M9YxCsrkw== + dependencies: + emoji-regex "^10.3.0" + get-east-asian-width "^1.0.0" + strip-ansi "^7.1.0" + string.prototype.trim@^1.2.8: version "1.2.8" resolved "https://registry.yarnpkg.com/string.prototype.trim/-/string.prototype.trim-1.2.8.tgz#f9ac6f8af4bd55ddfa8895e6aea92a96395393bd" @@ -14305,7 +14456,7 @@ strip-ansi@^6.0.0, strip-ansi@^6.0.1: dependencies: ansi-regex "^5.0.1" -strip-ansi@^7.0.1: +strip-ansi@^7.0.1, strip-ansi@^7.1.0: version "7.1.0" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-7.1.0.tgz#d5b6568ca689d8561370b0707685d22434faff45" integrity sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ== @@ -14332,6 +14483,11 @@ strip-final-newline@^2.0.0: resolved "https://registry.yarnpkg.com/strip-final-newline/-/strip-final-newline-2.0.0.tgz#89b852fb2fcbe936f6f4b3187afb0a12c1ab58ad" integrity sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA== +strip-final-newline@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/strip-final-newline/-/strip-final-newline-3.0.0.tgz#52894c313fbff318835280aed60ff71ebf12b8fd" + integrity sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw== + strip-indent@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/strip-indent/-/strip-indent-3.0.0.tgz#c32e1cee940b6b3432c771bc2c54bcce73cd3001" @@ -14685,90 +14841,18 @@ tsconfig-paths@^4.1.2: resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.6.2.tgz#703ac29425e7b37cd6fd456e92404d46d1f3e4ae" integrity sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q== -tslib@1.9.0: - version "1.9.0" - resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.9.0.tgz#e37a86fda8cbbaf23a057f473c9f4dc64e5fc2e8" - integrity sha512-f/qGG2tUkrISBlQZEjEqoZ3B2+npJjIf04H1wuAv9iA8i04Icp+61KRXxFdha22670NJopsZCIjhC3SnjPRKrQ== - -tslib@^1.11.1, 
tslib@^1.7.1, tslib@^1.8.0, tslib@^1.8.1: +tslib@^1.11.1, tslib@^1.8.1: version "1.14.1" resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00" integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg== -tslint-config-airbnb@^5.8.0: - version "5.11.2" - resolved "https://registry.yarnpkg.com/tslint-config-airbnb/-/tslint-config-airbnb-5.11.2.tgz#2f3d239fa3923be8e7a4372217a7ed552671528f" - integrity sha512-mUpHPTeeCFx8XARGG/kzYP4dPSOgoCqNiYbGHh09qTH8q+Y1ghsOgaeZKYYQT7IyxMos523z/QBaiv2zKNBcow== - dependencies: - tslint-consistent-codestyle "^1.14.1" - tslint-eslint-rules "^5.4.0" - tslint-microsoft-contrib "~5.2.1" - -tslint-consistent-codestyle@^1.14.1: - version "1.16.0" - resolved "https://registry.yarnpkg.com/tslint-consistent-codestyle/-/tslint-consistent-codestyle-1.16.0.tgz#52348ea899a7e025b37cc6545751c6a566a19077" - integrity sha512-ebR/xHyMEuU36hGNOgCfjGBNYxBPixf0yU1Yoo6s3BrpBRFccjPOmIVaVvQsWAUAMdmfzHOCihVkcaMfimqvHw== - dependencies: - "@fimbul/bifrost" "^0.21.0" - tslib "^1.7.1" - tsutils "^2.29.0" - -tslint-eslint-rules@^5.4.0: - version "5.4.0" - resolved "https://registry.yarnpkg.com/tslint-eslint-rules/-/tslint-eslint-rules-5.4.0.tgz#e488cc9181bf193fe5cd7bfca213a7695f1737b5" - integrity sha512-WlSXE+J2vY/VPgIcqQuijMQiel+UtmXS+4nvK4ZzlDiqBfXse8FAvkNnTcYhnQyOTW5KFM+uRRGXxYhFpuBc6w== - dependencies: - doctrine "0.7.2" - tslib "1.9.0" - tsutils "^3.0.0" - -tslint-microsoft-contrib@~5.2.1: - version "5.2.1" - resolved "https://registry.yarnpkg.com/tslint-microsoft-contrib/-/tslint-microsoft-contrib-5.2.1.tgz#a6286839f800e2591d041ea2800c77487844ad81" - integrity sha512-PDYjvpo0gN9IfMULwKk0KpVOPMhU6cNoT9VwCOLeDl/QS8v8W2yspRpFFuUS7/c5EIH/n8ApMi8TxJAz1tfFUA== - dependencies: - tsutils "^2.27.2 <2.29.0" - -tslint@^5.7.0: - version "5.20.1" - resolved "https://registry.yarnpkg.com/tslint/-/tslint-5.20.1.tgz#e401e8aeda0152bc44dd07e614034f3f80c67b7d" - integrity sha512-EcMxhzCFt8k+/UP5r8waCf/lzmeSyVlqxqMEDQE7rWYiQky8KpIBz1JAoYXfROHrPZ1XXd43q8yQnULOLiBRQg== - dependencies: - "@babel/code-frame" "^7.0.0" - builtin-modules "^1.1.1" - chalk "^2.3.0" - commander "^2.12.1" - diff "^4.0.1" - glob "^7.1.1" - js-yaml "^3.13.1" - minimatch "^3.0.4" - mkdirp "^0.5.1" - resolve "^1.3.2" - semver "^5.3.0" - tslib "^1.8.0" - tsutils "^2.29.0" - -tsutils@3, tsutils@^3.0.0, tsutils@^3.5.0: +tsutils@3: version "3.21.0" resolved "https://registry.yarnpkg.com/tsutils/-/tsutils-3.21.0.tgz#b48717d394cea6c1e096983eed58e9d61715b623" integrity sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA== dependencies: tslib "^1.8.1" -"tsutils@^2.27.2 <2.29.0": - version "2.28.0" - resolved "https://registry.yarnpkg.com/tsutils/-/tsutils-2.28.0.tgz#6bd71e160828f9d019b6f4e844742228f85169a1" - integrity sha512-bh5nAtW0tuhvOJnx1GLRn5ScraRLICGyJV5wJhtRWOLsxW70Kk5tZtpK3O/hW6LDnqKS9mlUMPZj9fEMJ0gxqA== - dependencies: - tslib "^1.8.1" - -tsutils@^2.29.0: - version "2.29.0" - resolved "https://registry.yarnpkg.com/tsutils/-/tsutils-2.29.0.tgz#32b488501467acbedd4b85498673a0812aca0b99" - integrity sha512-g5JVHCIJwzfISaXpXE1qvNalca5Jwob6FjI4AoPlqMusJ6ftFE7IkkFoMhVLRgK+4Kx3gkzb8UZK5t5yTTvEmA== - dependencies: - tslib "^1.8.1" - tuf-js@^1.1.7: version "1.1.7" resolved "https://registry.yarnpkg.com/tuf-js/-/tuf-js-1.1.7.tgz#21b7ae92a9373015be77dfe0cb282a80ec3bbe43" @@ -15542,6 +15626,15 @@ wrap-ansi@^8.1.0: string-width "^5.0.1" strip-ansi "^7.0.1" +wrap-ansi@^9.0.0: + version "9.0.0" + resolved 
"https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-9.0.0.tgz#1a3dc8b70d85eeb8398ddfb1e4a02cd186e58b3e" + integrity sha512-G8ura3S+3Z2G+mkgNRq8dqaFZAuxfsxpBB8OCTGRTCtp+l/v9nbFNmCUP1BZMts3G1142MsZfn6eeUKrr4PD1Q== + dependencies: + ansi-styles "^6.2.1" + string-width "^7.0.0" + strip-ansi "^7.1.0" + wrappy@1: version "1.0.2" resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" @@ -15676,7 +15769,7 @@ yallist@^4.0.0: resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== -yaml@^2.2.1: +yaml@2.3.4, yaml@^2.2.1: version "2.3.4" resolved "https://registry.yarnpkg.com/yaml/-/yaml-2.3.4.tgz#53fc1d514be80aabf386dc6001eb29bf3b7523b2" integrity sha512-8aAvwVUSHpfEqTQ4w/KMlf3HcRdt50E5ODIQJBw1fQ5RL34xabzxtUlzTXVqc4rkZsPbvrXKWnABCD7kWSmocA==