diff --git a/.github/workflows/manual-publish.yml b/.github/workflows/manual-publish.yml index 36805ec59f..31a6b8281c 100644 --- a/.github/workflows/manual-publish.yml +++ b/.github/workflows/manual-publish.yml @@ -17,6 +17,8 @@ on: - packages/sdk/vercel - packages/sdk/akamai-base - packages/sdk/akamai-edgekv + - packages/store/node-server-sdk-redis + - packages/store/node-server-sdk-dynamodb prerelease: description: 'Is this a prerelease. If so, then the latest tag will not be updated in npm.' type: boolean diff --git a/.github/workflows/node-dynamodb.yml b/.github/workflows/node-dynamodb.yml new file mode 100644 index 0000000000..4ef8b37cc6 --- /dev/null +++ b/.github/workflows/node-dynamodb.yml @@ -0,0 +1,29 @@ +name: store/node-server-sdk-dynamodb + +on: + push: + branches: [main, rlamb/implement-dynamodb-store] + paths-ignore: + - '**.md' #Do not need to run CI for markdown changes. + pull_request: + branches: [main] + paths-ignore: + - '**.md' + +jobs: + build-test-node-dynamo: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-node@v3 + with: + node-version: 16 + registry-url: 'https://registry.npmjs.org' + - run: | + sudo docker run -d -p 8000:8000 amazon/dynamodb-local + - id: shared + name: Shared CI Steps + uses: ./actions/ci + with: + workspace_name: '@launchdarkly/node-server-sdk-dynamodb' + workspace_path: packages/store/node-server-sdk-dynamodb diff --git a/README.md b/README.md index 813913ca34..b5b438c67d 100644 --- a/README.md +++ b/README.md @@ -22,6 +22,7 @@ This includes shared libraries, used by SDKs and other tools, as well as SDKs. | Store Packages | npm | issues | tests | | ------------------------------------------------------------------------------------- | --------------------------------------------------- | ------------------------------- | ------------------------------------------------------- | | [@launchdarkly/node-server-sdk-redis](packages/store/node-server-sdk-redis/README.md) | [![NPM][node-redis-npm-badge]][node-redis-npm-link] | [Node Redis][node-redis-issues] | [![Actions Status][node-redis-ci-badge]][node-redis-ci] | +| [@launchdarkly/node-server-sdk-dynamodb](packages/store/node-server-sdk-dynamodb/README.md) | [![NPM][node-dynamodb-npm-badge]][node-dynamodb-npm-link] | [Node DynamoDB][node-dynamodb-issues] | [![Actions Status][node-dynamodb-ci-badge]][node-dynamodb-ci] | ## Organization @@ -131,3 +132,9 @@ We encourage pull requests and other contributions from the community. 
Check out [node-redis-npm-badge]: https://img.shields.io/npm/v/@launchdarkly/node-server-sdk-redis.svg?style=flat-square [node-redis-npm-link]: https://www.npmjs.com/package/@launchdarkly/node-server-sdk-redis [node-redis-issues]: https://github.com/launchdarkly/js-core/issues?q=is%3Aissue+is%3Aopen+label%3A%22package%3A+store%2Fnode-server-sdk-redis%22+ +[//]: # 'store/node-server-sdk-dynamodb' +[node-dynamodb-ci-badge]: https://github.com/launchdarkly/js-core/actions/workflows/node-dynamodb.yml/badge.svg +[node-dynamodb-ci]: https://github.com/launchdarkly/js-core/actions/workflows/node-dynamodb.yml +[node-dynamodb-npm-badge]: https://img.shields.io/npm/v/@launchdarkly/node-server-sdk-dynamodb.svg?style=flat-square +[node-dynamodb-npm-link]: https://www.npmjs.com/package/@launchdarkly/node-server-sdk-dynamodb +[node-dynamodb-issues]: https://github.com/launchdarkly/js-core/issues?q=is%3Aissue+is%3Aopen+label%3A%22package%3A+store%2Fnode-server-sdk-dynamodb%22+ \ No newline at end of file diff --git a/package.json b/package.json index 3e2253447b..e5a3f7904b 100644 --- a/package.json +++ b/package.json @@ -13,7 +13,8 @@ "packages/sdk/akamai-base/example", "packages/sdk/akamai-edgekv", "packages/sdk/akamai-edgekv/example", - "packages/store/node-server-sdk-redis" + "packages/store/node-server-sdk-redis", + "packages/store/node-server-sdk-dynamodb" ], "private": true, "scripts": { diff --git a/packages/store/node-server-sdk-dynamodb/CHANGELOG.md b/packages/store/node-server-sdk-dynamodb/CHANGELOG.md new file mode 100644 index 0000000000..e69de29bb2 diff --git a/packages/store/node-server-sdk-dynamodb/LICENSE b/packages/store/node-server-sdk-dynamodb/LICENSE new file mode 100644 index 0000000000..f33486934f --- /dev/null +++ b/packages/store/node-server-sdk-dynamodb/LICENSE @@ -0,0 +1,13 @@ +Copyright 2023 Catamorphic, Co. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. \ No newline at end of file diff --git a/packages/store/node-server-sdk-dynamodb/README.md b/packages/store/node-server-sdk-dynamodb/README.md new file mode 100644 index 0000000000..d3a790430c --- /dev/null +++ b/packages/store/node-server-sdk-dynamodb/README.md @@ -0,0 +1,110 @@ +# LaunchDarkly Server-Side SDK for Node.js + +[![NPM][node-dynamodb-npm-badge]][node-dynamodb-npm-link] +[![Actions Status][node-dynamodb-ci-badge]][node-dynamodb-ci] +[![Documentation](https://img.shields.io/static/v1?label=GitHub+Pages&message=API+reference&color=00add8)](https://launchdarkly.github.io/js-core/packages/store/node-server-sdk-dynamodb/docs/) + +This library provides a DynamoDB-backed persistence mechanism (feature store) for the [LaunchDarkly Node.js SDK](https://github.com/launchdarkly/js-core/packages/sdk/server-node), replacing the default in-memory feature store. It uses the AWS SDK for Node.js. +The minimum version of the LaunchDarkly Server-Side SDK for Node for use with this library is 8.0.0. + +This SDK is a beta version and should not be considered ready for production use while this message is visible. 
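+
+For illustration, the sketch below shows one way to create a table with the schema this store expects (described under Quick setup later in this README), using the AWS SDK for JavaScript v3. The table name, region, and throughput values are placeholders and are not part of this library; you can equally create the table through the AWS console or CLI.
+
+```typescript
+import { CreateTableCommand, DynamoDBClient } from '@aws-sdk/client-dynamodb';
+
+// Creates a table with the schema this store requires: a string partition key
+// named "namespace" and a string sort key named "key".
+async function createFeatureStoreTable(tableName: string) {
+  const client = new DynamoDBClient({ region: 'us-east-1' }); // placeholder region
+  await client.send(
+    new CreateTableCommand({
+      TableName: tableName,
+      AttributeDefinitions: [
+        { AttributeName: 'namespace', AttributeType: 'S' },
+        { AttributeName: 'key', AttributeType: 'S' },
+      ],
+      KeySchema: [
+        { AttributeName: 'namespace', KeyType: 'HASH' },
+        { AttributeName: 'key', KeyType: 'RANGE' },
+      ],
+      // Placeholder throughput; size this (or use on-demand billing) for your workload.
+      ProvisionedThroughput: { ReadCapacityUnits: 5, WriteCapacityUnits: 5 },
+    })
+  );
+  client.destroy();
+}
+```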
+
+## LaunchDarkly overview
+
+[LaunchDarkly](https://www.launchdarkly.com) is a feature management platform that serves over 100 billion feature flags daily to help teams build better software, faster. [Get started](https://docs.launchdarkly.com/home/getting-started) using LaunchDarkly today!
+
+[![Twitter Follow](https://img.shields.io/twitter/follow/launchdarkly.svg?style=social&label=Follow&maxAge=2592000)](https://twitter.com/intent/follow?screen_name=launchdarkly)
+
+## Supported Node versions
+
+This package is compatible with Node.js versions 14 and above.
+
+## Getting started
+
+Refer to [Using DynamoDB as a persistent feature store](https://docs.launchdarkly.com/sdk/features/storing-data/dynamodb#nodejs-server-side).
+
+## Quick setup
+
+1. In DynamoDB, create a table with the following schema: a partition key called "namespace" and a sort key called "key", both with a string type. The LaunchDarkly library does not create the table automatically, because it has no way of knowing what additional properties (such as permissions and throughput) you would want it to have.
+
+2. Install this package with `npm` or `yarn`:
+
+`npm install @launchdarkly/node-server-sdk-dynamodb --save`
+
+3. If your application does not already have its own dependency on the `@aws-sdk/client-dynamodb` package, and if it will _not_ be running in AWS Lambda, add `@aws-sdk/client-dynamodb` as well:
+
+`npm install @aws-sdk/client-dynamodb --save`
+
+The `@launchdarkly/node-server-sdk-dynamodb` package does not declare `@aws-sdk/client-dynamodb` as a transitive dependency, because the Lambda runtime provides it automatically and bundling it would unnecessarily increase the size of applications deployed in Lambda. Therefore, if you are not using Lambda you need to provide `@aws-sdk/client-dynamodb` separately.
+
+4. Import the package:
+
+```typescript
+const { DynamoDBFeatureStore } = require('@launchdarkly/node-server-sdk-dynamodb');
+```
+
+5. When configuring your SDK client, add the DynamoDB feature store:
+
+```typescript
+const store = DynamoDBFeatureStore('YOUR TABLE NAME');
+const config = { featureStore: store };
+const client = LaunchDarkly.init('YOUR SDK KEY', config);
+```
+
+By default, the DynamoDB client will try to get your AWS credentials and region name from environment variables and/or local configuration files, as described in the AWS SDK documentation. You can also specify any valid [DynamoDB client options](https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/DynamoDB.html#constructor-property) like this:
+
+```typescript
+const dynamoDBOptions = { credentials: { accessKeyId: 'YOUR KEY', secretAccessKey: 'YOUR SECRET' } };
+const store = DynamoDBFeatureStore('YOUR TABLE NAME', { clientOptions: dynamoDBOptions });
+```
+
+Alternatively, if you already have a fully configured DynamoDB client object, you can tell LaunchDarkly to use that:
+
+```typescript
+const store = DynamoDBFeatureStore('YOUR TABLE NAME', { dynamoDBClient: myDynamoDBClientInstance });
+```
+
+6. If you are running a [LaunchDarkly Relay Proxy](https://github.com/launchdarkly/ld-relay) instance, or any other process that will pre-populate the DynamoDB table with feature flags from LaunchDarkly, you can use [daemon mode](https://github.com/launchdarkly/ld-relay#daemon-mode), so that the SDK retrieves flag data only from DynamoDB and does not communicate directly with LaunchDarkly.
This is controlled by the SDK's `useLdd` option:
+
+```typescript
+const config = { featureStore: store, useLdd: true };
+const client = LaunchDarkly.init('YOUR SDK KEY', config);
+```
+
+7. If the same DynamoDB table is being shared by SDK clients for different LaunchDarkly environments, set the `prefix` option to a different short string for each one to keep the keys from colliding:
+
+```typescript
+const store = DynamoDBFeatureStore('YOUR TABLE NAME', { prefix: 'env1' });
+```
+
+## Caching behavior
+
+To reduce traffic to DynamoDB, there is an optional in-memory cache that retains the last known data for a configurable amount of time. This is on by default; to turn it off (and guarantee that the latest feature flag data will always be retrieved from DynamoDB for every flag evaluation), configure the store as follows:
+
+```typescript
+const store = DynamoDBFeatureStore('YOUR TABLE NAME', { cacheTTL: 0 });
+```
+
+## Contributing
+
+We encourage pull requests and other contributions from the community. Check out our [contributing guidelines](CONTRIBUTING.md) for instructions on how to contribute to this SDK.
+
+## About LaunchDarkly
+
+- LaunchDarkly is a continuous delivery platform that provides feature flags as a service and allows developers to iterate quickly and safely. We allow you to easily flag your features and manage them from the LaunchDarkly dashboard. With LaunchDarkly, you can:
+  - Roll out a new feature to a subset of your users (like a group of users who opt in to a beta tester group), gathering feedback and bug reports from real-world use cases.
+  - Gradually roll out a feature to an increasing percentage of users, and track the effect that the feature has on key metrics (for instance, how likely is a user to complete a purchase if they have feature A versus feature B?).
+  - Turn off a feature that you realize is causing performance problems in production, without needing to re-deploy, or even restart the application with a changed configuration file.
+  - Grant access to certain features based on user attributes, like payment plan (e.g., users on the ‘gold’ plan get access to more features than users on the ‘silver’ plan). Disable parts of your application to facilitate maintenance, without taking everything offline.
+- LaunchDarkly provides feature flag SDKs for a wide variety of languages and technologies. Check out [our documentation](https://docs.launchdarkly.com/sdk) for a complete list.
+- Explore LaunchDarkly + - [launchdarkly.com](https://www.launchdarkly.com/ 'LaunchDarkly Main Website') for more information + - [docs.launchdarkly.com](https://docs.launchdarkly.com/ 'LaunchDarkly Documentation') for our documentation and SDK reference guides + - [apidocs.launchdarkly.com](https://apidocs.launchdarkly.com/ 'LaunchDarkly API Documentation') for our API documentation + - [blog.launchdarkly.com](https://blog.launchdarkly.com/ 'LaunchDarkly Blog Documentation') for the latest product updates + +[node-dynamodb-ci-badge]: https://github.com/launchdarkly/js-core/actions/workflows/node-dynamodb.yml/badge.svg +[node-dynamodb-ci]: https://github.com/launchdarkly/js-core/actions/workflows/node-dynamodb.yml + +[node-dynamodb-npm-badge]: https://img.shields.io/npm/v/@launchdarkly/node-server-sdk-dynamodb.svg?style=flat-square +[node-dynamodb-npm-link]: https://www.npmjs.com/package/@launchdarkly/node-server-sdk-dynamodb diff --git a/packages/store/node-server-sdk-dynamodb/__tests__/DynamoDBBigSegmentStore.test.ts b/packages/store/node-server-sdk-dynamodb/__tests__/DynamoDBBigSegmentStore.test.ts new file mode 100644 index 0000000000..90b06d115b --- /dev/null +++ b/packages/store/node-server-sdk-dynamodb/__tests__/DynamoDBBigSegmentStore.test.ts @@ -0,0 +1,136 @@ +import { DynamoDBClient, PutItemCommand, UpdateItemCommand } from '@aws-sdk/client-dynamodb'; +import { interfaces } from '@launchdarkly/node-server-sdk'; +import DynamoDBBigSegmentStore, { + KEY_METADATA, + KEY_USER_DATA, + ATTR_EXCLUDED, + ATTR_INCLUDED, + ATTR_SYNC_ON, +} from '../src/DynamoDBBigSegmentStore'; +import clearPrefix from './clearPrefix'; +import setupTable from './setupTable'; +import LDDynamoDBOptions from '../src/LDDynamoDBOptions'; +import { numberValue, stringValue } from '../src/Value'; + +const FAKE_HASH = 'userhash'; + +const DEFAULT_TABLE_NAME = 'test-table-big-segments'; + +const DEFAULT_CLIENT_OPTIONS: LDDynamoDBOptions = { + clientOptions: { + endpoint: 'http://localhost:8000', + region: 'us-west-2', + credentials: { accessKeyId: 'fake', secretAccessKey: 'fake' }, + }, +}; + +async function setMetadata( + prefix: string | undefined, + metadata: interfaces.BigSegmentStoreMetadata +): Promise { + const client = new DynamoDBClient(DEFAULT_CLIENT_OPTIONS.clientOptions!); + const key = prefix ? `${prefix}:${KEY_METADATA}` : KEY_METADATA; + await client.send( + new PutItemCommand({ + TableName: DEFAULT_TABLE_NAME, + Item: { + namespace: stringValue(key), + key: stringValue(key), + [ATTR_SYNC_ON]: numberValue(metadata.lastUpToDate!), + }, + }) + ); + client.destroy(); +} + +async function setSegments( + prefix: string | undefined, + userHashKey: string, + included: string[], + excluded: string[] +): Promise { + const client = new DynamoDBClient(DEFAULT_CLIENT_OPTIONS.clientOptions!); + const key = prefix ? 
`${prefix}:${KEY_USER_DATA}` : KEY_USER_DATA;
+
+  async function addToSet(attrName: string, values: string[]) {
+    await client.send(
+      new UpdateItemCommand({
+        TableName: DEFAULT_TABLE_NAME,
+        Key: {
+          namespace: stringValue(key),
+          key: stringValue(userHashKey),
+        },
+        UpdateExpression: `ADD ${attrName} :value`,
+        ExpressionAttributeValues: {
+          ':value': { SS: values },
+        },
+      })
+    );
+  }
+
+  if (included && included.length) {
+    await addToSet(ATTR_INCLUDED, included);
+  }
+
+  if (excluded && excluded.length) {
+    await addToSet(ATTR_EXCLUDED, excluded);
+  }
+
+  client.destroy();
+}
+
+describe.each([undefined, 'app1'])('given a DynamoDB big segment store', (prefixParam) => {
+  let store: DynamoDBBigSegmentStore;
+
+  beforeEach(async () => {
+    await setupTable(DEFAULT_TABLE_NAME, DEFAULT_CLIENT_OPTIONS.clientOptions!);
+    await clearPrefix(DEFAULT_TABLE_NAME, prefixParam);
+    // Use param directly to test undefined.
+    store = new DynamoDBBigSegmentStore(DEFAULT_TABLE_NAME, {
+      ...DEFAULT_CLIENT_OPTIONS,
+      prefix: prefixParam,
+    });
+  });
+
+  afterEach(async () => {
+    store.close();
+  });
+
+  it('can get populated metadata', async () => {
+    const expected = { lastUpToDate: 1234567890 };
+    await setMetadata(prefixParam, expected);
+    const meta = await store.getMetadata();
+    expect(meta).toEqual(expected);
+  });
+
+  it('can get metadata when not populated', async () => {
+    const meta = await store.getMetadata();
+    expect(meta?.lastUpToDate).toBeUndefined();
+  });
+
+  it('can get user membership for a user which has no membership', async () => {
+    const membership = await store.getUserMembership(FAKE_HASH);
+    expect(membership).toBeUndefined();
+  });
+
+  it('can get membership for a user that is only included', async () => {
+    await setSegments(prefixParam, FAKE_HASH, ['key1', 'key2'], []);
+
+    const membership = await store.getUserMembership(FAKE_HASH);
+    expect(membership).toEqual({ key1: true, key2: true });
+  });
+
+  it('can get membership for a user that is only excluded', async () => {
+    await setSegments(prefixParam, FAKE_HASH, [], ['key1', 'key2']);
+
+    const membership = await store.getUserMembership(FAKE_HASH);
+    expect(membership).toEqual({ key1: false, key2: false });
+  });
+
+  it('can get membership for a user that is included and excluded', async () => {
+    await setSegments(prefixParam, FAKE_HASH, ['key1', 'key2'], ['key2', 'key3']);
+
+    const membership = await store.getUserMembership(FAKE_HASH);
+    expect(membership).toEqual({ key1: true, key2: true, key3: false }); // include of key2 overrides exclude
+  });
+});
diff --git a/packages/store/node-server-sdk-dynamodb/__tests__/DynamoDBCore.test.ts b/packages/store/node-server-sdk-dynamodb/__tests__/DynamoDBCore.test.ts
new file mode 100644
index 0000000000..53b8091a51
--- /dev/null
+++ b/packages/store/node-server-sdk-dynamodb/__tests__/DynamoDBCore.test.ts
@@ -0,0 +1,312 @@
+import { interfaces } from '@launchdarkly/node-server-sdk';
+import DynamoDBCore, { calculateSize } from '../src/DynamoDBCore';
+import DynamoDBClientState from '../src/DynamoDBClientState';
+import clearPrefix from './clearPrefix';
+import LDDynamoDBOptions from '../src/LDDynamoDBOptions';
+import setupTable from './setupTable';
+
+const DEFAULT_TABLE_NAME = 'test-table';
+
+const featuresKind = { namespace: 'features', deserialize: (data: string) => JSON.parse(data) };
+const segmentsKind = { namespace: 'segments', deserialize: (data: string) => JSON.parse(data) };
+
+const dataKind = {
+  features: featuresKind,
+  segments: segmentsKind,
+};
+
+const
DEFAULT_CLIENT_OPTIONS: LDDynamoDBOptions = { + clientOptions: { + endpoint: 'http://localhost:8000', + region: 'us-west-2', + credentials: { accessKeyId: 'fake', secretAccessKey: 'fake' }, + }, +}; + +function promisify(method: (callback: (val: T) => void) => void): Promise { + return new Promise((resolve) => { + method((val: T) => { + resolve(val); + }); + }); +} + +type UpsertResult = { + err: Error | undefined; + updatedDescriptor: interfaces.SerializedItemDescriptor | undefined; +}; + +class AsyncCoreFacade { + constructor(private readonly core: DynamoDBCore) {} + + init(allData: interfaces.KindKeyedStore): Promise { + return promisify((cb) => this.core.init(allData, cb)); + } + + get( + kind: interfaces.PersistentStoreDataKind, + key: string + ): Promise { + return promisify((cb) => this.core.get(kind, key, cb)); + } + + getAll( + kind: interfaces.PersistentStoreDataKind + ): Promise[] | undefined> { + return promisify((cb) => this.core.getAll(kind, cb)); + } + + upsert( + kind: interfaces.PersistentStoreDataKind, + key: string, + descriptor: interfaces.SerializedItemDescriptor + ): Promise { + return new Promise((resolve) => { + this.core.upsert(kind, key, descriptor, (err, updatedDescriptor) => { + resolve({ err, updatedDescriptor }); + }); + }); + } + + initialized(): Promise { + return promisify((cb) => this.core.initialized(cb)); + } + + close(): void { + this.core.close(); + } + + getDescription(): string { + return this.core.getDescription(); + } +} + +describe('given an empty store', () => { + let core: DynamoDBCore; + let facade: AsyncCoreFacade; + + beforeEach(async () => { + await setupTable(DEFAULT_TABLE_NAME, DEFAULT_CLIENT_OPTIONS.clientOptions!); + await clearPrefix(DEFAULT_TABLE_NAME, 'launchdarkly'); + core = new DynamoDBCore( + DEFAULT_TABLE_NAME, + new DynamoDBClientState(DEFAULT_CLIENT_OPTIONS), + undefined + ); + facade = new AsyncCoreFacade(core); + }); + + afterEach(() => { + core.close(); + }); + + it('is initialized after calling init()', async () => { + await facade.init([]); + const initialized = await facade.initialized(); + expect(initialized).toBeTruthy(); + }); + + it('completely replaces previous data when calling init()', async () => { + const flags = [ + { key: 'first', item: { version: 1, serializedItem: `{"version":1}`, deleted: false } }, + { key: 'second', item: { version: 1, serializedItem: `{"version":1}`, deleted: false } }, + ]; + const segments = [ + { key: 'first', item: { version: 2, serializedItem: `{"version":2}`, deleted: false } }, + ]; + + await facade.init([ + { key: dataKind.features, item: flags }, + { key: dataKind.segments, item: segments }, + ]); + + const items1 = await facade.getAll(dataKind.features); + const items2 = await facade.getAll(dataKind.segments); + + // Reading from the store will not maintain the version. 
+ expect(items1).toEqual([ + { + key: 'first', + item: { version: 1, deleted: false, serializedItem: '{"version":1}' }, + }, + { + key: 'second', + item: { version: 1, deleted: false, serializedItem: '{"version":1}' }, + }, + ]); + expect(items2).toEqual([ + { + key: 'first', + item: { version: 2, deleted: false, serializedItem: '{"version":2}' }, + }, + ]); + + const newFlags = [ + { key: 'first', item: { version: 2, serializedItem: `{"version":2}`, deleted: false } }, + ]; + const newSegments = [ + { key: 'first', item: { version: 3, serializedItem: `{"version":3}`, deleted: false } }, + ]; + + await facade.init([ + { key: dataKind.features, item: newFlags }, + { key: dataKind.segments, item: newSegments }, + ]); + + const items3 = await facade.getAll(dataKind.features); + const items4 = await facade.getAll(dataKind.segments); + + expect(items3).toEqual([ + { + key: 'first', + item: { version: 2, deleted: false, serializedItem: '{"version":2}' }, + }, + ]); + expect(items4).toEqual([ + { + key: 'first', + item: { version: 3, deleted: false, serializedItem: '{"version":3}' }, + }, + ]); + }); +}); + +describe('given a store with basic data', () => { + let core: DynamoDBCore; + let facade: AsyncCoreFacade; + + const feature1 = { key: 'foo', version: 10 }; + const feature2 = { key: 'bar', version: 10 }; + + beforeEach(async () => { + await clearPrefix(DEFAULT_TABLE_NAME, 'launchdarkly'); + core = new DynamoDBCore( + DEFAULT_TABLE_NAME, + new DynamoDBClientState(DEFAULT_CLIENT_OPTIONS), + undefined + ); + const flags = [ + { + key: 'foo', + item: { version: 10, serializedItem: JSON.stringify(feature1), deleted: false }, + }, + { + key: 'bar', + item: { version: 10, serializedItem: JSON.stringify(feature2), deleted: false }, + }, + ]; + const segments: interfaces.KeyedItem[] = []; + + facade = new AsyncCoreFacade(core); + + await facade.init([ + { key: dataKind.features, item: flags }, + { key: dataKind.segments, item: segments }, + ]); + }); + + afterEach(() => { + core.close(); + }); + + it('gets a feature that exists', async () => { + const result = await facade.get(dataKind.features, feature1.key); + expect(result).toEqual({ + version: 10, + deleted: false, + serializedItem: JSON.stringify(feature1), + }); + }); + + it('does not get nonexisting feature', async () => { + const result = await facade.get(dataKind.features, 'biz'); + expect(result).toBeUndefined(); + }); + + it('gets all features', async () => { + const result = await facade.getAll(dataKind.features); + expect(result).toEqual( + expect.arrayContaining([ + { + key: 'foo', + item: { version: 10, serializedItem: JSON.stringify(feature1), deleted: false }, + }, + { + key: 'bar', + item: { version: 10, serializedItem: JSON.stringify(feature2), deleted: false }, + }, + ]) + ); + }); + + it('upserts with newer version', async () => { + const newVer = { key: feature1.key, version: feature1.version + 1 }; + const descriptor = { + version: newVer.version, + deleted: false, + serializedItem: JSON.stringify(newVer), + }; + + await facade.upsert(dataKind.features, newVer.key, descriptor); + const result = await facade.get(dataKind.features, feature1.key); + expect(result).toEqual(descriptor); + }); + + it('does not upsert with older version', async () => { + const oldVer = { key: feature1.key, version: feature1.version - 1 }; + const descriptor = { + version: oldVer.version, + deleted: false, + serializedItem: JSON.stringify(oldVer), + }; + await facade.upsert(dataKind.features, oldVer.key, descriptor); + const result = await 
facade.get(dataKind.features, feature1.key); + expect(result).toEqual({ + version: 10, + deleted: false, + serializedItem: `{"key":"foo","version":10}`, + }); + }); + + it('upserts new feature', async () => { + const newFeature = { key: 'biz', version: 99 }; + const descriptor = { + version: newFeature.version, + deleted: false, + serializedItem: JSON.stringify(newFeature), + }; + await facade.upsert(dataKind.features, newFeature.key, descriptor); + const result = await facade.get(dataKind.features, newFeature.key); + expect(result).toEqual(descriptor); + }); +}); + +it('it can calculate size', () => { + const stringPayload = `{"key":"foo","version":10}`; + + expect( + calculateSize({ + test: { S: stringPayload }, + }) + ).toEqual(100 + 'test'.length + stringPayload.length); + + expect( + calculateSize({ + test: { N: '14' }, + }) + ).toEqual(100 + 'test'.length + 2); + + expect( + calculateSize({ + test: { BOOL: true }, + }) + ).toEqual(100 + 'test'.length + 1); + + expect( + calculateSize({ + bool: { BOOL: true }, + string: { S: stringPayload }, + number: { N: '14' }, + }) + ).toEqual(100 + 'test'.length + 'string'.length + 'number'.length + stringPayload.length + 2 + 1); +}); diff --git a/packages/store/node-server-sdk-dynamodb/__tests__/DynamoDBStore.test.ts b/packages/store/node-server-sdk-dynamodb/__tests__/DynamoDBStore.test.ts new file mode 100644 index 0000000000..0fc022a46a --- /dev/null +++ b/packages/store/node-server-sdk-dynamodb/__tests__/DynamoDBStore.test.ts @@ -0,0 +1,190 @@ +import { AsyncStoreFacade } from '@launchdarkly/node-server-sdk'; +import DynamoDBFeatureStore from '../src/DynamoDBFeatureStore'; +import clearPrefix from './clearPrefix'; +import setupTable from './setupTable'; +import LDDynamoDBOptions from '../src/LDDynamoDBOptions'; + +const dataKind = { + features: { namespace: 'features' }, + segments: { namespace: 'segments' }, +}; + +const DEFAULT_CLIENT_OPTIONS: LDDynamoDBOptions = { + clientOptions: { + endpoint: 'http://localhost:8000', + region: 'us-west-2', + credentials: { accessKeyId: 'fake', secretAccessKey: 'fake' }, + }, +}; + +describe.each([ + ['test-table-1', undefined], + ['test-table-2', 'testing'], +])('given an empty store', (table, prefixParam) => { + let store: DynamoDBFeatureStore; + let facade: AsyncStoreFacade; + + beforeEach(async () => { + await setupTable(table, DEFAULT_CLIENT_OPTIONS.clientOptions!); + await clearPrefix(table, prefixParam); + store = new DynamoDBFeatureStore( + table, + { ...DEFAULT_CLIENT_OPTIONS, prefix: prefixParam }, + undefined + ); + facade = new AsyncStoreFacade(store); + }); + + afterEach(() => { + store.close(); + }); + + it(`is initialized after calling init() ${table}-${prefixParam}`, async () => { + await facade.init({}); + const initialized = await facade.initialized(); + expect(initialized).toBeTruthy(); + }); + + it(`completely replaces previous data when calling init() ${table}-${prefixParam}`, async () => { + const flags = { + first: { key: 'first', version: 1 }, + second: { key: 'second', version: 1 }, + }; + const segments = { first: { key: 'first', version: 2 } }; + const initData1 = { + features: flags, + segments, + }; + + await facade.init(initData1); + const items1 = await facade.all(dataKind.features); + expect(items1).toEqual(flags); + const items2 = await facade.all(dataKind.segments); + expect(items2).toEqual(segments); + + const newFlags = { first: { key: 'first', version: 3 } }; + const newSegments = { first: { key: 'first', version: 4 } }; + const initData2 = { + features: newFlags, + 
segments: newSegments, + }; + + await facade.init(initData2); + const items3 = await facade.all(dataKind.features); + expect(items3).toEqual(newFlags); + const items4 = await facade.all(dataKind.segments); + expect(items4).toEqual(newSegments); + }); +}); + +describe.each([ + ['test-table-1', undefined], + ['test-table-2', 'testing'], +])('given an empty store', (table, prefixParam) => { + let store: DynamoDBFeatureStore; + let facade: AsyncStoreFacade; + + const feature1 = { key: 'foo', version: 10 }; + const feature2 = { key: 'bar', version: 10 }; + beforeEach(async () => { + await setupTable(table, DEFAULT_CLIENT_OPTIONS.clientOptions!); + await clearPrefix(table, prefixParam); + store = new DynamoDBFeatureStore( + table, + { ...DEFAULT_CLIENT_OPTIONS, prefix: prefixParam }, + undefined + ); + facade = new AsyncStoreFacade(store); + await facade.init({ + features: { + foo: feature1, + bar: feature2, + }, + segments: {}, + }); + }); + + afterEach(() => { + store.close(); + }); + + it('gets a feature that exists', async () => { + const result = await facade.get(dataKind.features, feature1.key); + expect(result).toEqual(feature1); + }); + + it('does not get nonexisting feature', async () => { + const result = await facade.get(dataKind.features, 'biz'); + expect(result).toBeNull(); + }); + + it('gets all features', async () => { + const result = await facade.all(dataKind.features); + expect(result).toEqual({ + foo: feature1, + bar: feature2, + }); + }); + + it('upserts with newer version', async () => { + const newVer = { key: feature1.key, version: feature1.version + 1 }; + + await facade.upsert(dataKind.features, newVer); + const result = await facade.get(dataKind.features, feature1.key); + expect(result).toEqual(newVer); + }); + + it('does not upsert with older version', async () => { + const oldVer = { key: feature1.key, version: feature1.version - 1 }; + await facade.upsert(dataKind.features, oldVer); + const result = await facade.get(dataKind.features, feature1.key); + expect(result).toEqual(feature1); + }); + + it('upserts new feature', async () => { + const newFeature = { key: 'biz', version: 99 }; + await facade.upsert(dataKind.features, newFeature); + const result = await facade.get(dataKind.features, newFeature.key); + expect(result).toEqual(newFeature); + }); + + it('handles upsert race condition within same client correctly', async () => { + const ver1 = { key: feature1.key, version: feature1.version + 1 }; + const ver2 = { key: feature1.key, version: feature1.version + 2 }; + const promises: Promise[] = []; + // Deliberately do not wait for the first upsert to complete before starting the second, + // so their transactions will be interleaved unless we're correctly serializing updates + promises.push(facade.upsert(dataKind.features, ver2)); + promises.push(facade.upsert(dataKind.features, ver1)); + + // Now wait until both have completed + await Promise.all(promises); + const result = await facade.get(dataKind.features, feature1.key); + expect(result).toEqual(ver2); + }); + + it('deletes with newer version', async () => { + await facade.delete(dataKind.features, feature1.key, feature1.version + 1); + const result = await facade.get(dataKind.features, feature1.key); + expect(result).toBe(null); + }); + + it('does not delete with older version', async () => { + await facade.delete(dataKind.features, feature1.key, feature1.version - 1); + const result = await facade.get(dataKind.features, feature1.key); + expect(result).not.toBe(null); + }); + + it('allows deleting unknown 
feature', async () => { + await facade.delete(dataKind.features, 'biz', 99); + const result = await facade.get(dataKind.features, 'biz'); + expect(result).toBe(null); + }); + + it('does not upsert older version after delete', async () => { + await facade.delete(dataKind.features, feature1.key, feature1.version + 1); + await facade.upsert(dataKind.features, feature1); + const result = await facade.get(dataKind.features, feature1.key); + expect(result).toBe(null); + }); +}); diff --git a/packages/store/node-server-sdk-dynamodb/__tests__/clearPrefix.ts b/packages/store/node-server-sdk-dynamodb/__tests__/clearPrefix.ts new file mode 100644 index 0000000000..535451f18b --- /dev/null +++ b/packages/store/node-server-sdk-dynamodb/__tests__/clearPrefix.ts @@ -0,0 +1,42 @@ +import { DynamoDBClient, paginateScan, WriteRequest } from '@aws-sdk/client-dynamodb'; +import DynamoDBClientState from '../src/DynamoDBClientState'; + +export default async function clearPrefix(table: string, prefix?: string) { + const actualPrefix = prefix ? `${prefix}:` : ''; + + const client = new DynamoDBClient({ + endpoint: 'http://localhost:8000', + region: 'us-west-2', + credentials: { accessKeyId: 'fake', secretAccessKey: 'fake' }, + }); + + const state = new DynamoDBClientState({ dynamoDBClient: client }); + + const deleteOps: WriteRequest[] = []; + + // Using a generator here is a substantial ergonomic improvement and this is a test file. + // eslint-disable-next-line no-restricted-syntax + for await (const page of paginateScan( + { client }, + { + TableName: table, + } + )) { + page?.Items?.forEach((item) => { + if (item?.namespace?.S?.startsWith(actualPrefix)) { + deleteOps.push({ + DeleteRequest: { + Key: { + namespace: item.namespace, + key: item.key, + }, + }, + }); + } + }); + } + + if (deleteOps.length) { + await state.batchWrite(table, deleteOps); + } +} diff --git a/packages/store/node-server-sdk-dynamodb/__tests__/setupTable.ts b/packages/store/node-server-sdk-dynamodb/__tests__/setupTable.ts new file mode 100644 index 0000000000..2329e2c890 --- /dev/null +++ b/packages/store/node-server-sdk-dynamodb/__tests__/setupTable.ts @@ -0,0 +1,27 @@ +import { CreateTableCommand, DynamoDBClient, DynamoDBClientConfig } from '@aws-sdk/client-dynamodb'; + +export default async function setupTable(tableName: string, options: DynamoDBClientConfig) { + const client = new DynamoDBClient(options); + try { + await client.send( + new CreateTableCommand({ + TableName: tableName, + AttributeDefinitions: [ + { AttributeName: 'namespace', AttributeType: 'S' }, + { AttributeName: 'key', AttributeType: 'S' }, + ], + KeySchema: [ + { AttributeName: 'namespace', KeyType: 'HASH' }, + { AttributeName: 'key', KeyType: 'RANGE' }, // Sort key + ], + ProvisionedThroughput: { + ReadCapacityUnits: 10, + WriteCapacityUnits: 10, + }, + }) + ); + } catch (err) { + // Table probably existed. 
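+    // (CreateTableCommand throws a ResourceInUseException when the table already exists,
+    // which is safe to ignore for local test setup.)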
+  }
+  client.destroy();
+}
diff --git a/packages/store/node-server-sdk-dynamodb/jest.config.js b/packages/store/node-server-sdk-dynamodb/jest.config.js
new file mode 100644
index 0000000000..f106eb3bc9
--- /dev/null
+++ b/packages/store/node-server-sdk-dynamodb/jest.config.js
@@ -0,0 +1,7 @@
+module.exports = {
+  transform: { '^.+\\.ts?$': 'ts-jest' },
+  testMatch: ['**/__tests__/**/*test.ts?(x)'],
+  testEnvironment: 'node',
+  moduleFileExtensions: ['ts', 'tsx', 'js', 'jsx', 'json', 'node'],
+  collectCoverageFrom: ['src/**/*.ts'],
+};
diff --git a/packages/store/node-server-sdk-dynamodb/package.json b/packages/store/node-server-sdk-dynamodb/package.json
new file mode 100644
index 0000000000..97be575053
--- /dev/null
+++ b/packages/store/node-server-sdk-dynamodb/package.json
@@ -0,0 +1,53 @@
+{
+  "name": "@launchdarkly/node-server-sdk-dynamodb",
+  "version": "0.0.0",
+  "description": "DynamoDB-backed feature store for the LaunchDarkly Server-Side SDK for Node.js",
+  "homepage": "https://github.com/launchdarkly/js-core/tree/main/packages/store/node-server-sdk-dynamodb",
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/launchdarkly/js-core.git"
+  },
+  "type": "commonjs",
+  "main": "./dist/src/index.js",
+  "types": "./dist/src/index.d.ts",
+  "files": [
+    "dist"
+  ],
+  "keywords": [
+    "launchdarkly",
+    "analytics",
+    "client"
+  ],
+  "license": "Apache-2.0",
+  "scripts": {
+    "clean": "npx tsc --build --clean",
+    "test": "npx jest --ci --runInBand",
+    "build": "npx tsc",
+    "lint": "npx eslint . --ext .ts",
+    "lint:fix": "yarn run lint --fix",
+    "doc": "../../../scripts/build-doc.sh ."
+  },
+  "peerDependencies": {
+    "@aws-sdk/client-dynamodb": "3.348.0",
+    "@launchdarkly/node-server-sdk": "0.4.4"
+  },
+  "devDependencies": {
+    "@aws-sdk/client-dynamodb": "3.348.0",
+    "@launchdarkly/node-server-sdk": "0.4.4",
+    "@types/jest": "^29.4.0",
+    "@typescript-eslint/eslint-plugin": "^5.22.0",
+    "@typescript-eslint/parser": "^5.22.0",
+    "eslint": "^8.14.0",
+    "eslint-config-airbnb-base": "^15.0.0",
+    "eslint-config-airbnb-typescript": "^17.0.0",
+    "eslint-config-prettier": "^8.7.0",
+    "eslint-plugin-import": "^2.26.0",
+    "eslint-plugin-prettier": "^4.2.1",
+    "jest": "^29.5.0",
+    "launchdarkly-js-test-helpers": "^2.2.0",
+    "prettier": "^2.8.4",
+    "ts-jest": "^29.0.5",
+    "typedoc": "0.23.26",
+    "typescript": "^4.6.3"
+  }
+}
diff --git a/packages/store/node-server-sdk-dynamodb/src/DynamoDBBigSegmentStore.ts b/packages/store/node-server-sdk-dynamodb/src/DynamoDBBigSegmentStore.ts
new file mode 100644
index 0000000000..5eed69b5e5
--- /dev/null
+++ b/packages/store/node-server-sdk-dynamodb/src/DynamoDBBigSegmentStore.ts
@@ -0,0 +1,92 @@
+import { LDLogger, interfaces } from '@launchdarkly/node-server-sdk';
+import LDDynamoDBOptions from './LDDynamoDBOptions';
+import DynamoDBClientState from './DynamoDBClientState';
+import { stringValue } from './Value';
+
+/**
+ * Exported for testing.
+ * @internal
+ */
+export const KEY_METADATA = 'big_segments_metadata';
+
+/**
+ * Exported for testing.
+ * @internal
+ */
+export const KEY_USER_DATA = 'big_segments_user';
+
+/**
+ * Exported for testing.
+ * @internal
+ */
+export const ATTR_SYNC_ON = 'synchronizedOn';
+
+/**
+ * Exported for testing.
+ * @internal
+ */
+export const ATTR_INCLUDED = 'included';
+
+/**
+ * Exported for testing.
+ * @internal + */ +export const ATTR_EXCLUDED = 'excluded'; + +export default class DynamoDBBigSegmentStore implements interfaces.BigSegmentStore { + private state: DynamoDBClientState; + + // Logger is not currently used, but is included to reduce the chance of a + // compatibility break to add a log. + // eslint-disable-next-line @typescript-eslint/no-unused-vars + constructor( + private readonly tableName: string, + options?: LDDynamoDBOptions, + private readonly logger?: LDLogger + ) { + this.state = new DynamoDBClientState(options); + } + + async getMetadata(): Promise { + const key = this.state.prefixedKey(KEY_METADATA); + const data = await this.state.get(this.tableName, { + namespace: stringValue(key), + key: stringValue(key), + }); + if (data) { + const attr = data[ATTR_SYNC_ON]; + if (attr && attr.N) { + return { lastUpToDate: parseInt(attr.N!, 10) }; + } + } + return {}; + } + + async getUserMembership( + userHash: string + ): Promise { + const data = await this.state.get(this.tableName, { + namespace: stringValue(this.state.prefixedKey(KEY_USER_DATA)), + key: stringValue(userHash), + }); + if (data) { + const excludedRefs = data[ATTR_EXCLUDED]; + const includedRefs = data[ATTR_INCLUDED]; + + const membership: interfaces.BigSegmentStoreMembership = {}; + + excludedRefs?.SS?.forEach((ref) => { + membership[ref] = false; + }); + includedRefs?.SS?.forEach((ref) => { + membership[ref] = true; + }); + return membership; + } + return undefined; + } + + close(): void { + this.state.close(); + } +} diff --git a/packages/store/node-server-sdk-dynamodb/src/DynamoDBBigSegmentStoreFactory.ts b/packages/store/node-server-sdk-dynamodb/src/DynamoDBBigSegmentStoreFactory.ts new file mode 100644 index 0000000000..b4493514e9 --- /dev/null +++ b/packages/store/node-server-sdk-dynamodb/src/DynamoDBBigSegmentStoreFactory.ts @@ -0,0 +1,22 @@ +import { LDOptions, interfaces } from '@launchdarkly/node-server-sdk'; +import LDDynamoDBOptions from './LDDynamoDBOptions'; +import DynamoDBBigSegmentStore from './DynamoDBBigSegmentStore'; + +/** + * Configures a big segment store factory backed by a DynamoDB instance. + * + * "Big segments" are a specific type of user segments. For more information, read the + * LaunchDarkly documentation about user segments: https://docs.launchdarkly.com/home/users/segments + * + * @param tableName The table name in DynamoDB (required). The table must already exist. + * See: https://docs.launchdarkly.com/sdk/features/storing-data/dynamodb + * @param options Optional configuration (required), please refer to {@link LDDynamoDBOptions}. + * + * @returns A function which creates big segment stores based on the provided config. 
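+ *
+ * @example
+ * A sketch of how the returned factory is typically wired into the SDK configuration's
+ * big segments options (the table name here is a placeholder):
+ * ```
+ * const options = {
+ *   bigSegments: { store: DynamoDBBigSegmentStoreFactory('YOUR TABLE NAME') },
+ * };
+ * ```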
+ */ +export default function DynamoDBBigSegmentStoreFactory( + tableName: string, + options?: LDDynamoDBOptions +): (config: LDOptions) => interfaces.BigSegmentStore { + return (config: LDOptions) => new DynamoDBBigSegmentStore(tableName, options, config.logger); +} diff --git a/packages/store/node-server-sdk-dynamodb/src/DynamoDBClientState.ts b/packages/store/node-server-sdk-dynamodb/src/DynamoDBClientState.ts new file mode 100644 index 0000000000..57d2af34ef --- /dev/null +++ b/packages/store/node-server-sdk-dynamodb/src/DynamoDBClientState.ts @@ -0,0 +1,125 @@ +import { + AttributeValue, + BatchWriteItemCommand, + ConditionalCheckFailedException, + DynamoDBClient, + GetItemCommand, + PutItemCommand, + PutItemCommandInput, + QueryCommandInput, + WriteRequest, + paginateQuery, +} from '@aws-sdk/client-dynamodb'; +import LDDynamoDBOptions from './LDDynamoDBOptions'; + +// Unlike some other database integrations where the key prefix is mandatory and has +// a default value, in DynamoDB it is fine to not have a prefix. If there is one, we +// prepend it to keys with a ':' separator. +const DEFAULT_PREFIX = ''; + +// BatchWrite can only accept 25 items at a time, so split up the writes into batches of 25. +const WRITE_BATCH_SIZE = 25; + +/** + * Class for managing the state of a dynamodb client. + * + * Used for the dynamodb persistent store as well as the dynamodb big segment store. + * + * @internal + */ +export default class DynamoDBClientState { + // This will include the ':' if a prefix is set. + private prefix: string; + + private client: DynamoDBClient; + + private owned: boolean; + + constructor(options?: LDDynamoDBOptions) { + this.prefix = options?.prefix ? `${options!.prefix}:` : DEFAULT_PREFIX; + + // We track if we own the client so that we can destroy clients that we own. + if (options?.dynamoDBClient) { + this.client = options.dynamoDBClient; + this.owned = false; + } else if (options?.clientOptions) { + this.client = new DynamoDBClient(options.clientOptions); + this.owned = true; + } else { + this.client = new DynamoDBClient({}); + this.owned = true; + } + } + + /** + * Get a key with prefix prepended. + * @param key The key to prefix. + * @returns The prefixed key. + */ + prefixedKey(key: string): string { + return `${this.prefix}${key}`; + } + + async query(params: QueryCommandInput): Promise[]> { + const records: Record[] = []; + // Using a generator here is a substantial ergonomic improvement. + // eslint-disable-next-line no-restricted-syntax + for await (const page of paginateQuery({ client: this.client }, params)) { + if (page.Items) { + records.push(...page.Items); + } + } + return records; + } + + async batchWrite(table: string, params: WriteRequest[]) { + const batches: WriteRequest[][] = []; + // Split into batches of at most 25 commands. + while (params.length) { + batches.push(params.splice(0, WRITE_BATCH_SIZE)); + } + + // Execute all the batches and wait for them to complete. + await Promise.all( + batches.map((batch) => + this.client.send( + new BatchWriteItemCommand({ + RequestItems: { [table]: batch }, + }) + ) + ) + ); + } + + async get( + table: string, + key: Record + ): Promise | undefined> { + const res = await this.client.send( + new GetItemCommand({ + TableName: table, + Key: key, + }) + ); + + return res.Item; + } + + async put(params: PutItemCommandInput): Promise { + try { + await this.client.send(new PutItemCommand(params)); + } catch (err) { + // If we couldn't upsert because of the version, then that is fine. + // Otherwise we return failure. 
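+      // (ConditionalCheckFailedException is what DynamoDB raises when a ConditionExpression,
+      // such as the version check used for upserts, is not satisfied.)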
+ if (!(err instanceof ConditionalCheckFailedException)) { + throw err; + } + } + } + + close() { + if (this.owned) { + this.client.destroy(); + } + } +} diff --git a/packages/store/node-server-sdk-dynamodb/src/DynamoDBCore.ts b/packages/store/node-server-sdk-dynamodb/src/DynamoDBCore.ts new file mode 100644 index 0000000000..b18c39a67f --- /dev/null +++ b/packages/store/node-server-sdk-dynamodb/src/DynamoDBCore.ts @@ -0,0 +1,283 @@ +import { LDLogger, interfaces } from '@launchdarkly/node-server-sdk'; +import { AttributeValue, QueryCommandInput, WriteRequest } from '@aws-sdk/client-dynamodb'; +import DynamoDBClientState from './DynamoDBClientState'; +import { stringValue, numberValue, boolValue } from './Value'; + +// We won't try to store items whose total size exceeds this. The DynamoDB documentation says +// only "400KB", which probably means 400*1024, but to avoid any chance of trying to store a +// too-large item we are rounding it down. +const DYNAMODB_MAX_SIZE = 400000; + +/** + * Exported for testing. + * @internal + */ +export function calculateSize(item: Record, logger?: LDLogger) { + return Object.entries(item).reduce((prev, [key, value]) => { + // see: https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/CapacityUnitCalculations.html + if (value.S) { + return prev + key.length + Buffer.byteLength(value.S); + } + if (value.N) { + // Numeric values will be over-estimated compared to the DynamoDB size calculation. + return prev + key.length + Buffer.byteLength(value.N); + } + if (value.BOOL) { + return prev + key.length + 1; + } + logger?.warn('Unrecognized type in size calculation'); + return prev; + }, 100); +} + +/** + * Internal implementation of the DynamoDB feature store. + * + * Implementation notes: + * + * Feature flags, segments, and any other kind of entity the LaunchDarkly client may wish + * to store, are all put in the same table. The only two required attributes are "key" (which + * is present in all store-able entities) and "namespace" (a parameter from the client that is + * used to disambiguate between flags and segments). + * + * Because of DynamoDB's restrictions on attribute values (e.g. empty strings are not + * allowed), the standard DynamoDB marshaling mechanism with one attribute per object property + * is not used. Instead, the entire object is serialized to JSON and stored in a single + * attribute, "item". The "version" property is also stored as a separate attribute since it + * is used for updates. + * + * Since DynamoDB doesn't have transactions, the init method - which replaces the entire data + * store - is not atomic, so there can be a race condition if another process is adding new data + * via upsert. To minimize this, we don't delete all the data at the start; instead, we update + * the items we've received, and then delete all other items. That could potentially result in + * deleting new data from another process, but that would be the case anyway if the init + * happened to execute later than the upsert(); we are relying on the fact that normally the + * process that did the init() will also receive the new data shortly and do its own upsert. + * + * DynamoDB has a maximum item size of 400KB. Since each feature flag or user segment is + * stored as a single item, this mechanism will not work for extremely large flags or segments. 
+ * @internal + */ +export default class DynamoDBCore implements interfaces.PersistentDataStore { + constructor( + private readonly tableName: string, + private readonly state: DynamoDBClientState, + private readonly logger?: LDLogger + ) {} + + private initializedToken() { + const prefixed = stringValue(this.state.prefixedKey('$inited')); + return { namespace: prefixed, key: prefixed }; + } + + /** + * For a set of init data read all the existing data with matching namespaces. + * @param allData A set of init data. + * @returns A list of all data with matching namespaces. + */ + private async readExistingItems( + allData: interfaces.KindKeyedStore + ) { + const promises = allData.map((kind) => { + const { namespace } = kind.key; + return this.state.query(this.queryParamsForNamespace(namespace)); + }); + + const records = (await Promise.all(promises)).flat(); + return records; + } + + /** + * Mashal a SerializedItemDescriptor into an item for the DB. + * @param kind The kind of the data. + * @param item The item to marshal. + * @returns The marshalled data. + */ + private marshalItem( + kind: interfaces.PersistentStoreDataKind, + item: interfaces.KeyedItem + ): Record { + const dbItem: Record = { + namespace: stringValue(this.state.prefixedKey(kind.namespace)), + key: stringValue(item.key), + version: numberValue(item.item.version), + }; + if (item.item.serializedItem) { + dbItem.item = stringValue(item.item.serializedItem); + } + return dbItem; + } + + private unmarshalItem( + dbItem: Record + ): interfaces.SerializedItemDescriptor { + return { + // Version should exist. + version: parseInt(dbItem.version?.N || '0', 10), + // These fields may be undefined, and that is fine. + deleted: !!dbItem.deleted?.BOOL, + serializedItem: dbItem.item?.S, + }; + } + + async init( + allData: interfaces.KindKeyedStore, + callback: () => void + ) { + const items = await this.readExistingItems(allData); + + // Make a key from an existing DB item. + function makeNamespaceKey(item: Record) { + return `${item.namespace.S}$${item.key.S}`; + } + + const existingNamespaceKeys: { [key: string]: boolean } = {}; + items.forEach((item) => { + existingNamespaceKeys[makeNamespaceKey(item)] = true; + }); + delete existingNamespaceKeys[makeNamespaceKey(this.initializedToken())]; + + // Generate a list of write operations, and then execute them in a batch. + const ops: WriteRequest[] = []; + + allData.forEach((collection) => { + collection.item.forEach((item) => { + const dbItem = this.marshalItem(collection.key, item); + if (this.checkSizeLimit(dbItem)) { + delete existingNamespaceKeys[ + `${this.state.prefixedKey(collection.key.namespace)}$${item.key}` + ]; + ops.push({ PutRequest: { Item: dbItem } }); + } + }); + }); + + // Remove existing data that is not in the new list. + Object.keys(existingNamespaceKeys).forEach((namespaceKey) => { + const namespaceAndKey = namespaceKey.split('$'); + ops.push({ + DeleteRequest: { + Key: { namespace: stringValue(namespaceAndKey[0]), key: stringValue(namespaceAndKey[1]) }, + }, + }); + }); + + // Always write the initialized token when we initialize. 
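+    // (The token is an item whose namespace and key are both the prefixed '$inited' value;
+    // initialized() later checks for its presence.)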
+ ops.push({ PutRequest: { Item: this.initializedToken() } }); + + await this.state.batchWrite(this.tableName, ops); + callback(); + } + + async get( + kind: interfaces.PersistentStoreDataKind, + key: string, + callback: (descriptor: interfaces.SerializedItemDescriptor | undefined) => void + ) { + const read = await this.state.get(this.tableName, { + namespace: stringValue(this.state.prefixedKey(kind.namespace)), + key: stringValue(key), + }); + if (read) { + callback(this.unmarshalItem(read)); + } else { + callback(undefined); + } + } + + async getAll( + kind: interfaces.PersistentStoreDataKind, + callback: ( + descriptors: interfaces.KeyedItem[] | undefined + ) => void + ) { + const params = this.queryParamsForNamespace(kind.namespace); + const results = await this.state.query(params); + callback(results.map((record) => ({ key: record!.key!.S!, item: this.unmarshalItem(record) }))); + } + + async upsert( + kind: interfaces.PersistentStoreDataKind, + key: string, + descriptor: interfaces.SerializedItemDescriptor, + callback: ( + err?: Error | undefined, + updatedDescriptor?: interfaces.SerializedItemDescriptor | undefined + ) => void + ) { + const params = this.makeVersionedPutRequest(kind, { key, item: descriptor }); + if (!this.checkSizeLimit(params.Item)) { + // We deliberately don't report this back to the SDK as an error, because we don't want to trigger any + // useless retry behavior. We just won't do the update. + callback(); + return; + } + + try { + await this.state.put(params); + this.get(kind, key, (readDescriptor) => { + callback(undefined, readDescriptor); + }); + } catch (err) { + callback(err as Error, undefined); + } + } + + async initialized(callback: (isInitialized: boolean) => void) { + let initialized = false; + try { + const token = this.initializedToken(); + const data = await this.state.get(this.tableName, token); + initialized = !!(data?.key?.S === token.key.S); + } catch (err) { + this.logger?.error(`Error reading inited: ${err}`); + initialized = false; + } + // Callback outside the try. In case it raised an exception. 
+ callback(initialized); + } + + close(): void { + this.state.close(); + } + + getDescription(): string { + return 'DynamoDB'; + } + + private queryParamsForNamespace(namespace: string): QueryCommandInput { + return { + TableName: this.tableName, + KeyConditionExpression: 'namespace = :namespace', + FilterExpression: 'attribute_not_exists(deleted) OR deleted = :deleted', + ExpressionAttributeValues: { + ':namespace': stringValue(this.state.prefixedKey(namespace)), + ':deleted': boolValue(false), + }, + }; + } + + private makeVersionedPutRequest( + kind: interfaces.PersistentStoreDataKind, + item: interfaces.KeyedItem + ) { + return { + TableName: this.tableName, + Item: this.marshalItem(kind, item), + ConditionExpression: 'attribute_not_exists(version) OR version < :new_version', + ExpressionAttributeValues: { ':new_version': numberValue(item.item.version) }, + }; + } + + private checkSizeLimit(item: Record) { + const size = calculateSize(item); + + if (size <= DYNAMODB_MAX_SIZE) { + return true; + } + this.logger?.error( + `The item "${item.key.S}" in "${item.namespace.S}" was too large to store in DynamoDB and was dropped` + ); + return false; + } +} diff --git a/packages/store/node-server-sdk-dynamodb/src/DynamoDBFeatureStore.ts b/packages/store/node-server-sdk-dynamodb/src/DynamoDBFeatureStore.ts new file mode 100644 index 0000000000..220f7a1c68 --- /dev/null +++ b/packages/store/node-server-sdk-dynamodb/src/DynamoDBFeatureStore.ts @@ -0,0 +1,64 @@ +import { + interfaces, + LDFeatureStore, + LDFeatureStoreDataStorage, + LDFeatureStoreItem, + LDFeatureStoreKindData, + LDKeyedFeatureStoreItem, + LDLogger, + PersistentDataStoreWrapper, +} from '@launchdarkly/node-server-sdk'; +import LDDynamoDBOptions from './LDDynamoDBOptions'; +import DynamoDBCore from './DynamoDBCore'; +import DynamoDBClientState from './DynamoDBClientState'; +import TtlFromOptions from './TtlFromOptions'; + +/** + * Integration between the LaunchDarkly SDK and DynamoDB. 
+ */ +export default class DynamoDBFeatureStore implements LDFeatureStore { + private wrapper: PersistentDataStoreWrapper; + + constructor(tableName: string, options?: LDDynamoDBOptions, logger?: LDLogger) { + this.wrapper = new PersistentDataStoreWrapper( + new DynamoDBCore(tableName, new DynamoDBClientState(options), logger), + TtlFromOptions(options) + ); + } + + get( + kind: interfaces.DataKind, + key: string, + callback: (res: LDFeatureStoreItem | null) => void + ): void { + this.wrapper.get(kind, key, callback); + } + + all(kind: interfaces.DataKind, callback: (res: LDFeatureStoreKindData) => void): void { + this.wrapper.all(kind, callback); + } + + init(allData: LDFeatureStoreDataStorage, callback: () => void): void { + this.wrapper.init(allData, callback); + } + + delete(kind: interfaces.DataKind, key: string, version: number, callback: () => void): void { + this.wrapper.delete(kind, key, version, callback); + } + + upsert(kind: interfaces.DataKind, data: LDKeyedFeatureStoreItem, callback: () => void): void { + this.wrapper.upsert(kind, data, callback); + } + + initialized(callback: (isInitialized: boolean) => void): void { + this.wrapper.initialized(callback); + } + + close(): void { + this.wrapper.close(); + } + + getDescription?(): string { + return this.wrapper.getDescription(); + } +} diff --git a/packages/store/node-server-sdk-dynamodb/src/DynamoDBFeatureStoreFactory.ts b/packages/store/node-server-sdk-dynamodb/src/DynamoDBFeatureStoreFactory.ts new file mode 100644 index 0000000000..1c4868bd59 --- /dev/null +++ b/packages/store/node-server-sdk-dynamodb/src/DynamoDBFeatureStoreFactory.ts @@ -0,0 +1,24 @@ +import { LDClientContext } from '@launchdarkly/node-server-sdk'; +import DynamoDBFeatureStore from './DynamoDBFeatureStore'; +import LDDynamoDBOptions from './LDDynamoDBOptions'; + +/** + * Configures a feature store backed by a DynamoDB instance. + * + * For more details about how and why you can use a persistent feature store, see + * the [Using DynamoDB as a persistent feature store](https://docs.launchdarkly.com/sdk/features/storing-data/dynamodb#nodejs-server-side). + * + * @param tableName The table name in DynamoDB (required). The table must already exist. + * See: https://docs.launchdarkly.com/sdk/features/storing-data/dynamodb + * @param options Optional configuration, please refer to {@link LDDynamoDBOptions}. + * + * @returns + * A factory function suitable for use in the SDK configuration (LDOptions). + */ +export default function DynamoDBFeatureStoreFactory( + tableName: string, + options?: LDDynamoDBOptions +) { + return (config: LDClientContext) => + new DynamoDBFeatureStore(tableName, options, config.basicConfiguration.logger); +} diff --git a/packages/store/node-server-sdk-dynamodb/src/LDDynamoDBOptions.ts b/packages/store/node-server-sdk-dynamodb/src/LDDynamoDBOptions.ts new file mode 100644 index 0000000000..bcd78d4c66 --- /dev/null +++ b/packages/store/node-server-sdk-dynamodb/src/LDDynamoDBOptions.ts @@ -0,0 +1,44 @@ +import { LDLogger } from '@launchdarkly/node-server-sdk'; +import { DynamoDBClient, DynamoDBClientConfig } from '@aws-sdk/client-dynamodb'; + +/** + * Options for configuring {@link DynamoDBFeatureStoreFactory} or {@link DynamoDBBigSegmentStoreFactory}. + */ +export default interface LDDynamoDBOptions { + /** + * Options to be passed to the DynamoDB client constructor, as defined by the AWS SDK. 
+ */ + clientOptions?: DynamoDBClientConfig; + + /** + * Specifies an existing, already-configured DynamoDB client instance that the feature store + * should use rather than creating one of its own. If you specify an existing client, then the + * clientOptions property is ignored. + */ + dynamoDBClient?: DynamoDBClient; + + /** + * An optional namespace prefix for all keys stored in DynamoDB. Use this if you are sharing + * the same database table between multiple clients that are for different LaunchDarkly + * environments, to avoid key collisions. + */ + prefix?: string; + + /** + * The amount of time, in seconds, that recently read or updated items should remain in an + * in-memory cache. If it is zero, there will be no in-memory caching. + * + * This parameter applies only to {@link DynamoDBFeatureStore}. It is ignored for {@link DynamoDBBigSegmentStore}. + * Caching for {@link DynamoDBBigSegmentStore} is configured separately, in the SDK's + * `LDBigSegmentsOptions` type, since it is independent of what database implementation is used. + * + * If omitted, the default value is 15 seconds. + */ + cacheTTL?: number; + + /** + * A logger to be used for warnings and errors generated by the feature store. If not specified, + * it will use the SDK's logging configuration. + */ + logger?: LDLogger; +} diff --git a/packages/store/node-server-sdk-dynamodb/src/TtlFromOptions.ts b/packages/store/node-server-sdk-dynamodb/src/TtlFromOptions.ts new file mode 100644 index 0000000000..231505fc87 --- /dev/null +++ b/packages/store/node-server-sdk-dynamodb/src/TtlFromOptions.ts @@ -0,0 +1,21 @@ +import LDDynamoDBOptions from './LDDynamoDBOptions'; + +/** + * The default TTL cache time in seconds. + */ +const DEFAULT_CACHE_TTL_S = 15; + +/** + * Get a cache TTL based on LDDynamoDBOptions. If the TTL is not specified, then + * the default of 15 seconds will be used. + * @param options The options to get a TTL for. + * @returns The TTL, in seconds. + * @internal + */ +export default function TtlFromOptions(options?: LDDynamoDBOptions): number { + // 0 is a valid option. So we need a null/undefined check. + if (options?.cacheTTL === undefined || options.cacheTTL === null) { + return DEFAULT_CACHE_TTL_S; + } + return options!.cacheTTL; +} diff --git a/packages/store/node-server-sdk-dynamodb/src/Value.ts b/packages/store/node-server-sdk-dynamodb/src/Value.ts new file mode 100644 index 0000000000..d135c6aea4 --- /dev/null +++ b/packages/store/node-server-sdk-dynamodb/src/Value.ts @@ -0,0 +1,11 @@ +import { AttributeValue } from '@aws-sdk/client-dynamodb'; + +export function stringValue(val: string): AttributeValue { + return { S: val }; +} +export function boolValue(val: boolean): AttributeValue { + return { BOOL: val }; +} +export function numberValue(val: number): AttributeValue { + return { N: val.toString(10) }; +} diff --git a/packages/store/node-server-sdk-dynamodb/src/index.ts b/packages/store/node-server-sdk-dynamodb/src/index.ts new file mode 100644 index 0000000000..0fe088d5db --- /dev/null +++ b/packages/store/node-server-sdk-dynamodb/src/index.ts @@ -0,0 +1,7 @@ +// Exporting the factories without the 'Factory'. This keeps them in-line with +// previous store versions. The differentiation between the factory and the store +// is not critical for consuming the SDK. 
+export { default as DynamoDBFeatureStore } from './DynamoDBFeatureStoreFactory'; +export { default as DynamoDBBigSegmentStore } from './DynamoDBBigSegmentStoreFactory'; + +export { default as LDDynamoDBOptions } from './LDDynamoDBOptions'; diff --git a/packages/store/node-server-sdk-dynamodb/tsconfig.eslint.json b/packages/store/node-server-sdk-dynamodb/tsconfig.eslint.json new file mode 100644 index 0000000000..56c9b38305 --- /dev/null +++ b/packages/store/node-server-sdk-dynamodb/tsconfig.eslint.json @@ -0,0 +1,5 @@ +{ + "extends": "./tsconfig.json", + "include": ["/**/*.ts"], + "exclude": ["node_modules"] +} diff --git a/packages/store/node-server-sdk-dynamodb/tsconfig.json b/packages/store/node-server-sdk-dynamodb/tsconfig.json new file mode 100644 index 0000000000..1b9ac79b8c --- /dev/null +++ b/packages/store/node-server-sdk-dynamodb/tsconfig.json @@ -0,0 +1,21 @@ +{ + "compilerOptions": { + // Uses "." so it can load package.json. + "rootDir": ".", + "outDir": "dist", + "target": "es6", + "lib": ["es6"], + "module": "commonjs", + "strict": true, + "noImplicitOverride": true, + // Needed for CommonJS modules: markdown-it, fs-extra + "allowSyntheticDefaultImports": true, + "sourceMap": true, + "declaration": true, + "declarationMap": true, // enables importers to jump to source + "resolveJsonModule": true, + "stripInternal": true, + "moduleResolution": "node" + }, + "exclude": ["**/*.test.ts", "dist", "node_modules", "__tests__"] +} diff --git a/tsconfig.json b/tsconfig.json index 09b46a8da1..93c61e83e4 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -33,6 +33,9 @@ }, { "path": "./packages/store/node-server-sdk-redis/tsconfig.ref.json" + }, + { + "path": "./packages/store/node-server-sdk-dynamodb/tsconfig.ref.json" } ] }