From 7d77faccd80dc99f00d7757578a7e9be9cb757c4 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Wed, 9 Oct 2024 12:17:12 -0300 Subject: [PATCH 01/30] Draft implementation of loadData and getSnapshot methods --- CHANGES.txt | 2 + src/sdkFactory/index.ts | 2 +- src/storages/__tests__/dataLoader.spec.ts | 31 +++++++ src/storages/dataLoader.ts | 94 ++++++++++++++-------- src/storages/inMemory/InMemoryStorageCS.ts | 23 +++++- src/storages/types.ts | 2 - src/types.ts | 17 ++-- 7 files changed, 124 insertions(+), 47 deletions(-) create mode 100644 src/storages/__tests__/dataLoader.spec.ts diff --git a/CHANGES.txt b/CHANGES.txt index 4c333159..0ab19c90 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -1,6 +1,8 @@ 2.0.0 (October XX, 2024) - Added support for targeting rules based on large segments. - Added `factory.destroy()` method, which invokes the `destroy` method on all SDK clients created by the factory. + - Added `factory.getState()` method for standalone server-side SDKs, which returns the rollout plan snapshot from the storage. + - Added `preloadedData` configuration option for standalone client-side SDKs, which allows preloading the SDK storage with a snapshot of the rollout plan. - Updated internal storage factory to emit the SDK_READY_FROM_CACHE event when it corresponds, to clean up the initialization flow. - Updated the handling of timers and async operations by moving them into an `init` factory method to enable lazy initialization of the SDK. This update is intended for the React SDK. - Bugfixing - Fixed an issue with the server-side polling manager that caused dangling timers when the SDK was destroyed before it was ready. diff --git a/src/sdkFactory/index.ts b/src/sdkFactory/index.ts index 41706cc6..c09b6fd6 100644 --- a/src/sdkFactory/index.ts +++ b/src/sdkFactory/index.ts @@ -56,7 +56,7 @@ export function sdkFactory(params: ISdkFactoryParams): SplitIO.ICsSDK | SplitIO. 
readiness.splits.emit(SDK_SPLITS_CACHE_LOADED); } }); - // @TODO add support for dataloader: `if (params.dataLoader) params.dataLoader(storage);` + const clients: Record = {}; const telemetryTracker = telemetryTrackerFactory(storage.telemetry, platform.now); const integrationsManager = integrationsManagerFactory && integrationsManagerFactory({ settings, storage, telemetryTracker }); diff --git a/src/storages/__tests__/dataLoader.spec.ts b/src/storages/__tests__/dataLoader.spec.ts new file mode 100644 index 00000000..c9f77849 --- /dev/null +++ b/src/storages/__tests__/dataLoader.spec.ts @@ -0,0 +1,31 @@ +import { InMemoryStorageFactory } from '../inMemory/InMemoryStorage'; +import { InMemoryStorageCSFactory } from '../inMemory/InMemoryStorageCS'; +import { fullSettings } from '../../utils/settingsValidation/__tests__/settings.mocks'; + +import * as dataLoader from '../dataLoader'; + +test('loadData & getSnapshot', () => { + jest.spyOn(dataLoader, 'loadData'); + const onReadyFromCacheCb = jest.fn(); + // @ts-expect-error + const serverStorage = InMemoryStorageFactory({ settings: fullSettings }); + serverStorage.splits.setChangeNumber(123); // @ts-expect-error + serverStorage.splits.addSplits([['split1', { name: 'split1' }]]); + serverStorage.segments.addToSegment('segment1', [fullSettings.core.key as string]); + + const preloadedData = dataLoader.getSnapshot(serverStorage, [fullSettings.core.key as string]); + + // @ts-expect-error + const clientStorage = InMemoryStorageCSFactory({ settings: { ...fullSettings, preloadedData }, onReadyFromCacheCb }); + + // Assert + expect(dataLoader.loadData).toBeCalledTimes(1); + expect(onReadyFromCacheCb).toBeCalledTimes(1); + expect(dataLoader.getSnapshot(clientStorage, [fullSettings.core.key as string])).toEqual(preloadedData); + expect(preloadedData).toEqual({ + since: 123, + splitsData: [{ name: 'split1' }], + mySegmentsData: { [fullSettings.core.key as string]: ['segment1'] }, + segmentsData: undefined + }); +}); diff --git 
a/src/storages/dataLoader.ts b/src/storages/dataLoader.ts index 24898d68..7b44df91 100644 --- a/src/storages/dataLoader.ts +++ b/src/storages/dataLoader.ts @@ -1,55 +1,85 @@ import { SplitIO } from '../types'; -import { DEFAULT_CACHE_EXPIRATION_IN_MILLIS } from '../utils/constants/browser'; -import { DataLoader, ISegmentsCacheSync, ISplitsCacheSync } from './types'; +import { ISegmentsCacheSync, ISplitsCacheSync, IStorageSync } from './types'; +import { setToArray, ISet } from '../utils/lang/sets'; +import { getMatching } from '../utils/key'; /** - * Factory of client-side storage loader + * Storage-agnostic adaptation of `loadDataIntoLocalStorage` function + * (https://github.com/godaddy/split-javascript-data-loader/blob/master/src/load-data.js) * - * @param preloadedData validated data following the format proposed in https://github.com/godaddy/split-javascript-data-loader - * and extended with a `mySegmentsData` property. - * @returns function to preload the storage + * @param preloadedData validated data following the format proposed in https://github.com/godaddy/split-javascript-data-loader and extended with a `mySegmentsData` property. + * @param storage object containing `splits` and `segments` cache (client-side variant) + * @param userKey user key (matching key) of the provided MySegmentsCache + * + * @TODO extend to load largeSegments + * @TODO extend to load data on shared mySegments storages. Be specific when emitting SDK_READY_FROM_CACHE on shared clients. Maybe the serializer should provide the `useSegments` flag. + * @TODO add logs, and input validation in this module, in favor of size reduction. 
+ * @TODO unit tests */ -export function dataLoaderFactory(preloadedData: SplitIO.PreloadedData): DataLoader { - - /** - * Storage-agnostic adaptation of `loadDataIntoLocalStorage` function - * (https://github.com/godaddy/split-javascript-data-loader/blob/master/src/load-data.js) - * - * @param storage object containing `splits` and `segments` cache (client-side variant) - * @param userId user key string of the provided MySegmentsCache - * - * @TODO extend to support SegmentsCache (server-side variant) by making `userId` optional and adding the corresponding logic. - * @TODO extend to load data on shared mySegments storages. Be specific when emitting SDK_READY_FROM_CACHE on shared clients. Maybe the serializer should provide the `useSegments` flag. - */ - return function loadData(storage: { splits: ISplitsCacheSync, segments: ISegmentsCacheSync }, userId: string) { - // Do not load data if current preloadedData is empty - if (Object.keys(preloadedData).length === 0) return; - - const { lastUpdated = -1, segmentsData = {}, since = -1, splitsData = {} } = preloadedData; +export function loadData(preloadedData: SplitIO.PreloadedData, storage: { splits?: ISplitsCacheSync, segments: ISegmentsCacheSync, largeSegments?: ISegmentsCacheSync }, matchingKey?: string) { + // Do not load data if current preloadedData is empty + if (Object.keys(preloadedData).length === 0) return; + + const { segmentsData = {}, since = -1, splitsData = [] } = preloadedData; + if (storage.splits) { const storedSince = storage.splits.getChangeNumber(); - const expirationTimestamp = Date.now() - DEFAULT_CACHE_EXPIRATION_IN_MILLIS; - // Do not load data if current localStorage data is more recent, - // or if its `lastUpdated` timestamp is older than the given `expirationTimestamp`, - if (storedSince > since || lastUpdated < expirationTimestamp) return; + // Do not load data if current data is more recent + if (storedSince > since) return; // cleaning up the localStorage data, since some cached 
splits might need be part of the preloaded data storage.splits.clear(); storage.splits.setChangeNumber(since); // splitsData in an object where the property is the split name and the pertaining value is a stringified json of its data - storage.splits.addSplits(Object.keys(splitsData).map(splitName => JSON.parse(splitsData[splitName]))); + storage.splits.addSplits(splitsData.map(split => ([split.name, split]))); + } - // add mySegments data - let mySegmentsData = preloadedData.mySegmentsData && preloadedData.mySegmentsData[userId]; + if (matchingKey) { // add mySegments data (client-side) + let mySegmentsData = preloadedData.mySegmentsData && preloadedData.mySegmentsData[matchingKey]; if (!mySegmentsData) { // segmentsData in an object where the property is the segment name and the pertaining value is a stringified object that contains the `added` array of userIds mySegmentsData = Object.keys(segmentsData).filter(segmentName => { - const userIds = JSON.parse(segmentsData[segmentName]).added; - return Array.isArray(userIds) && userIds.indexOf(userId) > -1; + const matchingKeys = segmentsData[segmentName]; + return matchingKeys.indexOf(matchingKey) > -1; }); } storage.segments.resetSegments({ k: mySegmentsData.map(s => ({ n: s })) }); + } else { // add segments data (server-side) + Object.keys(segmentsData).filter(segmentName => { + const matchingKeys = segmentsData[segmentName]; + storage.segments.addToSegment(segmentName, matchingKeys); + }); + } +} + +export function getSnapshot(storage: IStorageSync, userKeys?: SplitIO.SplitKey[]): SplitIO.PreloadedData { + return { + // lastUpdated: Date.now(), + since: storage.splits.getChangeNumber(), + splitsData: storage.splits.getAll(), + segmentsData: userKeys ? 
+ undefined : // @ts-ignore accessing private prop + Object.keys(storage.segments.segmentCache).reduce((prev, cur) => { // @ts-ignore accessing private prop + prev[cur] = setToArray(storage.segments.segmentCache[cur] as ISet); + return prev; + }, {}), + mySegmentsData: userKeys ? + userKeys.reduce>((prev, userKey) => { + prev[getMatching(userKey)] = storage.shared ? + // Client-side segments + // @ts-ignore accessing private prop + Object.keys(storage.shared(userKey).segments.segmentCache) : + // Server-side segments + // @ts-ignore accessing private prop + Object.keys(storage.segments.segmentCache).reduce((prev, segmentName) => { // @ts-ignore accessing private prop + return storage.segments.segmentCache[segmentName].has(userKey) ? + prev.concat(segmentName) : + prev; + }, []); + return prev; + }, {}) : + undefined }; } diff --git a/src/storages/inMemory/InMemoryStorageCS.ts b/src/storages/inMemory/InMemoryStorageCS.ts index 30667369..670b91f1 100644 --- a/src/storages/inMemory/InMemoryStorageCS.ts +++ b/src/storages/inMemory/InMemoryStorageCS.ts @@ -7,6 +7,8 @@ import { ImpressionCountsCacheInMemory } from './ImpressionCountsCacheInMemory'; import { DEBUG, LOCALHOST_MODE, NONE, STORAGE_MEMORY } from '../../utils/constants'; import { shouldRecordTelemetry, TelemetryCacheInMemory } from './TelemetryCacheInMemory'; import { UniqueKeysCacheInMemoryCS } from './UniqueKeysCacheInMemoryCS'; +import { getMatching } from '../../utils/key'; +import { loadData } from '../dataLoader'; /** * InMemory storage factory for standalone client-side SplitFactory @@ -14,7 +16,7 @@ import { UniqueKeysCacheInMemoryCS } from './UniqueKeysCacheInMemoryCS'; * @param params parameters required by EventsCacheSync */ export function InMemoryStorageCSFactory(params: IStorageFactoryParams): IStorageSync { - const { settings: { scheduler: { impressionsQueueSize, eventsQueueSize, }, sync: { impressionsMode, __splitFiltersValidation } } } = params; + const { settings: { scheduler: { 
impressionsQueueSize, eventsQueueSize, }, sync: { impressionsMode, __splitFiltersValidation }, preloadedData }, onReadyFromCacheCb } = params; const splits = new SplitsCacheInMemory(__splitFiltersValidation); const segments = new MySegmentsCacheInMemory(); @@ -42,11 +44,18 @@ export function InMemoryStorageCSFactory(params: IStorageFactoryParams): IStorag }, // When using shared instanciation with MEMORY we reuse everything but segments (they are unique per key) - shared() { + shared(matchingKey: string) { + const segments = new MySegmentsCacheInMemory(); + const largeSegments = new MySegmentsCacheInMemory(); + + if (preloadedData) { + loadData(preloadedData, { segments, largeSegments }, matchingKey); + } + return { splits: this.splits, - segments: new MySegmentsCacheInMemory(), - largeSegments: new MySegmentsCacheInMemory(), + segments, + largeSegments, impressions: this.impressions, impressionCounts: this.impressionCounts, events: this.events, @@ -72,6 +81,12 @@ export function InMemoryStorageCSFactory(params: IStorageFactoryParams): IStorag if (storage.uniqueKeys) storage.uniqueKeys.track = noopTrack; } + + if (preloadedData) { + loadData(preloadedData, storage, getMatching(params.settings.core.key)); + if (splits.getChangeNumber() > -1) onReadyFromCacheCb(); + } + return storage; } diff --git a/src/storages/types.ts b/src/storages/types.ts index 61ab10f2..21945587 100644 --- a/src/storages/types.ts +++ b/src/storages/types.ts @@ -492,8 +492,6 @@ export interface IStorageAsync extends IStorageBase< /** StorageFactory */ -export type DataLoader = (storage: IStorageSync, matchingKey: string) => void - export interface IStorageFactoryParams { settings: ISettings, /** diff --git a/src/types.ts b/src/types.ts index 2a65b297..777b3258 100644 --- a/src/types.ts +++ b/src/types.ts @@ -1,4 +1,4 @@ -import { ISplitFiltersValidation } from './dtos/types'; +import { ISplit, ISplitFiltersValidation } from './dtos/types'; import { IIntegration, IIntegrationFactoryParams } 
from './integrations/types'; import { ILogger } from './logger/types'; import { ISdkFactoryContext } from './sdkFactory/types'; @@ -98,6 +98,7 @@ export interface ISettings { eventsFirstPushWindow: number }, readonly storage: IStorageSyncFactory | IStorageAsyncFactory, + readonly preloadedData?: SplitIO.PreloadedData, readonly integrations: Array<{ readonly type: string, (params: IIntegrationFactoryParams): IIntegration | void @@ -771,21 +772,20 @@ export namespace SplitIO { * If this value is older than 10 days ago (expiration time policy), the data is not used to update the storage content. * @TODO configurable expiration time policy? */ - lastUpdated: number, + // lastUpdated: number, /** * Change number of the preloaded data. * If this value is older than the current changeNumber at the storage, the data is not used to update the storage content. */ since: number, /** - * Map of feature flags to their stringified definitions. + * List of feature flag definitions. + * @TODO rename to flags */ - splitsData: { - [splitName: string]: string - }, + splitsData: ISplit[], /** * Optional map of user keys to their list of segments. - * @TODO remove when releasing first version + * @TODO rename to memberships */ mySegmentsData?: { [key: string]: string[] @@ -793,9 +793,10 @@ export namespace SplitIO { /** * Optional map of segments to their stringified definitions. * This property is ignored if `mySegmentsData` was provided. 
+ * @TODO rename to segments */ segmentsData?: { - [segmentName: string]: string + [segmentName: string]: string[] }, } /** From b8b12cddf0351cff789a069391ba1ef42887f19f Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 18 Oct 2024 18:16:35 -0300 Subject: [PATCH 02/30] Update data loader to support memberships --- src/sdkFactory/index.ts | 7 ++- src/storages/__tests__/dataLoader.spec.ts | 4 +- src/storages/dataLoader.ts | 77 ++++++++++++++++------- src/storages/types.ts | 4 ++ src/types.ts | 12 ++-- 5 files changed, 70 insertions(+), 34 deletions(-) diff --git a/src/sdkFactory/index.ts b/src/sdkFactory/index.ts index 5ab47ddf..0e9feda4 100644 --- a/src/sdkFactory/index.ts +++ b/src/sdkFactory/index.ts @@ -7,7 +7,7 @@ import { IBasicClient, SplitIO } from '../types'; import { validateAndTrackApiKey } from '../utils/inputValidation/apiKey'; import { createLoggerAPI } from '../logger/sdkLogger'; import { NEW_FACTORY, RETRIEVE_MANAGER } from '../logger/constants'; -import { SDK_SPLITS_ARRIVED, SDK_SEGMENTS_ARRIVED } from '../readiness/constants'; +import { SDK_SPLITS_ARRIVED, SDK_SEGMENTS_ARRIVED, SDK_SPLITS_CACHE_LOADED } from '../readiness/constants'; import { objectAssign } from '../utils/lang/objectAssign'; import { strategyDebugFactory } from '../trackers/strategy/strategyDebug'; import { strategyOptimizedFactory } from '../trackers/strategy/strategyOptimized'; @@ -43,7 +43,7 @@ export function sdkFactory(params: ISdkFactoryParams): SplitIO.ICsSDK | SplitIO. const storage = storageFactory({ settings, - onReadyCb: (error) => { + onReadyCb(error) { if (error) { // If storage fails to connect, SDK_READY_TIMED_OUT event is emitted immediately. Review when timeout and non-recoverable errors are reworked readiness.timeout(); @@ -52,6 +52,9 @@ export function sdkFactory(params: ISdkFactoryParams): SplitIO.ICsSDK | SplitIO. 
readiness.splits.emit(SDK_SPLITS_ARRIVED); readiness.segments.emit(SDK_SEGMENTS_ARRIVED); }, + onReadyFromCacheCb() { + readiness.splits.emit(SDK_SPLITS_CACHE_LOADED); + } }); const clients: Record = {}; diff --git a/src/storages/__tests__/dataLoader.spec.ts b/src/storages/__tests__/dataLoader.spec.ts index c9f77849..522feb99 100644 --- a/src/storages/__tests__/dataLoader.spec.ts +++ b/src/storages/__tests__/dataLoader.spec.ts @@ -11,7 +11,7 @@ test('loadData & getSnapshot', () => { const serverStorage = InMemoryStorageFactory({ settings: fullSettings }); serverStorage.splits.setChangeNumber(123); // @ts-expect-error serverStorage.splits.addSplits([['split1', { name: 'split1' }]]); - serverStorage.segments.addToSegment('segment1', [fullSettings.core.key as string]); + serverStorage.segments.update('segment1', [fullSettings.core.key as string], [], 123); const preloadedData = dataLoader.getSnapshot(serverStorage, [fullSettings.core.key as string]); @@ -25,7 +25,7 @@ test('loadData & getSnapshot', () => { expect(preloadedData).toEqual({ since: 123, splitsData: [{ name: 'split1' }], - mySegmentsData: { [fullSettings.core.key as string]: ['segment1'] }, + membershipsData: { [fullSettings.core.key as string]: { ms: { k: [{ n: 'segment1' }] }, ls: { k: [] } } }, segmentsData: undefined }); }); diff --git a/src/storages/dataLoader.ts b/src/storages/dataLoader.ts index 7b44df91..4efabcc9 100644 --- a/src/storages/dataLoader.ts +++ b/src/storages/dataLoader.ts @@ -1,7 +1,8 @@ import { SplitIO } from '../types'; import { ISegmentsCacheSync, ISplitsCacheSync, IStorageSync } from './types'; -import { setToArray, ISet } from '../utils/lang/sets'; +import { setToArray } from '../utils/lang/sets'; import { getMatching } from '../utils/key'; +import { IMembershipsResponse, IMySegmentsResponse } from '../dtos/types'; /** * Storage-agnostic adaptation of `loadDataIntoLocalStorage` function @@ -37,19 +38,26 @@ export function loadData(preloadedData: SplitIO.PreloadedData, storage: { 
splits } if (matchingKey) { // add mySegments data (client-side) - let mySegmentsData = preloadedData.mySegmentsData && preloadedData.mySegmentsData[matchingKey]; - if (!mySegmentsData) { - // segmentsData in an object where the property is the segment name and the pertaining value is a stringified object that contains the `added` array of userIds - mySegmentsData = Object.keys(segmentsData).filter(segmentName => { - const matchingKeys = segmentsData[segmentName]; - return matchingKeys.indexOf(matchingKey) > -1; - }); + let membershipsData = preloadedData.membershipsData && preloadedData.membershipsData[matchingKey]; + if (!membershipsData && segmentsData) { + membershipsData = { + ms: { + k: Object.keys(segmentsData).filter(segmentName => { + const segmentKeys = segmentsData[segmentName]; + return segmentKeys.indexOf(matchingKey) > -1; + }).map(segmentName => ({ n: segmentName })) + } + }; } - storage.segments.resetSegments({ k: mySegmentsData.map(s => ({ n: s })) }); + if (membershipsData) { + if (membershipsData.ms) storage.segments.resetSegments(membershipsData.ms); + if (membershipsData.ls && storage.largeSegments) storage.largeSegments.resetSegments(membershipsData.ls); + } + } else { // add segments data (server-side) - Object.keys(segmentsData).filter(segmentName => { - const matchingKeys = segmentsData[segmentName]; - storage.segments.addToSegment(segmentName, matchingKeys); + Object.keys(segmentsData).forEach(segmentName => { + const segmentKeys = segmentsData[segmentName]; + storage.segments.update(segmentName, segmentKeys, [], -1); }); } } @@ -62,22 +70,43 @@ export function getSnapshot(storage: IStorageSync, userKeys?: SplitIO.SplitKey[] segmentsData: userKeys ? 
undefined : // @ts-ignore accessing private prop Object.keys(storage.segments.segmentCache).reduce((prev, cur) => { // @ts-ignore accessing private prop - prev[cur] = setToArray(storage.segments.segmentCache[cur] as ISet); + prev[cur] = setToArray(storage.segments.segmentCache[cur] as Set); return prev; }, {}), - mySegmentsData: userKeys ? - userKeys.reduce>((prev, userKey) => { - prev[getMatching(userKey)] = storage.shared ? + membershipsData: userKeys ? + userKeys.reduce>((prev, userKey) => { + if (storage.shared) { // Client-side segments // @ts-ignore accessing private prop - Object.keys(storage.shared(userKey).segments.segmentCache) : - // Server-side segments - // @ts-ignore accessing private prop - Object.keys(storage.segments.segmentCache).reduce((prev, segmentName) => { // @ts-ignore accessing private prop - return storage.segments.segmentCache[segmentName].has(userKey) ? - prev.concat(segmentName) : - prev; - }, []); + const sharedStorage = storage.shared(userKey); + prev[getMatching(userKey)] = { + ms: { + // @ts-ignore accessing private prop + k: Object.keys(sharedStorage.segments.segmentCache).map(segmentName => ({ n: segmentName })), + // cn: sharedStorage.segments.getChangeNumber() + }, + ls: sharedStorage.largeSegments ? { + // @ts-ignore accessing private prop + k: Object.keys(sharedStorage.largeSegments.segmentCache).map(segmentName => ({ n: segmentName })), + // cn: sharedStorage.largeSegments.getChangeNumber() + } : undefined + }; + } else { + prev[getMatching(userKey)] = { + ms: { + // Server-side segments + // @ts-ignore accessing private prop + k: Object.keys(storage.segments.segmentCache).reduce((prev, segmentName) => { // @ts-ignore accessing private prop + return storage.segments.segmentCache[segmentName].has(userKey) ? 
+ prev!.concat({ n: segmentName }) : + prev; + }, []) + }, + ls: { + k: [] + } + }; + } return prev; }, {}) : undefined diff --git a/src/storages/types.ts b/src/storages/types.ts index 08a9d387..8be2c731 100644 --- a/src/storages/types.ts +++ b/src/storages/types.ts @@ -495,6 +495,10 @@ export interface IStorageFactoryParams { * It is meant for emitting SDK_READY event in consumer mode, and waiting before using the storage in the synchronizer. */ onReadyCb: (error?: any) => void, + /** + * It is meant for emitting SDK_READY_FROM_CACHE event in standalone mode with preloaded data + */ + onReadyFromCacheCb: () => void, } export type StorageType = 'MEMORY' | 'LOCALSTORAGE' | 'REDIS' | 'PLUGGABLE'; diff --git a/src/types.ts b/src/types.ts index 92a44112..9b77ced5 100644 --- a/src/types.ts +++ b/src/types.ts @@ -1,4 +1,4 @@ -import { ISplit, ISplitFiltersValidation } from './dtos/types'; +import { IMembershipsResponse, ISplit, ISplitFiltersValidation } from './dtos/types'; import { IIntegration, IIntegrationFactoryParams } from './integrations/types'; import { ILogger } from './logger/types'; import { ISdkFactoryContext } from './sdkFactory/types'; @@ -783,15 +783,15 @@ export namespace SplitIO { */ splitsData: ISplit[], /** - * Optional map of user keys to their list of segments. + * Optional map of user keys to their memberships. * @TODO rename to memberships */ - mySegmentsData?: { - [key: string]: string[] + membershipsData?: { + [key: string]: IMembershipsResponse }, /** - * Optional map of segments to their stringified definitions. - * This property is ignored if `mySegmentsData` was provided. + * Optional map of segments to their list of keys. + * This property is ignored if `membershipsData` was provided. 
* @TODO rename to segments */ segmentsData?: { From 325ecdaf586d6d2d0a9e667890694b42bad2ae25 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 18 Oct 2024 18:25:47 -0300 Subject: [PATCH 03/30] rc --- package-lock.json | 4 ++-- package.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/package-lock.json b/package-lock.json index 8bc19544..6cf6d6f5 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "@splitsoftware/splitio-commons", - "version": "2.0.0-rc.1", + "version": "2.0.0-rc.2", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "@splitsoftware/splitio-commons", - "version": "2.0.0-rc.1", + "version": "2.0.0-rc.2", "license": "Apache-2.0", "dependencies": { "tslib": "^2.3.1" diff --git a/package.json b/package.json index 870e561c..4ddeb4c8 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@splitsoftware/splitio-commons", - "version": "2.0.0-rc.1", + "version": "2.0.0-rc.2", "description": "Split JavaScript SDK common components", "main": "cjs/index.js", "module": "esm/index.js", From fe0f353feb6e5f8e01aca65c44b008e65eed9ca7 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 30 May 2025 10:54:40 -0300 Subject: [PATCH 04/30] Add usesSegmentsSync utility function to reuse code --- src/storages/AbstractSplitsCacheSync.ts | 6 +++++- src/sync/polling/pollingManagerCS.ts | 9 +++++---- src/sync/polling/updaters/mySegmentsUpdater.ts | 5 +++-- src/sync/syncManagerOnline.ts | 5 +++-- 4 files changed, 16 insertions(+), 9 deletions(-) diff --git a/src/storages/AbstractSplitsCacheSync.ts b/src/storages/AbstractSplitsCacheSync.ts index 761c5cb9..2a4b9b78 100644 --- a/src/storages/AbstractSplitsCacheSync.ts +++ b/src/storages/AbstractSplitsCacheSync.ts @@ -1,4 +1,4 @@ -import { ISplitsCacheSync } from './types'; +import { ISplitsCacheSync, IStorageSync } from './types'; import { IRBSegment, ISplit } from '../dtos/types'; import { objectAssign } from 
'../utils/lang/objectAssign'; import { IN_SEGMENT, IN_LARGE_SEGMENT } from '../utils/constants'; @@ -88,3 +88,7 @@ export function usesSegments(ruleEntity: ISplit | IRBSegment) { return false; } + +export function usesSegmentsSync(storage: Pick) { + return storage.splits.usesSegments() || storage.rbSegments.usesSegments(); +} diff --git a/src/sync/polling/pollingManagerCS.ts b/src/sync/polling/pollingManagerCS.ts index 6a5ba679..5e197e62 100644 --- a/src/sync/polling/pollingManagerCS.ts +++ b/src/sync/polling/pollingManagerCS.ts @@ -8,6 +8,7 @@ import { getMatching } from '../../utils/key'; import { SDK_SPLITS_ARRIVED, SDK_SEGMENTS_ARRIVED } from '../../readiness/constants'; import { POLLING_SMART_PAUSING, POLLING_START, POLLING_STOP } from '../../logger/constants'; import { ISdkFactoryContextSync } from '../../sdkFactory/types'; +import { usesSegmentsSync } from '../../storages/AbstractSplitsCacheSync'; /** * Expose start / stop mechanism for polling data from services. @@ -43,7 +44,7 @@ export function pollingManagerCSFactory( // smart pausing readiness.splits.on(SDK_SPLITS_ARRIVED, () => { if (!splitsSyncTask.isRunning()) return; // noop if not doing polling - const usingSegments = storage.splits.usesSegments() || storage.rbSegments.usesSegments(); + const usingSegments = usesSegmentsSync(storage); if (usingSegments !== mySegmentsSyncTask.isRunning()) { log.info(POLLING_SMART_PAUSING, [usingSegments ? 
'ON' : 'OFF']); if (usingSegments) { @@ -59,9 +60,9 @@ export function pollingManagerCSFactory( // smart ready function smartReady() { - if (!readiness.isReady() && !storage.splits.usesSegments() && !storage.rbSegments.usesSegments()) readiness.segments.emit(SDK_SEGMENTS_ARRIVED); + if (!readiness.isReady() && !usesSegmentsSync(storage)) readiness.segments.emit(SDK_SEGMENTS_ARRIVED); } - if (!storage.splits.usesSegments() && !storage.rbSegments.usesSegments()) setTimeout(smartReady, 0); + if (!usesSegmentsSync(storage)) setTimeout(smartReady, 0); else readiness.splits.once(SDK_SPLITS_ARRIVED, smartReady); mySegmentsSyncTasks[matchingKey] = mySegmentsSyncTask; @@ -77,7 +78,7 @@ export function pollingManagerCSFactory( log.info(POLLING_START); splitsSyncTask.start(); - if (storage.splits.usesSegments() || storage.rbSegments.usesSegments()) startMySegmentsSyncTasks(); + if (usesSegmentsSync(storage)) startMySegmentsSyncTasks(); }, // Stop periodic fetching (polling) diff --git a/src/sync/polling/updaters/mySegmentsUpdater.ts b/src/sync/polling/updaters/mySegmentsUpdater.ts index 501e3b7a..5de512fa 100644 --- a/src/sync/polling/updaters/mySegmentsUpdater.ts +++ b/src/sync/polling/updaters/mySegmentsUpdater.ts @@ -8,6 +8,7 @@ import { SYNC_MYSEGMENTS_FETCH_RETRY } from '../../../logger/constants'; import { MySegmentsData } from '../types'; import { IMembershipsResponse } from '../../../dtos/types'; import { MEMBERSHIPS_LS_UPDATE } from '../../streaming/constants'; +import { usesSegmentsSync } from '../../../storages/AbstractSplitsCacheSync'; type IMySegmentsUpdater = (segmentsData?: MySegmentsData, noCache?: boolean, till?: number) => Promise @@ -27,7 +28,7 @@ export function mySegmentsUpdaterFactory( matchingKey: string ): IMySegmentsUpdater { - const { splits, rbSegments, segments, largeSegments } = storage; + const { segments, largeSegments } = storage; let readyOnAlreadyExistentState = true; let startingUp = true; @@ -51,7 +52,7 @@ export function 
mySegmentsUpdaterFactory( } // Notify update if required - if ((splits.usesSegments() || rbSegments.usesSegments()) && (shouldNotifyUpdate || readyOnAlreadyExistentState)) { + if (usesSegmentsSync(storage) && (shouldNotifyUpdate || readyOnAlreadyExistentState)) { readyOnAlreadyExistentState = false; segmentsEventEmitter.emit(SDK_SEGMENTS_ARRIVED); } diff --git a/src/sync/syncManagerOnline.ts b/src/sync/syncManagerOnline.ts index 21bf81e7..aac6f7e4 100644 --- a/src/sync/syncManagerOnline.ts +++ b/src/sync/syncManagerOnline.ts @@ -10,6 +10,7 @@ import { isConsentGranted } from '../consent'; import { POLLING, STREAMING, SYNC_MODE_UPDATE } from '../utils/constants'; import { ISdkFactoryContextSync } from '../sdkFactory/types'; import { SDK_SPLITS_CACHE_LOADED } from '../readiness/constants'; +import { usesSegmentsSync } from '../storages/AbstractSplitsCacheSync'; /** * Online SyncManager factory. @@ -155,14 +156,14 @@ export function syncManagerOnlineFactory( if (pushManager) { if (pollingManager.isRunning()) { // if doing polling, we must start the periodic fetch of data - if (storage.splits.usesSegments() || storage.rbSegments.usesSegments()) mySegmentsSyncTask.start(); + if (usesSegmentsSync(storage)) mySegmentsSyncTask.start(); } else { // if not polling, we must execute the sync task for the initial fetch // of segments since `syncAll` was already executed when starting the main client mySegmentsSyncTask.execute(); } } else { - if (storage.splits.usesSegments() || storage.rbSegments.usesSegments()) mySegmentsSyncTask.start(); + if (usesSegmentsSync(storage)) mySegmentsSyncTask.start(); } } else { if (!readinessManager.isReady()) mySegmentsSyncTask.execute(); From 334288291b1525bec2219fa5ccc70dad75186850 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Sun, 1 Jun 2025 19:01:45 -0300 Subject: [PATCH 05/30] Test assertion --- .../inLocalStorage/__tests__/SplitsCacheInLocal.spec.ts | 3 +++ src/storages/inMemory/__tests__/SplitsCacheInMemory.spec.ts | 3 +++ 2 
files changed, 6 insertions(+) diff --git a/src/storages/inLocalStorage/__tests__/SplitsCacheInLocal.spec.ts b/src/storages/inLocalStorage/__tests__/SplitsCacheInLocal.spec.ts index 913d6a3b..13ab1b32 100644 --- a/src/storages/inLocalStorage/__tests__/SplitsCacheInLocal.spec.ts +++ b/src/storages/inLocalStorage/__tests__/SplitsCacheInLocal.spec.ts @@ -173,6 +173,9 @@ test('SPLITS CACHE / LocalStorage / flag set cache tests', () => { ], [], -1); cache.addSplit(featureFlagWithEmptyFS); + // Adding an existing FF should not affect the cache + cache.update([featureFlagTwo], [], -1); + expect(cache.getNamesByFlagSets(['o'])).toEqual([new Set(['ff_one', 'ff_two'])]); expect(cache.getNamesByFlagSets(['n'])).toEqual([new Set(['ff_one'])]); expect(cache.getNamesByFlagSets(['e'])).toEqual([new Set(['ff_one', 'ff_three'])]); diff --git a/src/storages/inMemory/__tests__/SplitsCacheInMemory.spec.ts b/src/storages/inMemory/__tests__/SplitsCacheInMemory.spec.ts index 56ca1300..2ed4478b 100644 --- a/src/storages/inMemory/__tests__/SplitsCacheInMemory.spec.ts +++ b/src/storages/inMemory/__tests__/SplitsCacheInMemory.spec.ts @@ -135,6 +135,9 @@ test('SPLITS CACHE / In Memory / flag set cache tests', () => { ], [], -1); cache.addSplit(featureFlagWithEmptyFS); + // Adding an existing FF should not affect the cache + cache.update([featureFlagTwo], [], -1); + expect(cache.getNamesByFlagSets(['o'])).toEqual([new Set(['ff_one', 'ff_two'])]); expect(cache.getNamesByFlagSets(['n'])).toEqual([new Set(['ff_one'])]); expect(cache.getNamesByFlagSets(['e'])).toEqual([new Set(['ff_one', 'ff_three'])]); From cd54074d6393c61257b07a4636856ef0ffe79514 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Wed, 11 Jun 2025 15:44:26 -0300 Subject: [PATCH 06/30] Fix comments in type definitions --- types/splitio.d.ts | 38 ++++++++++++++++++++++++-------------- 1 file changed, 24 insertions(+), 14 deletions(-) diff --git a/types/splitio.d.ts b/types/splitio.d.ts index ad8644b2..377d3234 100644 --- 
a/types/splitio.d.ts +++ b/types/splitio.d.ts @@ -66,12 +66,17 @@ interface ISharedSettings { * * @example * ``` - * const getHeaderOverrides = (context) => { - * return { - * 'Authorization': context.headers['Authorization'] + ', other-value', - * 'custom-header': 'custom-value' - * }; - * }; + * const factory = SplitFactory({ + * ... + * sync: { + * getHeaderOverrides(context) { + * return { + * 'Authorization': context.headers['Authorization'] + ', other-value', + * 'custom-header': 'custom-value' + * }; + * } + * } + * }); * ``` */ getHeaderOverrides?: (context: { headers: Record }) => Record; @@ -952,7 +957,7 @@ declare namespace SplitIO { */ prefix?: string; /** - * Number of days before cached data expires if it was not updated. If cache expires, it is cleared on initialization. + * Number of days before cached data expires if it was not successfully synchronized (i.e., last SDK_READY or SDK_UPDATE event emitted). If cache expires, it is cleared on initialization. * * @defaultValue `10` */ @@ -1292,7 +1297,7 @@ declare namespace SplitIO { */ prefix?: string; /** - * Optional settings for the 'LOCALSTORAGE' storage type. It specifies the number of days before cached data expires if it was not updated. If cache expires, it is cleared on initialization. + * Optional settings for the 'LOCALSTORAGE' storage type. It specifies the number of days before cached data expires if it was not successfully synchronized (i.e., last SDK_READY or SDK_UPDATE event emitted). If cache expires, it is cleared on initialization. * * @defaultValue `10` */ @@ -1350,12 +1355,17 @@ declare namespace SplitIO { * * @example * ``` - * const getHeaderOverrides = (context) => { - * return { - * 'Authorization': context.headers['Authorization'] + ', other-value', - * 'custom-header': 'custom-value' - * }; - * }; + * const factory = SplitFactory({ + * ... 
+ * sync: { + * getHeaderOverrides(context) { + * return { + * 'Authorization': context.headers['Authorization'] + ', other-value', + * 'custom-header': 'custom-value' + * }; + * } + * } + * }); * ``` */ getHeaderOverrides?: (context: { headers: Record }) => Record; From 939013e64922b945419c04e941c84d686a887b2c Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Wed, 25 Jun 2025 15:45:03 -0300 Subject: [PATCH 07/30] Vulnerability fixes --- package-lock.json | 26 ++++++++++++++------------ 1 file changed, 14 insertions(+), 12 deletions(-) diff --git a/package-lock.json b/package-lock.json index afd5917c..c9b99914 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1780,10 +1780,11 @@ } }, "node_modules/@typescript-eslint/typescript-estree/node_modules/brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", "dev": true, + "license": "MIT", "dependencies": { "balanced-match": "^1.0.0" } @@ -2218,10 +2219,11 @@ "dev": true }, "node_modules/brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", "dev": true, + "license": "MIT", "dependencies": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" @@ -9378,9 +9380,9 @@ }, "dependencies": { 
"brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", "dev": true, "requires": { "balanced-match": "^1.0.0" @@ -9700,9 +9702,9 @@ "dev": true }, "brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", "dev": true, "requires": { "balanced-match": "^1.0.0", From 8cc3531c70dd296bc61ac11ed38a1d8040fe9229 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Wed, 9 Jul 2025 11:11:10 -0300 Subject: [PATCH 08/30] Improve HTTP error message formatting --- src/logger/messages/error.ts | 2 +- src/services/splitHttpClient.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/logger/messages/error.ts b/src/logger/messages/error.ts index 123f8eee..70a87eb0 100644 --- a/src/logger/messages/error.ts +++ b/src/logger/messages/error.ts @@ -14,7 +14,7 @@ export const codesError: [number, string][] = [ [c.ERROR_SYNC_OFFLINE_LOADING, c.LOG_PREFIX_SYNC_OFFLINE + 'There was an issue loading the mock feature flags data. No changes will be applied to the current cache. 
%s'], [c.ERROR_STREAMING_SSE, c.LOG_PREFIX_SYNC_STREAMING + 'Failed to connect or error on streaming connection, with error message: %s'], [c.ERROR_STREAMING_AUTH, c.LOG_PREFIX_SYNC_STREAMING + 'Failed to authenticate for streaming. Error: %s.'], - [c.ERROR_HTTP, 'Response status is not OK. Status: %s. URL: %s. Message: %s'], + [c.ERROR_HTTP, 'HTTP request failed with %s. URL: %s. Message: %s'], // client status [c.ERROR_CLIENT_LISTENER, 'A listener was added for %s on the SDK, which has already fired and won\'t be emitted again. The callback won\'t be executed.'], [c.ERROR_CLIENT_DESTROYED, '%s: Client has already been destroyed - no calls possible.'], diff --git a/src/services/splitHttpClient.ts b/src/services/splitHttpClient.ts index dcb841c8..89e12533 100644 --- a/src/services/splitHttpClient.ts +++ b/src/services/splitHttpClient.ts @@ -70,7 +70,7 @@ export function splitHttpClientFactory(settings: ISettings, { getOptions, getFet } if (!resp || resp.status !== 403) { // 403's log we'll be handled somewhere else. - log[logErrorsAsInfo ? 'info' : 'error'](ERROR_HTTP, [resp ? resp.status : 'NO_STATUS', url, msg]); + log[logErrorsAsInfo ? 'info' : 'error'](ERROR_HTTP, [resp ? 
'status code ' + resp.status : 'no status code', url, msg]); } const networkError: NetworkError = new Error(msg); From 39028c1748467fa2188c2874b5f60d0310bd506b Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Sun, 20 Jul 2025 11:27:55 -0300 Subject: [PATCH 09/30] enhance split filters documentation --- src/sync/polling/updaters/splitChangesUpdater.ts | 2 +- src/utils/settingsValidation/splitFilters.ts | 6 ------ types/splitio.d.ts | 12 ++++++++---- 3 files changed, 9 insertions(+), 11 deletions(-) diff --git a/src/sync/polling/updaters/splitChangesUpdater.ts b/src/sync/polling/updaters/splitChangesUpdater.ts index ea5e5e44..0331bc43 100644 --- a/src/sync/polling/updaters/splitChangesUpdater.ts +++ b/src/sync/polling/updaters/splitChangesUpdater.ts @@ -59,7 +59,7 @@ interface ISplitMutations { /** * If there are defined filters and one feature flag doesn't match with them, its status is changed to 'ARCHIVE' to avoid storing it - * If there are set filter defined, names filter is ignored + * If there is `bySet` filter, `byName` and `byPrefix` filters are ignored * * @param featureFlag - feature flag to be evaluated * @param filters - splitFiltersValidation bySet | byName diff --git a/src/utils/settingsValidation/splitFilters.ts b/src/utils/settingsValidation/splitFilters.ts index cea3117f..455d3ee1 100644 --- a/src/utils/settingsValidation/splitFilters.ts +++ b/src/utils/settingsValidation/splitFilters.ts @@ -69,12 +69,6 @@ function validateSplitFilter(log: ILogger, type: SplitIO.SplitFilterType, values /** * Returns a string representing the URL encoded query component of /splitChanges URL. * - * The possible formats of the query string are: - * - null: if all filters are empty - * - '&names=': if only `byPrefix` filter is undefined - * - '&prefixes=': if only `byName` filter is undefined - * - '&names=&prefixes=': if no one is undefined - * * @param groupedFilters - object of filters. Each filter must be a list of valid, unique and ordered string values. 
* @returns null or string with the `split filter query` component of the URL. */ diff --git a/types/splitio.d.ts b/types/splitio.d.ts index 377d3234..e85ab01b 100644 --- a/types/splitio.d.ts +++ b/types/splitio.d.ts @@ -24,7 +24,11 @@ interface ISharedSettings { sync?: { /** * List of feature flag filters. These filters are used to fetch a subset of the feature flag definitions in your environment, in order to reduce the delay of the SDK to be ready. - * This configuration is only meaningful when the SDK is working in "standalone" mode. + * + * NOTES: + * - This configuration is only meaningful when the SDK is working in `"standalone"` mode. + * - If `bySet` filter is provided, `byName` and `byPrefix` filters are ignored. + * - If both `byName` and `byPrefix` filters are provided, the intersection of the two groups of feature flags is fetched. * * Example: * ``` @@ -69,7 +73,7 @@ interface ISharedSettings { * const factory = SplitFactory({ * ... * sync: { - * getHeaderOverrides(context) { + * getHeaderOverrides: (context) => { * return { * 'Authorization': context.headers['Authorization'] + ', other-value', * 'custom-header': 'custom-value' @@ -1135,7 +1139,7 @@ declare namespace SplitIO { */ type: SplitFilterType; /** - * List of values: feature flag names for 'byName' filter type, and feature flag name prefixes for 'byPrefix' type. + * List of values: flag set names for 'bySet' filter type, feature flag names for 'byName' filter type, and feature flag name prefixes for 'byPrefix' type. */ values: string[]; } @@ -1358,7 +1362,7 @@ declare namespace SplitIO { * const factory = SplitFactory({ * ... 
* sync: { - * getHeaderOverrides(context) { + * getHeaderOverrides: (context) => { * return { * 'Authorization': context.headers['Authorization'] + ', other-value', * 'custom-header': 'custom-value' From e2ceee4d71c008378c5a3b01756b78838321dd1d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 21 Jul 2025 23:17:31 +0000 Subject: [PATCH 10/30] Bump form-data from 3.0.1 to 3.0.4 Bumps [form-data](https://github.com/form-data/form-data) from 3.0.1 to 3.0.4. - [Release notes](https://github.com/form-data/form-data/releases) - [Changelog](https://github.com/form-data/form-data/blob/v3.0.4/CHANGELOG.md) - [Commits](https://github.com/form-data/form-data/compare/v3.0.1...v3.0.4) --- updated-dependencies: - dependency-name: form-data dependency-version: 3.0.4 dependency-type: indirect ... Signed-off-by: dependabot[bot] --- package-lock.json | 271 ++++++++++++++++++++++++++++++++++++++++------ 1 file changed, 237 insertions(+), 34 deletions(-) diff --git a/package-lock.json b/package-lock.json index ea4ce2f6..fd6d8782 100644 --- a/package-lock.json +++ b/package-lock.json @@ -2317,6 +2317,19 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/call-bind-apply-helpers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", + "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", + "dev": true, + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/callsites": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", @@ -2666,6 +2679,20 @@ "node": ">=8" } }, + "node_modules/dunder-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", + "integrity": 
"sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "dev": true, + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/electron-to-chromium": { "version": "1.5.33", "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.33.tgz", @@ -2733,6 +2760,51 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/es-define-property": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", + "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", + "dev": true, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "dev": true, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-object-atoms": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", + "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", + "dev": true, + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-set-tostringtag": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz", + "integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==", + "dev": true, + "dependencies": { + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.6", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/es-to-primitive": { "version": "1.2.1", 
"resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", @@ -3705,14 +3777,16 @@ "dev": true }, "node_modules/form-data": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-3.0.1.tgz", - "integrity": "sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg==", + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-3.0.4.tgz", + "integrity": "sha512-f0cRzm6dkyVYV3nPoooP8XlccPQukegwhAnpoLcXy+X+A8KfpGOoXwDr9FLZd3wzgLaBGQBE3lY93Zm/i1JvIQ==", "dev": true, "dependencies": { "asynckit": "^0.4.0", "combined-stream": "^1.0.8", - "mime-types": "^2.1.12" + "es-set-tostringtag": "^2.1.0", + "hasown": "^2.0.2", + "mime-types": "^2.1.35" }, "engines": { "node": ">= 6" @@ -3766,14 +3840,24 @@ } }, "node_modules/get-intrinsic": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.1.tgz", - "integrity": "sha512-kWZrnVM42QCiEA2Ig1bG8zjoIMOgxWwYCEeNdwY6Tv/cOSeGpcoX4pXHfKUxNKVoArnrEr2e9srnAxxGIraS9Q==", + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", + "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", "dev": true, "dependencies": { - "function-bind": "^1.1.1", - "has": "^1.0.3", - "has-symbols": "^1.0.1" + "call-bind-apply-helpers": "^1.0.2", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", + "function-bind": "^1.1.2", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -3788,6 +3872,19 @@ "node": ">=8.0.0" } }, + "node_modules/get-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", + "integrity": 
"sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", + "dev": true, + "dependencies": { + "dunder-proto": "^1.0.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/get-stream": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", @@ -3883,6 +3980,18 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/gopd": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", + "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/graceful-fs": { "version": "4.2.11", "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", @@ -3917,9 +4026,9 @@ } }, "node_modules/has-symbols": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.2.tgz", - "integrity": "sha512-chXa79rL/UC2KlX17jo3vRGz0azaWEx5tGqZg5pO3NUyEJVB17dMruQlzCCOfUvElghKcm5194+BCRvi2Rv/Gw==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", + "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", "dev": true, "engines": { "node": ">= 0.4" @@ -3929,12 +4038,12 @@ } }, "node_modules/has-tostringtag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.0.tgz", - "integrity": "sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", + "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", "dev": true, "dependencies": { - 
"has-symbols": "^1.0.2" + "has-symbols": "^1.0.3" }, "engines": { "node": ">= 0.4" @@ -6449,6 +6558,15 @@ "tmpl": "1.0.5" } }, + "node_modules/math-intrinsics": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", + "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", + "dev": true, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/merge-stream": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", @@ -9770,6 +9888,16 @@ "get-intrinsic": "^1.0.2" } }, + "call-bind-apply-helpers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", + "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", + "dev": true, + "requires": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2" + } + }, "callsites": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", @@ -10020,6 +10148,17 @@ } } }, + "dunder-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", + "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "dev": true, + "requires": { + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" + } + }, "electron-to-chromium": { "version": "1.5.33", "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.33.tgz", @@ -10075,6 +10214,39 @@ "unbox-primitive": "^1.0.1" } }, + "es-define-property": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", + "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", + "dev": true + }, + "es-errors": { + 
"version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "dev": true + }, + "es-object-atoms": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", + "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", + "dev": true, + "requires": { + "es-errors": "^1.3.0" + } + }, + "es-set-tostringtag": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz", + "integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==", + "dev": true, + "requires": { + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.6", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.2" + } + }, "es-to-primitive": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", @@ -10797,14 +10969,16 @@ "dev": true }, "form-data": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-3.0.1.tgz", - "integrity": "sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg==", + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-3.0.4.tgz", + "integrity": "sha512-f0cRzm6dkyVYV3nPoooP8XlccPQukegwhAnpoLcXy+X+A8KfpGOoXwDr9FLZd3wzgLaBGQBE3lY93Zm/i1JvIQ==", "dev": true, "requires": { "asynckit": "^0.4.0", "combined-stream": "^1.0.8", - "mime-types": "^2.1.12" + "es-set-tostringtag": "^2.1.0", + "hasown": "^2.0.2", + "mime-types": "^2.1.35" } }, "fs.realpath": { @@ -10839,14 +11013,21 @@ "dev": true }, "get-intrinsic": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.1.tgz", - "integrity": 
"sha512-kWZrnVM42QCiEA2Ig1bG8zjoIMOgxWwYCEeNdwY6Tv/cOSeGpcoX4pXHfKUxNKVoArnrEr2e9srnAxxGIraS9Q==", + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", + "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", "dev": true, "requires": { - "function-bind": "^1.1.1", - "has": "^1.0.3", - "has-symbols": "^1.0.1" + "call-bind-apply-helpers": "^1.0.2", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", + "function-bind": "^1.1.2", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" } }, "get-package-type": { @@ -10855,6 +11036,16 @@ "integrity": "sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==", "dev": true }, + "get-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", + "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", + "dev": true, + "requires": { + "dunder-proto": "^1.0.1", + "es-object-atoms": "^1.0.0" + } + }, "get-stream": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", @@ -10920,6 +11111,12 @@ "slash": "^3.0.0" } }, + "gopd": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", + "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", + "dev": true + }, "graceful-fs": { "version": "4.2.11", "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", @@ -10948,18 +11145,18 @@ "dev": true }, "has-symbols": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.2.tgz", - "integrity": "sha512-chXa79rL/UC2KlX17jo3vRGz0azaWEx5tGqZg5pO3NUyEJVB17dMruQlzCCOfUvElghKcm5194+BCRvi2Rv/Gw==", + 
"version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", + "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", "dev": true }, "has-tostringtag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.0.tgz", - "integrity": "sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", + "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", "dev": true, "requires": { - "has-symbols": "^1.0.2" + "has-symbols": "^1.0.3" } }, "hasown": { @@ -12828,6 +13025,12 @@ "tmpl": "1.0.5" } }, + "math-intrinsics": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", + "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", + "dev": true + }, "merge-stream": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", From cc3ea62b129db8163de89c7cd918c5005676f4b9 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Thu, 24 Jul 2025 12:27:37 -0300 Subject: [PATCH 11/30] docs: clarify SDK sync pause during HTTP request timeouts --- CHANGES.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGES.txt b/CHANGES.txt index 499e296f..ab2f42c2 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -6,7 +6,7 @@ - Updated the Redis storage to: - Avoid lazy require of the `ioredis` dependency when the SDK is initialized, and - Flag the SDK as ready from cache immediately to allow queueing feature flag evaluations before SDK_READY event is emitted (Reverted in v1.7.0). 
- - Bugfix - Enhanced HTTP client module to implement timeouts for failing requests that might otherwise remain pending indefinitely on some Fetch API implementations. + - Bugfix - Enhanced HTTP client module to implement timeouts for failing requests that might otherwise remain pending indefinitely on some Fetch API implementations, pausing the SDK synchronization process. 2.2.0 (March 28, 2025) - Added a new optional argument to the client `getTreatment` methods to allow passing additional evaluation options, such as a map of properties to append to the generated impressions sent to Split backend. Read more in our docs. From 60ccbf8e77994b55a3835558af1d3c2d4593bc0f Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 22 Aug 2025 19:08:32 -0300 Subject: [PATCH 12/30] Add RBSegments --- src/storages/__tests__/dataLoader.spec.ts | 5 +++- src/storages/dataLoader.ts | 25 +++++++++++++++---- .../inLocalStorage/RBSegmentsCacheInLocal.ts | 4 +++ .../inMemory/RBSegmentsCacheInMemory.ts | 4 +++ src/storages/types.ts | 1 + types/splitio.d.ts | 7 ++++-- 6 files changed, 38 insertions(+), 8 deletions(-) diff --git a/src/storages/__tests__/dataLoader.spec.ts b/src/storages/__tests__/dataLoader.spec.ts index f7ba9e97..d1ab77c8 100644 --- a/src/storages/__tests__/dataLoader.spec.ts +++ b/src/storages/__tests__/dataLoader.spec.ts @@ -9,7 +9,8 @@ test('loadData & getSnapshot', () => { const onReadyFromCacheCb = jest.fn(); // @ts-expect-error const serverStorage = InMemoryStorageFactory({ settings: fullSettings }); // @ts-expect-error - serverStorage.splits.update([{ name: 'split1' }], [], 123); + serverStorage.splits.update([{ name: 'split1' }], [], 123); // @ts-expect-error + serverStorage.rbSegments.update([{ name: 'rbs1' }], [], 321); serverStorage.segments.update('segment1', [fullSettings.core.key as string], [], 123); const preloadedData = dataLoader.getSnapshot(serverStorage, [fullSettings.core.key as string]); @@ -24,6 +25,8 @@ test('loadData & getSnapshot', () => { 
expect(preloadedData).toEqual({ since: 123, flags: [{ name: 'split1' }], + rbSince: 321, + rbSegments: [{ name: 'rbs1' }], memberships: { [fullSettings.core.key as string]: { ms: { k: [{ n: 'segment1' }] }, ls: { k: [] } } }, segments: undefined }); diff --git a/src/storages/dataLoader.ts b/src/storages/dataLoader.ts index 7550de05..c51986b9 100644 --- a/src/storages/dataLoader.ts +++ b/src/storages/dataLoader.ts @@ -1,8 +1,8 @@ import SplitIO from '../../types/splitio'; -import { ISegmentsCacheSync, ISplitsCacheSync, IStorageSync } from './types'; +import { IRBSegmentsCacheSync, ISegmentsCacheSync, ISplitsCacheSync, IStorageSync } from './types'; import { setToArray } from '../utils/lang/sets'; import { getMatching } from '../utils/key'; -import { IMembershipsResponse, IMySegmentsResponse, ISplit } from '../dtos/types'; +import { IMembershipsResponse, IMySegmentsResponse, IRBSegment, ISplit } from '../dtos/types'; /** * @@ -10,16 +10,17 @@ import { IMembershipsResponse, IMySegmentsResponse, ISplit } from '../dtos/types * @param storage - object containing `splits` and `segments` cache (client-side variant) * @param userKey - user key (matching key) of the provided MySegmentsCache * + * @TODO load data even if current data is more recent? * @TODO extend to load largeSegments * @TODO extend to load data on shared mySegments storages. Be specific when emitting SDK_READY_FROM_CACHE on shared clients. Maybe the serializer should provide the `useSegments` flag. * @TODO add logs, and input validation in this module, in favor of size reduction. 
* @TODO unit tests */ -export function loadData(preloadedData: SplitIO.PreloadedData, storage: { splits?: ISplitsCacheSync, segments: ISegmentsCacheSync, largeSegments?: ISegmentsCacheSync }, matchingKey?: string) { +export function loadData(preloadedData: SplitIO.PreloadedData, storage: { splits?: ISplitsCacheSync, rbSegments?: IRBSegmentsCacheSync, segments: ISegmentsCacheSync, largeSegments?: ISegmentsCacheSync }, matchingKey?: string) { // Do not load data if current preloadedData is empty if (Object.keys(preloadedData).length === 0) return; - const { segments = {}, since = -1, flags = [] } = preloadedData; + const { segments = {}, since = -1, flags = [], rbSince = -1, rbSegments = [] } = preloadedData; if (storage.splits) { const storedSince = storage.splits.getChangeNumber(); @@ -34,6 +35,19 @@ export function loadData(preloadedData: SplitIO.PreloadedData, storage: { splits storage.splits.update(flags as ISplit[], [], since); } + if (storage.rbSegments) { + const storedSince = storage.rbSegments.getChangeNumber(); + + // Do not load data if current data is more recent + if (storedSince > rbSince) return; + + // cleaning up the localStorage data, since some cached splits might need be part of the preloaded data + storage.rbSegments.clear(); + + // splitsData in an object where the property is the split name and the pertaining value is a stringified json of its data + storage.rbSegments.update(rbSegments as IRBSegment[], [], rbSince); + } + if (matchingKey) { // add memberships data (client-side) let memberships = preloadedData.memberships && preloadedData.memberships[matchingKey]; if (!memberships && segments) { @@ -61,9 +75,10 @@ export function loadData(preloadedData: SplitIO.PreloadedData, storage: { splits export function getSnapshot(storage: IStorageSync, userKeys?: SplitIO.SplitKey[]): SplitIO.PreloadedData { return { - // lastUpdated: Date.now(), since: storage.splits.getChangeNumber(), flags: storage.splits.getAll(), + rbSince: 
storage.rbSegments.getChangeNumber(), + rbSegments: storage.rbSegments.getAll(), segments: userKeys ? undefined : // @ts-ignore accessing private prop Object.keys(storage.segments.segmentCache).reduce((prev, cur) => { // @ts-ignore accessing private prop diff --git a/src/storages/inLocalStorage/RBSegmentsCacheInLocal.ts b/src/storages/inLocalStorage/RBSegmentsCacheInLocal.ts index 37f6ad8e..cfc68cf5 100644 --- a/src/storages/inLocalStorage/RBSegmentsCacheInLocal.ts +++ b/src/storages/inLocalStorage/RBSegmentsCacheInLocal.ts @@ -105,6 +105,10 @@ export class RBSegmentsCacheInLocal implements IRBSegmentsCacheSync { return item && JSON.parse(item); } + getAll(): IRBSegment[] { + return this.getNames().map(key => this.get(key)!); + } + contains(names: Set): boolean { const namesArray = setToArray(names); const namesInStorage = this.getNames(); diff --git a/src/storages/inMemory/RBSegmentsCacheInMemory.ts b/src/storages/inMemory/RBSegmentsCacheInMemory.ts index 568b0deb..2b876202 100644 --- a/src/storages/inMemory/RBSegmentsCacheInMemory.ts +++ b/src/storages/inMemory/RBSegmentsCacheInMemory.ts @@ -51,6 +51,10 @@ export class RBSegmentsCacheInMemory implements IRBSegmentsCacheSync { return this.cache[name] || null; } + getAll(): IRBSegment[] { + return this.getNames().map(key => this.get(key)!); + } + contains(names: Set): boolean { const namesArray = setToArray(names); const namesInStorage = this.getNames(); diff --git a/src/storages/types.ts b/src/storages/types.ts index 1721360a..553722c7 100644 --- a/src/storages/types.ts +++ b/src/storages/types.ts @@ -235,6 +235,7 @@ export interface IRBSegmentsCacheSync extends IRBSegmentsCacheBase { update(toAdd: IRBSegment[], toRemove: IRBSegment[], changeNumber: number): boolean, get(name: string): IRBSegment | null, getChangeNumber(): number, + getAll(): IRBSegment[], clear(): void, contains(names: Set): boolean, // Used only for smart pausing in client-side standalone. 
Returns true if the storage contains a RBSegment using segments or large segments matchers diff --git a/types/splitio.d.ts b/types/splitio.d.ts index aa4bda79..a094f67e 100644 --- a/types/splitio.d.ts +++ b/types/splitio.d.ts @@ -1031,14 +1031,17 @@ declare namespace SplitIO { */ type PreloadedData = { /** - * Change number of the preloaded data. - * If this value is older than the current changeNumber at the storage, the data is not used to update the storage content. + * Change number of feature flags. */ since: number; /** * List of feature flags. */ flags: Object[], + /** + * Change number of rule-based segments. + */ + rbSince?: number, /** * List of rule-based segments. */ From e618b7fb807949f28b31b2bfa48071b22c514000 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Tue, 26 Aug 2025 09:33:15 -0300 Subject: [PATCH 13/30] Polishing --- CHANGES.txt | 2 +- src/storages/__tests__/dataLoader.spec.ts | 96 ++++++++++++++++------ src/storages/dataLoader.ts | 89 +++++++++----------- src/storages/inMemory/InMemoryStorageCS.ts | 48 ++++++----- src/storages/types.ts | 2 +- types/splitio.d.ts | 7 ++ 6 files changed, 147 insertions(+), 97 deletions(-) diff --git a/CHANGES.txt b/CHANGES.txt index 336e80c9..66d95bb4 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -1,5 +1,5 @@ 2.5.0 (August XX, 2025) - - Added `factory.getState()` method for standalone server-side SDKs, which returns the rollout plan snapshot from the storage. + - Added `factory.getCache()` method for standalone server-side SDKs, which returns the rollout plan snapshot from the storage. - Added `preloadedData` configuration option for standalone client-side SDKs, which allows preloading the SDK storage with a snapshot of the rollout plan. - Updated internal storage factory to emit the SDK_READY_FROM_CACHE event when it corresponds, to clean up the initialization flow. 
diff --git a/src/storages/__tests__/dataLoader.spec.ts b/src/storages/__tests__/dataLoader.spec.ts index d1ab77c8..2ef06f96 100644 --- a/src/storages/__tests__/dataLoader.spec.ts +++ b/src/storages/__tests__/dataLoader.spec.ts @@ -1,33 +1,81 @@ import { InMemoryStorageFactory } from '../inMemory/InMemoryStorage'; import { InMemoryStorageCSFactory } from '../inMemory/InMemoryStorageCS'; import { fullSettings } from '../../utils/settingsValidation/__tests__/settings.mocks'; +import { loggerMock } from '../../logger/__tests__/sdkLogger.mock'; +import { IRBSegment, ISplit } from '../../dtos/types'; import * as dataLoader from '../dataLoader'; -test('loadData & getSnapshot', () => { - jest.spyOn(dataLoader, 'loadData'); +describe('setCache & getCache', () => { + jest.spyOn(dataLoader, 'setCache'); const onReadyFromCacheCb = jest.fn(); - // @ts-expect-error - const serverStorage = InMemoryStorageFactory({ settings: fullSettings }); // @ts-expect-error - serverStorage.splits.update([{ name: 'split1' }], [], 123); // @ts-expect-error - serverStorage.rbSegments.update([{ name: 'rbs1' }], [], 321); - serverStorage.segments.update('segment1', [fullSettings.core.key as string], [], 123); - - const preloadedData = dataLoader.getSnapshot(serverStorage, [fullSettings.core.key as string]); - - // @ts-expect-error - const clientStorage = InMemoryStorageCSFactory({ settings: { ...fullSettings, preloadedData }, onReadyFromCacheCb }); - - // Assert - expect(dataLoader.loadData).toBeCalledTimes(1); - expect(onReadyFromCacheCb).toBeCalledTimes(1); - expect(dataLoader.getSnapshot(clientStorage, [fullSettings.core.key as string])).toEqual(preloadedData); - expect(preloadedData).toEqual({ - since: 123, - flags: [{ name: 'split1' }], - rbSince: 321, - rbSegments: [{ name: 'rbs1' }], - memberships: { [fullSettings.core.key as string]: { ms: { k: [{ n: 'segment1' }] }, ls: { k: [] } } }, - segments: undefined + const onReadyCb = jest.fn(); + + const otherKey = 'otherKey'; + + // 
@ts-expect-error Load server-side storage + const serverStorage = InMemoryStorageFactory({ settings: fullSettings }); + serverStorage.splits.update([{ name: 'split1' } as ISplit], [], 123); + serverStorage.rbSegments.update([{ name: 'rbs1' } as IRBSegment], [], 321); + serverStorage.segments.update('segment1', [fullSettings.core.key as string, otherKey], [], 123); + + afterEach(() => { + jest.clearAllMocks(); + }); + + test('using preloaded data with memberships', () => { + const preloadedData = dataLoader.getCache(loggerMock, serverStorage, [fullSettings.core.key as string, otherKey]); + + // Load client-side storage with preloaded data + const clientStorage = InMemoryStorageCSFactory({ settings: { ...fullSettings, preloadedData }, onReadyFromCacheCb, onReadyCb }); + expect(dataLoader.setCache).toBeCalledTimes(1); + expect(onReadyFromCacheCb).toBeCalledTimes(1); + + // Shared client storage + const sharedClientStorage = clientStorage.shared!(otherKey); + expect(dataLoader.setCache).toBeCalledTimes(2); + + expect(clientStorage.segments.getRegisteredSegments()).toEqual(['segment1']); + expect(sharedClientStorage.segments.getRegisteredSegments()).toEqual(['segment1']); + + // Get preloaded data from client-side storage + expect(dataLoader.getCache(loggerMock, clientStorage, [fullSettings.core.key as string, otherKey])).toEqual(preloadedData); + expect(preloadedData).toEqual({ + since: 123, + flags: [{ name: 'split1' }], + rbSince: 321, + rbSegments: [{ name: 'rbs1' }], + memberships: { + [fullSettings.core.key as string]: { ms: { k: [{ n: 'segment1' }] }, ls: { k: [] } }, + [otherKey]: { ms: { k: [{ n: 'segment1' }] }, ls: { k: [] } } + }, + segments: undefined + }); + }); + + test('using preloaded data with segments', () => { + const preloadedData = dataLoader.getCache(loggerMock, serverStorage); + + // Load client-side storage with preloaded data + const clientStorage = InMemoryStorageCSFactory({ settings: { ...fullSettings, preloadedData }, onReadyFromCacheCb, 
onReadyCb }); + expect(dataLoader.setCache).toBeCalledTimes(1); + expect(onReadyFromCacheCb).toBeCalledTimes(1); + + // Shared client storage + const sharedClientStorage = clientStorage.shared!(otherKey); + expect(dataLoader.setCache).toBeCalledTimes(2); + expect(clientStorage.segments.getRegisteredSegments()).toEqual(['segment1']); + expect(sharedClientStorage.segments.getRegisteredSegments()).toEqual(['segment1']); + + expect(preloadedData).toEqual({ + since: 123, + flags: [{ name: 'split1' }], + rbSince: 321, + rbSegments: [{ name: 'rbs1' }], + memberships: undefined, + segments: { + segment1: [fullSettings.core.key as string, otherKey] + } + }); }); }); diff --git a/src/storages/dataLoader.ts b/src/storages/dataLoader.ts index c51986b9..e5ec31c4 100644 --- a/src/storages/dataLoader.ts +++ b/src/storages/dataLoader.ts @@ -3,58 +3,39 @@ import { IRBSegmentsCacheSync, ISegmentsCacheSync, ISplitsCacheSync, IStorageSyn import { setToArray } from '../utils/lang/sets'; import { getMatching } from '../utils/key'; import { IMembershipsResponse, IMySegmentsResponse, IRBSegment, ISplit } from '../dtos/types'; +import { ILogger } from '../logger/types'; /** - * - * @param preloadedData - validated data - * @param storage - object containing `splits` and `segments` cache (client-side variant) - * @param userKey - user key (matching key) of the provided MySegmentsCache - * - * @TODO load data even if current data is more recent? - * @TODO extend to load largeSegments - * @TODO extend to load data on shared mySegments storages. Be specific when emitting SDK_READY_FROM_CACHE on shared clients. Maybe the serializer should provide the `useSegments` flag. - * @TODO add logs, and input validation in this module, in favor of size reduction. - * @TODO unit tests + * Sets the given synchronous storage with the provided preloaded data snapshot. 
+ * If `matchingKey` is provided, the storage is handled as a client-side storage (segments and largeSegments are instances of MySegmentsCache). + * Otherwise, the storage is handled as a server-side storage (segments is an instance of SegmentsCache). */ -export function loadData(preloadedData: SplitIO.PreloadedData, storage: { splits?: ISplitsCacheSync, rbSegments?: IRBSegmentsCacheSync, segments: ISegmentsCacheSync, largeSegments?: ISegmentsCacheSync }, matchingKey?: string) { +export function setCache(log: ILogger, preloadedData: SplitIO.PreloadedData, storage: { splits?: ISplitsCacheSync, rbSegments?: IRBSegmentsCacheSync, segments: ISegmentsCacheSync, largeSegments?: ISegmentsCacheSync }, matchingKey?: string) { // Do not load data if current preloadedData is empty if (Object.keys(preloadedData).length === 0) return; - const { segments = {}, since = -1, flags = [], rbSince = -1, rbSegments = [] } = preloadedData; + const { splits, rbSegments, segments, largeSegments } = storage; - if (storage.splits) { - const storedSince = storage.splits.getChangeNumber(); + log.debug(`set cache${matchingKey ? 
` for key ${matchingKey}` : ''}`); - // Do not load data if current data is more recent - if (storedSince > since) return; - - // cleaning up the localStorage data, since some cached splits might need be part of the preloaded data - storage.splits.clear(); - - // splitsData in an object where the property is the split name and the pertaining value is a stringified json of its data - storage.splits.update(flags as ISplit[], [], since); + if (splits) { + splits.clear(); + splits.update(preloadedData.flags as ISplit[] || [], [], preloadedData.since || -1); } - if (storage.rbSegments) { - const storedSince = storage.rbSegments.getChangeNumber(); - - // Do not load data if current data is more recent - if (storedSince > rbSince) return; - - // cleaning up the localStorage data, since some cached splits might need be part of the preloaded data - storage.rbSegments.clear(); - - // splitsData in an object where the property is the split name and the pertaining value is a stringified json of its data - storage.rbSegments.update(rbSegments as IRBSegment[], [], rbSince); + if (rbSegments) { + rbSegments.clear(); + rbSegments.update(preloadedData.rbSegments as IRBSegment[] || [], [], preloadedData.rbSince || -1); } + const segmentsData = preloadedData.segments || {}; if (matchingKey) { // add memberships data (client-side) let memberships = preloadedData.memberships && preloadedData.memberships[matchingKey]; - if (!memberships && segments) { + if (!memberships && segmentsData) { memberships = { ms: { - k: Object.keys(segments).filter(segmentName => { - const segmentKeys = segments[segmentName]; + k: Object.keys(segmentsData).filter(segmentName => { + const segmentKeys = segmentsData[segmentName]; return segmentKeys.indexOf(matchingKey) > -1; }).map(segmentName => ({ n: segmentName })) } @@ -62,54 +43,60 @@ export function loadData(preloadedData: SplitIO.PreloadedData, storage: { splits } if (memberships) { - if ((memberships as IMembershipsResponse).ms) 
storage.segments.resetSegments((memberships as IMembershipsResponse).ms!); - if ((memberships as IMembershipsResponse).ls && storage.largeSegments) storage.largeSegments.resetSegments((memberships as IMembershipsResponse).ls!); + if ((memberships as IMembershipsResponse).ms) segments.resetSegments((memberships as IMembershipsResponse).ms!); + if ((memberships as IMembershipsResponse).ls && largeSegments) largeSegments.resetSegments((memberships as IMembershipsResponse).ls!); } } else { // add segments data (server-side) - Object.keys(segments).forEach(segmentName => { - const segmentKeys = segments[segmentName]; - storage.segments.update(segmentName, segmentKeys, [], -1); + Object.keys(segmentsData).forEach(segmentName => { + const segmentKeys = segmentsData[segmentName]; + segments.update(segmentName, segmentKeys, [], -1); }); } } -export function getSnapshot(storage: IStorageSync, userKeys?: SplitIO.SplitKey[]): SplitIO.PreloadedData { +/** + * Gets the preloaded data snapshot from the given synchronous storage. + * If `keys` are provided, the memberships for those keys is returned, to protect segments data. + * Otherwise, the segments data is returned. + */ +export function getCache(log: ILogger, storage: IStorageSync, keys?: SplitIO.SplitKey[]): SplitIO.PreloadedData { + + log.debug(`get cache${keys ? ` for keys ${keys}` : ''}`); + return { since: storage.splits.getChangeNumber(), flags: storage.splits.getAll(), rbSince: storage.rbSegments.getChangeNumber(), rbSegments: storage.rbSegments.getAll(), - segments: userKeys ? + segments: keys ? undefined : // @ts-ignore accessing private prop Object.keys(storage.segments.segmentCache).reduce((prev, cur) => { // @ts-ignore accessing private prop prev[cur] = setToArray(storage.segments.segmentCache[cur] as Set); return prev; }, {}), - memberships: userKeys ? - userKeys.reduce>((prev, userKey) => { + memberships: keys ? 
+ keys.reduce>((prev, key) => { if (storage.shared) { // Client-side segments // @ts-ignore accessing private prop - const sharedStorage = storage.shared(userKey); - prev[getMatching(userKey)] = { + const sharedStorage = storage.shared(key); + prev[getMatching(key)] = { ms: { // @ts-ignore accessing private prop k: Object.keys(sharedStorage.segments.segmentCache).map(segmentName => ({ n: segmentName })), - // cn: sharedStorage.segments.getChangeNumber() }, ls: sharedStorage.largeSegments ? { // @ts-ignore accessing private prop k: Object.keys(sharedStorage.largeSegments.segmentCache).map(segmentName => ({ n: segmentName })), - // cn: sharedStorage.largeSegments.getChangeNumber() } : undefined }; } else { - prev[getMatching(userKey)] = { + prev[getMatching(key)] = { ms: { // Server-side segments // @ts-ignore accessing private prop k: Object.keys(storage.segments.segmentCache).reduce((prev, segmentName) => { // @ts-ignore accessing private prop - return storage.segments.segmentCache[segmentName].has(userKey) ? + return storage.segments.segmentCache[segmentName].has(key) ? 
prev!.concat({ n: segmentName }) : prev; }, []) diff --git a/src/storages/inMemory/InMemoryStorageCS.ts b/src/storages/inMemory/InMemoryStorageCS.ts index dacfab31..c8c47501 100644 --- a/src/storages/inMemory/InMemoryStorageCS.ts +++ b/src/storages/inMemory/InMemoryStorageCS.ts @@ -8,7 +8,7 @@ import { LOCALHOST_MODE, STORAGE_MEMORY } from '../../utils/constants'; import { shouldRecordTelemetry, TelemetryCacheInMemory } from './TelemetryCacheInMemory'; import { UniqueKeysCacheInMemoryCS } from './UniqueKeysCacheInMemoryCS'; import { getMatching } from '../../utils/key'; -import { loadData } from '../dataLoader'; +import { setCache } from '../dataLoader'; import { RBSegmentsCacheInMemory } from './RBSegmentsCacheInMemory'; /** @@ -17,7 +17,9 @@ import { RBSegmentsCacheInMemory } from './RBSegmentsCacheInMemory'; * @param params - parameters required by EventsCacheSync */ export function InMemoryStorageCSFactory(params: IStorageFactoryParams): IStorageSync { - const { settings: { scheduler: { impressionsQueueSize, eventsQueueSize }, sync: { __splitFiltersValidation }, preloadedData }, onReadyFromCacheCb } = params; + const { settings: { log, scheduler: { impressionsQueueSize, eventsQueueSize }, sync: { __splitFiltersValidation }, preloadedData }, onReadyFromCacheCb } = params; + + const storages: Record = {}; const splits = new SplitsCacheInMemory(__splitFiltersValidation); const rbSegments = new RBSegmentsCacheInMemory(); @@ -39,26 +41,30 @@ export function InMemoryStorageCSFactory(params: IStorageFactoryParams): IStorag // When using shared instantiation with MEMORY we reuse everything but segments (they are unique per key) shared(matchingKey: string) { - const segments = new MySegmentsCacheInMemory(); - const largeSegments = new MySegmentsCacheInMemory(); + if (!storages[matchingKey]) { + const segments = new MySegmentsCacheInMemory(); + const largeSegments = new MySegmentsCacheInMemory(); - if (preloadedData) { - loadData(preloadedData, { segments, largeSegments 
}, matchingKey); - } + if (preloadedData) { + setCache(log, preloadedData, { segments, largeSegments }, matchingKey); + } - return { - splits: this.splits, - rbSegments: this.rbSegments, - segments, - largeSegments, - impressions: this.impressions, - impressionCounts: this.impressionCounts, - events: this.events, - telemetry: this.telemetry, - uniqueKeys: this.uniqueKeys, + storages[matchingKey] = { + splits: this.splits, + rbSegments: this.rbSegments, + segments, + largeSegments, + impressions: this.impressions, + impressionCounts: this.impressionCounts, + events: this.events, + telemetry: this.telemetry, + uniqueKeys: this.uniqueKeys, + + destroy() { } + }; + } - destroy() { } - }; + return storages[matchingKey]; }, }; @@ -72,9 +78,11 @@ export function InMemoryStorageCSFactory(params: IStorageFactoryParams): IStorag storage.uniqueKeys.track = noopTrack; } + const matchingKey = getMatching(params.settings.core.key); + storages[matchingKey] = storage; if (preloadedData) { - loadData(preloadedData, storage, getMatching(params.settings.core.key)); + setCache(log, preloadedData, storage, matchingKey); if (splits.getChangeNumber() > -1) onReadyFromCacheCb(); } diff --git a/src/storages/types.ts b/src/storages/types.ts index 553722c7..0b3680f5 100644 --- a/src/storages/types.ts +++ b/src/storages/types.ts @@ -466,7 +466,7 @@ export interface IStorageBase< telemetry?: TTelemetryCache, uniqueKeys: TUniqueKeysCache, destroy(): void | Promise, - shared?: (matchingKey: string, onReadyCb: (error?: any) => void) => this + shared?: (matchingKey: string, onReadyCb?: (error?: any) => void) => this } export interface IStorageSync extends IStorageBase< diff --git a/types/splitio.d.ts b/types/splitio.d.ts index a094f67e..5bd3a5cc 100644 --- a/types/splitio.d.ts +++ b/types/splitio.d.ts @@ -1603,6 +1603,13 @@ declare namespace SplitIO { * @returns The manager instance. */ manager(): IManager; + /** + * Returns the current snapshot of the SDK rollout plan in cache. 
+ * + * @param keys - Optional list of keys to generate the rollout plan snapshot with the memberships of the given keys, rather than the complete segments data. + * @returns The current snapshot of the SDK rollout plan. + */ + getCache(keys?: SplitKey[]): PreloadedData, } /** * This represents the interface for the SDK instance for server-side with asynchronous storage. From ed482aef449f946c1cf956f98edc7bdf6782ad97 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Tue, 26 Aug 2025 09:39:26 -0300 Subject: [PATCH 14/30] rc --- package-lock.json | 4 ++-- package.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/package-lock.json b/package-lock.json index aa7cf6d8..f9c7ba15 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "@splitsoftware/splitio-commons", - "version": "2.4.1", + "version": "2.5.0-rc.0", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "@splitsoftware/splitio-commons", - "version": "2.4.1", + "version": "2.5.0-rc.0", "license": "Apache-2.0", "dependencies": { "@types/ioredis": "^4.28.0", diff --git a/package.json b/package.json index 27b15da2..47c53107 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@splitsoftware/splitio-commons", - "version": "2.4.1", + "version": "2.5.0-rc.0", "description": "Split JavaScript SDK common components", "main": "cjs/index.js", "module": "esm/index.js", From b70d121f94dbb6ab6c3266d484f156a01f858f6d Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Tue, 26 Aug 2025 09:47:47 -0300 Subject: [PATCH 15/30] Polishing --- CHANGES.txt | 1 - src/storages/types.ts | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/CHANGES.txt b/CHANGES.txt index 66d95bb4..02a5be3e 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -1,7 +1,6 @@ 2.5.0 (August XX, 2025) - Added `factory.getCache()` method for standalone server-side SDKs, which returns the rollout plan snapshot from the storage. 
- Added `preloadedData` configuration option for standalone client-side SDKs, which allows preloading the SDK storage with a snapshot of the rollout plan. - - Updated internal storage factory to emit the SDK_READY_FROM_CACHE event when it corresponds, to clean up the initialization flow. 2.4.1 (June 3, 2025) - Bugfix - Improved the Proxy fallback to flag spec version 1.2 to handle cases where the Proxy does not return an end-of-stream marker in 400 status code responses. diff --git a/src/storages/types.ts b/src/storages/types.ts index 0b3680f5..53b049ed 100644 --- a/src/storages/types.ts +++ b/src/storages/types.ts @@ -505,7 +505,7 @@ export interface IStorageFactoryParams { */ onReadyCb: (error?: any) => void, /** - * It is meant for emitting SDK_READY_FROM_CACHE event in standalone mode with preloaded data + * For emitting SDK_READY_FROM_CACHE event in consumer mode with Redis and standalone mode with preloaded data */ onReadyFromCacheCb: () => void, } From f7dd0a1c0979fa5ab9baff9b2a5c5d152937aec5 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Tue, 26 Aug 2025 12:20:32 -0300 Subject: [PATCH 16/30] Rename new methods --- CHANGES.txt | 4 +-- src/storages/__tests__/dataLoader.spec.ts | 26 +++++++++---------- src/storages/dataLoader.ts | 24 ++++++++--------- src/storages/inLocalStorage/index.ts | 6 ++--- src/storages/inMemory/InMemoryStorageCS.ts | 12 ++++----- types/splitio.d.ts | 30 +++++++++++----------- 6 files changed, 51 insertions(+), 51 deletions(-) diff --git a/CHANGES.txt b/CHANGES.txt index 02a5be3e..50aef19e 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -1,6 +1,6 @@ 2.5.0 (August XX, 2025) - - Added `factory.getCache()` method for standalone server-side SDKs, which returns the rollout plan snapshot from the storage. - - Added `preloadedData` configuration option for standalone client-side SDKs, which allows preloading the SDK storage with a snapshot of the rollout plan. 
+ - Added `factory.getRolloutPlan()` method for standalone server-side SDKs, which returns the rollout plan snapshot from the storage. + - Added `initialRolloutPlan` configuration option for standalone client-side SDKs, which allows preloading the SDK storage with a snapshot of the rollout plan. 2.4.1 (June 3, 2025) - Bugfix - Improved the Proxy fallback to flag spec version 1.2 to handle cases where the Proxy does not return an end-of-stream marker in 400 status code responses. diff --git a/src/storages/__tests__/dataLoader.spec.ts b/src/storages/__tests__/dataLoader.spec.ts index 2ef06f96..02f556d1 100644 --- a/src/storages/__tests__/dataLoader.spec.ts +++ b/src/storages/__tests__/dataLoader.spec.ts @@ -6,8 +6,8 @@ import { IRBSegment, ISplit } from '../../dtos/types'; import * as dataLoader from '../dataLoader'; -describe('setCache & getCache', () => { - jest.spyOn(dataLoader, 'setCache'); +describe('setRolloutPlan & getRolloutPlan', () => { + jest.spyOn(dataLoader, 'setRolloutPlan'); const onReadyFromCacheCb = jest.fn(); const onReadyCb = jest.fn(); @@ -24,23 +24,23 @@ describe('setCache & getCache', () => { }); test('using preloaded data with memberships', () => { - const preloadedData = dataLoader.getCache(loggerMock, serverStorage, [fullSettings.core.key as string, otherKey]); + const rolloutPlanData = dataLoader.getRolloutPlan(loggerMock, serverStorage, [fullSettings.core.key as string, otherKey]); // Load client-side storage with preloaded data - const clientStorage = InMemoryStorageCSFactory({ settings: { ...fullSettings, preloadedData }, onReadyFromCacheCb, onReadyCb }); - expect(dataLoader.setCache).toBeCalledTimes(1); + const clientStorage = InMemoryStorageCSFactory({ settings: { ...fullSettings, initialRolloutPlan: rolloutPlanData }, onReadyFromCacheCb, onReadyCb }); + expect(dataLoader.setRolloutPlan).toBeCalledTimes(1); expect(onReadyFromCacheCb).toBeCalledTimes(1); // Shared client storage const sharedClientStorage = 
clientStorage.shared!(otherKey); - expect(dataLoader.setCache).toBeCalledTimes(2); + expect(dataLoader.setRolloutPlan).toBeCalledTimes(2); expect(clientStorage.segments.getRegisteredSegments()).toEqual(['segment1']); expect(sharedClientStorage.segments.getRegisteredSegments()).toEqual(['segment1']); // Get preloaded data from client-side storage - expect(dataLoader.getCache(loggerMock, clientStorage, [fullSettings.core.key as string, otherKey])).toEqual(preloadedData); - expect(preloadedData).toEqual({ + expect(dataLoader.getRolloutPlan(loggerMock, clientStorage, [fullSettings.core.key as string, otherKey])).toEqual(rolloutPlanData); + expect(rolloutPlanData).toEqual({ since: 123, flags: [{ name: 'split1' }], rbSince: 321, @@ -54,20 +54,20 @@ describe('setCache & getCache', () => { }); test('using preloaded data with segments', () => { - const preloadedData = dataLoader.getCache(loggerMock, serverStorage); + const rolloutPlanData = dataLoader.getRolloutPlan(loggerMock, serverStorage); // Load client-side storage with preloaded data - const clientStorage = InMemoryStorageCSFactory({ settings: { ...fullSettings, preloadedData }, onReadyFromCacheCb, onReadyCb }); - expect(dataLoader.setCache).toBeCalledTimes(1); + const clientStorage = InMemoryStorageCSFactory({ settings: { ...fullSettings, initialRolloutPlan: rolloutPlanData }, onReadyFromCacheCb, onReadyCb }); + expect(dataLoader.setRolloutPlan).toBeCalledTimes(1); expect(onReadyFromCacheCb).toBeCalledTimes(1); // Shared client storage const sharedClientStorage = clientStorage.shared!(otherKey); - expect(dataLoader.setCache).toBeCalledTimes(2); + expect(dataLoader.setRolloutPlan).toBeCalledTimes(2); expect(clientStorage.segments.getRegisteredSegments()).toEqual(['segment1']); expect(sharedClientStorage.segments.getRegisteredSegments()).toEqual(['segment1']); - expect(preloadedData).toEqual({ + expect(rolloutPlanData).toEqual({ since: 123, flags: [{ name: 'split1' }], rbSince: 321, diff --git 
a/src/storages/dataLoader.ts b/src/storages/dataLoader.ts index e5ec31c4..51340a0c 100644 --- a/src/storages/dataLoader.ts +++ b/src/storages/dataLoader.ts @@ -6,31 +6,31 @@ import { IMembershipsResponse, IMySegmentsResponse, IRBSegment, ISplit } from '. import { ILogger } from '../logger/types'; /** - * Sets the given synchronous storage with the provided preloaded data snapshot. + * Sets the given synchronous storage with the provided rollout plan snapshot. * If `matchingKey` is provided, the storage is handled as a client-side storage (segments and largeSegments are instances of MySegmentsCache). * Otherwise, the storage is handled as a server-side storage (segments is an instance of SegmentsCache). */ -export function setCache(log: ILogger, preloadedData: SplitIO.PreloadedData, storage: { splits?: ISplitsCacheSync, rbSegments?: IRBSegmentsCacheSync, segments: ISegmentsCacheSync, largeSegments?: ISegmentsCacheSync }, matchingKey?: string) { - // Do not load data if current preloadedData is empty - if (Object.keys(preloadedData).length === 0) return; +export function setRolloutPlan(log: ILogger, rolloutPlan: SplitIO.RolloutPlan, storage: { splits?: ISplitsCacheSync, rbSegments?: IRBSegmentsCacheSync, segments: ISegmentsCacheSync, largeSegments?: ISegmentsCacheSync }, matchingKey?: string) { + // Do not load data if current rollout plan is empty + if (Object.keys(rolloutPlan).length === 0) return; const { splits, rbSegments, segments, largeSegments } = storage; - log.debug(`set cache${matchingKey ? ` for key ${matchingKey}` : ''}`); + log.debug(`storage: set feature flags and segments${matchingKey ? 
` for key ${matchingKey}` : ''}`); if (splits) { splits.clear(); - splits.update(preloadedData.flags as ISplit[] || [], [], preloadedData.since || -1); + splits.update(rolloutPlan.flags as ISplit[] || [], [], rolloutPlan.since || -1); } if (rbSegments) { rbSegments.clear(); - rbSegments.update(preloadedData.rbSegments as IRBSegment[] || [], [], preloadedData.rbSince || -1); + rbSegments.update(rolloutPlan.rbSegments as IRBSegment[] || [], [], rolloutPlan.rbSince || -1); } - const segmentsData = preloadedData.segments || {}; + const segmentsData = rolloutPlan.segments || {}; if (matchingKey) { // add memberships data (client-side) - let memberships = preloadedData.memberships && preloadedData.memberships[matchingKey]; + let memberships = rolloutPlan.memberships && rolloutPlan.memberships[matchingKey]; if (!memberships && segmentsData) { memberships = { ms: { @@ -55,13 +55,13 @@ export function setCache(log: ILogger, preloadedData: SplitIO.PreloadedData, sto } /** - * Gets the preloaded data snapshot from the given synchronous storage. + * Gets the rollout plan snapshot from the given synchronous storage. * If `keys` are provided, the memberships for those keys is returned, to protect segments data. * Otherwise, the segments data is returned. */ -export function getCache(log: ILogger, storage: IStorageSync, keys?: SplitIO.SplitKey[]): SplitIO.PreloadedData { +export function getRolloutPlan(log: ILogger, storage: IStorageSync, keys?: SplitIO.SplitKey[]): SplitIO.RolloutPlan { - log.debug(`get cache${keys ? ` for keys ${keys}` : ''}`); + log.debug(`storage: get feature flags and segments${keys ? 
` for keys ${keys}` : ''}`); return { since: storage.splits.getChangeNumber(), diff --git a/src/storages/inLocalStorage/index.ts b/src/storages/inLocalStorage/index.ts index 8924b84d..03d5bfc1 100644 --- a/src/storages/inLocalStorage/index.ts +++ b/src/storages/inLocalStorage/index.ts @@ -26,9 +26,9 @@ export function InLocalStorage(options: SplitIO.InLocalStorageOptions = {}): ISt function InLocalStorageCSFactory(params: IStorageFactoryParams): IStorageSync { - // Fallback to InMemoryStorage if LocalStorage API is not available - if (!isLocalStorageAvailable()) { - params.settings.log.warn(LOG_PREFIX + 'LocalStorage API is unavailable. Falling back to default MEMORY storage'); + // Fallback to InMemoryStorage if LocalStorage API is not available or preloaded data is provided + if (!isLocalStorageAvailable() || params.settings.initialRolloutPlan) { + params.settings.log.warn(LOG_PREFIX + 'LocalStorage API is unavailable or `initialRolloutPlan` is provided. Falling back to default MEMORY storage'); return InMemoryStorageCSFactory(params); } diff --git a/src/storages/inMemory/InMemoryStorageCS.ts b/src/storages/inMemory/InMemoryStorageCS.ts index c8c47501..e6b5becc 100644 --- a/src/storages/inMemory/InMemoryStorageCS.ts +++ b/src/storages/inMemory/InMemoryStorageCS.ts @@ -8,7 +8,7 @@ import { LOCALHOST_MODE, STORAGE_MEMORY } from '../../utils/constants'; import { shouldRecordTelemetry, TelemetryCacheInMemory } from './TelemetryCacheInMemory'; import { UniqueKeysCacheInMemoryCS } from './UniqueKeysCacheInMemoryCS'; import { getMatching } from '../../utils/key'; -import { setCache } from '../dataLoader'; +import { setRolloutPlan } from '../dataLoader'; import { RBSegmentsCacheInMemory } from './RBSegmentsCacheInMemory'; /** @@ -17,7 +17,7 @@ import { RBSegmentsCacheInMemory } from './RBSegmentsCacheInMemory'; * @param params - parameters required by EventsCacheSync */ export function InMemoryStorageCSFactory(params: IStorageFactoryParams): IStorageSync { - const { 
settings: { log, scheduler: { impressionsQueueSize, eventsQueueSize }, sync: { __splitFiltersValidation }, preloadedData }, onReadyFromCacheCb } = params; + const { settings: { log, scheduler: { impressionsQueueSize, eventsQueueSize }, sync: { __splitFiltersValidation }, initialRolloutPlan }, onReadyFromCacheCb } = params; const storages: Record = {}; @@ -45,8 +45,8 @@ export function InMemoryStorageCSFactory(params: IStorageFactoryParams): IStorag const segments = new MySegmentsCacheInMemory(); const largeSegments = new MySegmentsCacheInMemory(); - if (preloadedData) { - setCache(log, preloadedData, { segments, largeSegments }, matchingKey); + if (initialRolloutPlan) { + setRolloutPlan(log, initialRolloutPlan, { segments, largeSegments }, matchingKey); } storages[matchingKey] = { @@ -81,8 +81,8 @@ export function InMemoryStorageCSFactory(params: IStorageFactoryParams): IStorag const matchingKey = getMatching(params.settings.core.key); storages[matchingKey] = storage; - if (preloadedData) { - setCache(log, preloadedData, storage, matchingKey); + if (initialRolloutPlan) { + setRolloutPlan(log, initialRolloutPlan, storage, matchingKey); if (splits.getChangeNumber() > -1) onReadyFromCacheCb(); } diff --git a/types/splitio.d.ts b/types/splitio.d.ts index 5bd3a5cc..0edf8500 100644 --- a/types/splitio.d.ts +++ b/types/splitio.d.ts @@ -353,7 +353,7 @@ interface IClientSideSyncSharedSettings extends IClientSideSharedSettings, ISync /** * Data to initialize the SDK storage with. If provided and valid, the SDK will be ready from cache immediately. */ - preloadedData?: SplitIO.PreloadedData; + initialRolloutPlan?: SplitIO.RolloutPlan; /** * SDK Startup settings. 
*/ @@ -559,7 +559,7 @@ declare namespace SplitIO { eventsFirstPushWindow: number; }; readonly storage: StorageSyncFactory | StorageAsyncFactory | StorageOptions; - readonly preloadedData?: SplitIO.PreloadedData; + readonly initialRolloutPlan?: SplitIO.RolloutPlan; readonly urls: { events: string; sdk: string; @@ -1025,11 +1025,11 @@ declare namespace SplitIO { type: NodeSyncStorage | NodeAsyncStorage | BrowserStorage; prefix?: string; options?: Object; - } + }; /** - * Defines the format of rollout plan data to preload the factory storage (cache). + * Defines the format of rollout plan data to preload the SDK cache. */ - type PreloadedData = { + type RolloutPlan = { /** * Change number of feature flags. */ @@ -1037,29 +1037,29 @@ declare namespace SplitIO { /** * List of feature flags. */ - flags: Object[], + flags: Object[]; /** * Change number of rule-based segments. */ - rbSince?: number, + rbSince?: number; /** * List of rule-based segments. */ - rbSegments?: Object[], + rbSegments?: Object[]; /** * Optional map of user keys to their memberships. */ memberships?: { - [key: string]: Object - }, + [key: string]: Object; + }; /** * Optional map of segments to their list of keys. * This property is ignored if `memberships` is provided. */ segments?: { - [segmentName: string]: string[] - }, - } + [segmentName: string]: string[]; + }; + }; /** * Impression listener interface. This is the interface that needs to be implemented * by the element you provide to the SDK as impression listener. @@ -1082,7 +1082,7 @@ declare namespace SplitIO { type IntegrationFactory = { readonly type: string; (params: any): (Integration | void); - } + }; /** * A pair of user key and it's trafficType, required for tracking valid Split events. */ @@ -1609,7 +1609,7 @@ declare namespace SplitIO { * @param keys - Optional list of keys to generate the rollout plan snapshot with the memberships of the given keys, rather than the complete segments data. 
* @returns The current snapshot of the SDK rollout plan. */ - getCache(keys?: SplitKey[]): PreloadedData, + getRolloutPlan(keys?: SplitKey[]): RolloutPlan; } /** * This represents the interface for the SDK instance for server-side with asynchronous storage. From b937db52ba01dfd67e5214ffba1e9fc54bdd74c6 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Wed, 27 Aug 2025 11:12:22 -0300 Subject: [PATCH 17/30] Remove outdated validation utils --- .../__tests__/preloadedData.spec.ts | 157 ------------------ src/utils/inputValidation/index.ts | 1 - src/utils/inputValidation/preloadedData.ts | 57 ------- 3 files changed, 215 deletions(-) delete mode 100644 src/utils/inputValidation/__tests__/preloadedData.spec.ts delete mode 100644 src/utils/inputValidation/preloadedData.ts diff --git a/src/utils/inputValidation/__tests__/preloadedData.spec.ts b/src/utils/inputValidation/__tests__/preloadedData.spec.ts deleted file mode 100644 index 79f1d1a4..00000000 --- a/src/utils/inputValidation/__tests__/preloadedData.spec.ts +++ /dev/null @@ -1,157 +0,0 @@ -import { loggerMock } from '../../../logger/__tests__/sdkLogger.mock'; - -// Import the module mocking the logger. 
-import { validatePreloadedData } from '../preloadedData'; - -const method = 'some_method'; -const testCases = [ - // valid inputs - { - input: { lastUpdated: 10, since: 10, splitsData: {} }, - output: true, - warn: `${method}: preloadedData.splitsData doesn't contain feature flag definitions.` - }, - { - input: { lastUpdated: 10, since: 10, splitsData: { 'some_split': 'SPLIT DEFINITION' } }, - output: true - }, - { - input: { lastUpdated: 10, since: 10, splitsData: { 'some_split': 'SPLIT DEFINITION' }, mySegmentsData: { 'some_key': [] } }, - output: true - }, - { - input: { lastUpdated: 10, since: 10, splitsData: { 'some_split': 'SPLIT DEFINITION' }, mySegmentsData: { 'some_key': [] } }, - output: true - }, - { - input: { lastUpdated: 10, since: 10, splitsData: { 'some_split': 'SPLIT DEFINITION' }, mySegmentsData: {} }, - output: true - }, - { - input: { lastUpdated: 10, since: 10, splitsData: { 'some_split': 'SPLIT DEFINITION' }, mySegmentsData: { some_key: [] } }, - output: true - }, - { - input: { lastUpdated: 10, since: 10, splitsData: { 'some_split': 'SPLIT DEFINITION' }, mySegmentsData: { some_key: ['some_segment'] } }, - output: true - }, - { - input: { lastUpdated: 10, since: 10, splitsData: { 'some_split': 'SPLIT DEFINITION' }, segmentsData: {} }, - output: true - }, - { - input: { lastUpdated: 10, since: 10, splitsData: { 'some_split': 'SPLIT DEFINITION' }, segmentsData: { some_segment: 'SEGMENT DEFINITION' } }, - output: true - }, - { - input: { lastUpdated: 10, since: 10, splitsData: { 'some_split': 'SPLIT DEFINITION' }, mySegmentsData: { some_key: ['some_segment'], some_other_key: ['some_segment'] }, segmentsData: { some_segment: 'SEGMENT DEFINITION', some_other_segment: 'SEGMENT DEFINITION' } }, - output: true - }, - { - // should be true, even using objects for strings and numbers or having extra properties - input: { ignoredProperty: 'IGNORED', lastUpdated: new Number(10), since: new Number(10), splitsData: { 'some_split': new String('SPLIT 
DEFINITION') }, mySegmentsData: { some_key: [new String('some_segment')] }, segmentsData: { some_segment: new String('SEGMENT DEFINITION') } }, - output: true - }, - - // invalid inputs - { - // should be false if preloadedData is not an object - input: undefined, - output: false, - error: `${method}: preloadedData must be an object.` - }, - { - // should be false if preloadedData is not an object - input: [], - output: false, - error: `${method}: preloadedData must be an object.` - }, - { - // should be false if lastUpdated property is invalid - input: { lastUpdated: undefined, since: 10, splitsData: {} }, - output: false, - error: `${method}: preloadedData.lastUpdated must be a positive number.` - }, - { - // should be false if lastUpdated property is invalid - input: { lastUpdated: -1, since: 10, splitsData: {} }, - output: false, - error: `${method}: preloadedData.lastUpdated must be a positive number.` - }, - { - // should be false if since property is invalid - input: { lastUpdated: 10, since: undefined, splitsData: {} }, - output: false, - error: `${method}: preloadedData.since must be a positive number.` - }, - { - // should be false if since property is invalid - input: { lastUpdated: 10, since: -1, splitsData: {} }, - output: false, - error: `${method}: preloadedData.since must be a positive number.` - }, - { - // should be false if splitsData property is invalid - input: { lastUpdated: 10, since: 10, splitsData: undefined }, - output: false, - error: `${method}: preloadedData.splitsData must be a map of feature flag names to their stringified definitions.` - }, - { - // should be false if splitsData property is invalid - input: { lastUpdated: 10, since: 10, splitsData: ['DEFINITION'] }, - output: false, - error: `${method}: preloadedData.splitsData must be a map of feature flag names to their stringified definitions.` - }, - { - // should be false if splitsData property is invalid - input: { lastUpdated: 10, since: 10, splitsData: { some_split: undefined 
} }, - output: false, - error: `${method}: preloadedData.splitsData must be a map of feature flag names to their stringified definitions.` - }, - { - // should be false if mySegmentsData property is invalid - input: { lastUpdated: 10, since: 10, splitsData: { some_split: 'DEFINITION' }, mySegmentsData: ['DEFINITION'] }, - output: false, - error: `${method}: preloadedData.mySegmentsData must be a map of user keys to their list of segment names.` - }, - { - // should be false if mySegmentsData property is invalid - input: { lastUpdated: 10, since: 10, splitsData: { some_split: 'DEFINITION' }, mySegmentsData: { some_key: undefined } }, - output: false, - error: `${method}: preloadedData.mySegmentsData must be a map of user keys to their list of segment names.` - }, - { - // should be false if segmentsData property is invalid - input: { lastUpdated: 10, since: 10, splitsData: { some_split: 'DEFINITION' }, segmentsData: ['DEFINITION'] }, - output: false, - error: `${method}: preloadedData.segmentsData must be a map of segment names to their stringified definitions.` - }, - { - // should be false if segmentsData property is invalid - input: { lastUpdated: 10, since: 10, splitsData: { some_split: 'DEFINITION' }, segmentsData: { some_segment: undefined } }, - output: false, - error: `${method}: preloadedData.segmentsData must be a map of segment names to their stringified definitions.` - } -]; - -test('INPUT VALIDATION for preloadedData', () => { - - for (let i = 0; i < testCases.length; i++) { - const testCase = testCases[i]; - expect(validatePreloadedData(loggerMock, testCase.input, method)).toBe(testCase.output); - - if (testCase.error) { - expect(loggerMock.error.mock.calls[0]).toEqual([testCase.error]); // Should log the error for the invalid preloadedData. - loggerMock.error.mockClear(); - } else { - expect(loggerMock.error).not.toBeCalled(); // Should not log any error. 
- } - - if (testCase.warn) { - expect(loggerMock.warn.mock.calls[0]).toEqual([testCase.warn]); // Should log the warning for the given preloadedData. - loggerMock.warn.mockClear(); - } else { - expect(loggerMock.warn).not.toBeCalled(); // Should not log any warning. - } - } -}); diff --git a/src/utils/inputValidation/index.ts b/src/utils/inputValidation/index.ts index 96cf4be6..eac9777d 100644 --- a/src/utils/inputValidation/index.ts +++ b/src/utils/inputValidation/index.ts @@ -10,5 +10,4 @@ export { validateTrafficType } from './trafficType'; export { validateIfNotDestroyed, validateIfOperational } from './isOperational'; export { validateSplitExistence } from './splitExistence'; export { validateTrafficTypeExistence } from './trafficTypeExistence'; -export { validatePreloadedData } from './preloadedData'; export { validateEvaluationOptions } from './eventProperties'; diff --git a/src/utils/inputValidation/preloadedData.ts b/src/utils/inputValidation/preloadedData.ts deleted file mode 100644 index f07ee432..00000000 --- a/src/utils/inputValidation/preloadedData.ts +++ /dev/null @@ -1,57 +0,0 @@ -import { isObject, isString, isFiniteNumber } from '../lang'; -import { validateSplit } from './split'; -import { ILogger } from '../../logger/types'; - -function validateTimestampData(log: ILogger, maybeTimestamp: any, method: string, item: string) { - if (isFiniteNumber(maybeTimestamp) && maybeTimestamp > -1) return true; - log.error(`${method}: preloadedData.${item} must be a positive number.`); - return false; -} - -function validateSplitsData(log: ILogger, maybeSplitsData: any, method: string) { - if (isObject(maybeSplitsData)) { - const splitNames = Object.keys(maybeSplitsData); - if (splitNames.length === 0) log.warn(`${method}: preloadedData.splitsData doesn't contain feature flag definitions.`); - // @TODO in the future, consider handling the possibility of having parsed definitions of splits - if (splitNames.every(splitName => validateSplit(log, splitName, 
method) && isString(maybeSplitsData[splitName]))) return true; - } - log.error(`${method}: preloadedData.splitsData must be a map of feature flag names to their stringified definitions.`); - return false; -} - -function validateMySegmentsData(log: ILogger, maybeMySegmentsData: any, method: string) { - if (isObject(maybeMySegmentsData)) { - const userKeys = Object.keys(maybeMySegmentsData); - if (userKeys.every(userKey => { - const segmentNames = maybeMySegmentsData[userKey]; - // an empty list is valid - return Array.isArray(segmentNames) && segmentNames.every(segmentName => isString(segmentName)); - })) return true; - } - log.error(`${method}: preloadedData.mySegmentsData must be a map of user keys to their list of segment names.`); - return false; -} - -function validateSegmentsData(log: ILogger, maybeSegmentsData: any, method: string) { - if (isObject(maybeSegmentsData)) { - const segmentNames = Object.keys(maybeSegmentsData); - if (segmentNames.every(segmentName => isString(maybeSegmentsData[segmentName]))) return true; - } - log.error(`${method}: preloadedData.segmentsData must be a map of segment names to their stringified definitions.`); - return false; -} - -export function validatePreloadedData(log: ILogger, maybePreloadedData: any, method: string) { - if (!isObject(maybePreloadedData)) { - log.error(`${method}: preloadedData must be an object.`); - } else if ( - validateTimestampData(log, maybePreloadedData.lastUpdated, method, 'lastUpdated') && - validateTimestampData(log, maybePreloadedData.since, method, 'since') && - validateSplitsData(log, maybePreloadedData.splitsData, method) && - (!maybePreloadedData.mySegmentsData || validateMySegmentsData(log, maybePreloadedData.mySegmentsData, method)) && - (!maybePreloadedData.segmentsData || validateSegmentsData(log, maybePreloadedData.segmentsData, method)) - ) { - return true; - } - return false; -} From a95edb9c2ea8d1eef488fc2b6485db5e104a665d Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Wed, 27 
Aug 2025 15:00:31 -0300 Subject: [PATCH 18/30] refactor type definitions --- src/storages/dataLoader.ts | 44 ++++++++++++++++++++++++++++++++------ src/types.ts | 2 ++ types/splitio.d.ts | 37 ++++---------------------------- 3 files changed, 44 insertions(+), 39 deletions(-) diff --git a/src/storages/dataLoader.ts b/src/storages/dataLoader.ts index 51340a0c..860bde59 100644 --- a/src/storages/dataLoader.ts +++ b/src/storages/dataLoader.ts @@ -5,12 +5,44 @@ import { getMatching } from '../utils/key'; import { IMembershipsResponse, IMySegmentsResponse, IRBSegment, ISplit } from '../dtos/types'; import { ILogger } from '../logger/types'; +export type RolloutPlan = { + /** + * Change number of feature flags. + */ + since: number; + /** + * List of feature flags. + */ + flags: ISplit[]; + /** + * Change number of rule-based segments. + */ + rbSince?: number; + /** + * List of rule-based segments. + */ + rbSegments?: IRBSegment[]; + /** + * Optional map of user keys to their memberships. + */ + memberships?: { + [key: string]: IMembershipsResponse; + }; + /** + * Optional map of standard segments to their list of keys. + * This property is ignored if `memberships` is provided. + */ + segments?: { + [segmentName: string]: string[]; + }; +}; + /** * Sets the given synchronous storage with the provided rollout plan snapshot. * If `matchingKey` is provided, the storage is handled as a client-side storage (segments and largeSegments are instances of MySegmentsCache). * Otherwise, the storage is handled as a server-side storage (segments is an instance of SegmentsCache). 
*/ -export function setRolloutPlan(log: ILogger, rolloutPlan: SplitIO.RolloutPlan, storage: { splits?: ISplitsCacheSync, rbSegments?: IRBSegmentsCacheSync, segments: ISegmentsCacheSync, largeSegments?: ISegmentsCacheSync }, matchingKey?: string) { +export function setRolloutPlan(log: ILogger, rolloutPlan: RolloutPlan, storage: { splits?: ISplitsCacheSync, rbSegments?: IRBSegmentsCacheSync, segments: ISegmentsCacheSync, largeSegments?: ISegmentsCacheSync }, matchingKey?: string) { // Do not load data if current rollout plan is empty if (Object.keys(rolloutPlan).length === 0) return; @@ -20,12 +52,12 @@ export function setRolloutPlan(log: ILogger, rolloutPlan: SplitIO.RolloutPlan, s if (splits) { splits.clear(); - splits.update(rolloutPlan.flags as ISplit[] || [], [], rolloutPlan.since || -1); + splits.update(rolloutPlan.flags || [], [], rolloutPlan.since || -1); } if (rbSegments) { rbSegments.clear(); - rbSegments.update(rolloutPlan.rbSegments as IRBSegment[] || [], [], rolloutPlan.rbSince || -1); + rbSegments.update(rolloutPlan.rbSegments || [], [], rolloutPlan.rbSince || -1); } const segmentsData = rolloutPlan.segments || {}; @@ -43,8 +75,8 @@ export function setRolloutPlan(log: ILogger, rolloutPlan: SplitIO.RolloutPlan, s } if (memberships) { - if ((memberships as IMembershipsResponse).ms) segments.resetSegments((memberships as IMembershipsResponse).ms!); - if ((memberships as IMembershipsResponse).ls && largeSegments) largeSegments.resetSegments((memberships as IMembershipsResponse).ls!); + if (memberships.ms) segments.resetSegments(memberships.ms!); + if (memberships.ls && largeSegments) largeSegments.resetSegments(memberships.ls!); } } else { // add segments data (server-side) Object.keys(segmentsData).forEach(segmentName => { @@ -59,7 +91,7 @@ export function setRolloutPlan(log: ILogger, rolloutPlan: SplitIO.RolloutPlan, s * If `keys` are provided, the memberships for those keys is returned, to protect segments data. 
* Otherwise, the segments data is returned. */ -export function getRolloutPlan(log: ILogger, storage: IStorageSync, keys?: SplitIO.SplitKey[]): SplitIO.RolloutPlan { +export function getRolloutPlan(log: ILogger, storage: IStorageSync, keys?: SplitIO.SplitKey[]): RolloutPlan { log.debug(`storage: get feature flags and segments${keys ? ` for keys ${keys}` : ''}`); diff --git a/src/types.ts b/src/types.ts index ab3e74bb..be4132a1 100644 --- a/src/types.ts +++ b/src/types.ts @@ -1,6 +1,7 @@ import SplitIO from '../types/splitio'; import { ISplitFiltersValidation } from './dtos/types'; import { ILogger } from './logger/types'; +import { RolloutPlan } from './storages/dataLoader'; /** * SplitIO.ISettings interface extended with private properties for internal use @@ -10,6 +11,7 @@ export interface ISettings extends SplitIO.ISettings { __splitFiltersValidation: ISplitFiltersValidation; }; readonly log: ILogger; + readonly initialRolloutPlan?: RolloutPlan; } /** diff --git a/types/splitio.d.ts b/types/splitio.d.ts index 0edf8500..3ffc3db6 100644 --- a/types/splitio.d.ts +++ b/types/splitio.d.ts @@ -351,7 +351,8 @@ interface IClientSideSyncSharedSettings extends IClientSideSharedSettings, ISync */ features?: SplitIO.MockedFeaturesMap; /** - * Data to initialize the SDK storage with. If provided and valid, the SDK will be ready from cache immediately. + * Rollout plan object (i.e., feature flags and segment definitions) to initialize the SDK storage with. If provided and valid, the SDK will be ready from cache immediately. + * This object is derived from calling the Node.js SDK’s `getRolloutPlan` method. */ initialRolloutPlan?: SplitIO.RolloutPlan; /** @@ -1027,39 +1028,9 @@ declare namespace SplitIO { options?: Object; }; /** - * Defines the format of rollout plan data to preload the SDK cache. + * A JSON-serializable plain object that defines the format of rollout plan data to preload the SDK cache with feature flags and segments. 
*/ - type RolloutPlan = { - /** - * Change number of feature flags. - */ - since: number; - /** - * List of feature flags. - */ - flags: Object[]; - /** - * Change number of rule-based segments. - */ - rbSince?: number; - /** - * List of rule-based segments. - */ - rbSegments?: Object[]; - /** - * Optional map of user keys to their memberships. - */ - memberships?: { - [key: string]: Object; - }; - /** - * Optional map of segments to their list of keys. - * This property is ignored if `memberships` is provided. - */ - segments?: { - [segmentName: string]: string[]; - }; - }; + type RolloutPlan = Object; /** * Impression listener interface. This is the interface that needs to be implemented * by the element you provide to the SDK as impression listener. From 5b84df372909f44e19ac9db1ba1e2f0c890bf58e Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Tue, 2 Sep 2025 17:03:29 -0300 Subject: [PATCH 19/30] refactor: restructure rollout plan data format and improve data loading --- src/dtos/types.ts | 2 +- src/storages/__tests__/dataLoader.spec.ts | 91 ++++++++++++------ src/storages/dataLoader.ts | 107 ++++++++++++---------- types/splitio.d.ts | 19 +++- 4 files changed, 142 insertions(+), 77 deletions(-) diff --git a/src/dtos/types.ts b/src/dtos/types.ts index 78d62de4..a72b751b 100644 --- a/src/dtos/types.ts +++ b/src/dtos/types.ts @@ -259,7 +259,7 @@ export interface ISegmentChangesResponse { name: string, added: string[], removed: string[], - since: number, + since?: number, till: number } diff --git a/src/storages/__tests__/dataLoader.spec.ts b/src/storages/__tests__/dataLoader.spec.ts index 02f556d1..41dbde30 100644 --- a/src/storages/__tests__/dataLoader.spec.ts +++ b/src/storages/__tests__/dataLoader.spec.ts @@ -6,7 +6,7 @@ import { IRBSegment, ISplit } from '../../dtos/types'; import * as dataLoader from '../dataLoader'; -describe('setRolloutPlan & getRolloutPlan', () => { +describe('getRolloutPlan & setRolloutPlan (client-side)', () => { 
jest.spyOn(dataLoader, 'setRolloutPlan'); const onReadyFromCacheCb = jest.fn(); const onReadyCb = jest.fn(); @@ -19,15 +19,52 @@ describe('setRolloutPlan & getRolloutPlan', () => { serverStorage.rbSegments.update([{ name: 'rbs1' } as IRBSegment], [], 321); serverStorage.segments.update('segment1', [fullSettings.core.key as string, otherKey], [], 123); + const expectedRolloutPlan = { + splitChanges: { + ff: { d: [{ name: 'split1' }], t: 123 }, + rbs: { d: [{ name: 'rbs1' }], t: 321 } + }, + memberships: { + [fullSettings.core.key as string]: { ms: { k: [{ n: 'segment1' }] }, ls: { k: [] } }, + [otherKey]: { ms: { k: [{ n: 'segment1' }] }, ls: { k: [] } } + }, + segmentChanges: [{ + name: 'segment1', + added: [fullSettings.core.key as string, otherKey], + removed: [], + till: 123 + }] + }; + afterEach(() => { jest.clearAllMocks(); }); + test('using preloaded data (no memberships, no segments)', () => { + const rolloutPlan = dataLoader.getRolloutPlan(loggerMock, serverStorage); + + // Load client-side storage with preloaded data + const clientStorage = InMemoryStorageCSFactory({ settings: { ...fullSettings, initialRolloutPlan: rolloutPlan }, onReadyFromCacheCb, onReadyCb }); + expect(dataLoader.setRolloutPlan).toBeCalledTimes(1); + expect(onReadyFromCacheCb).toBeCalledTimes(1); + + // Shared client storage + const sharedClientStorage = clientStorage.shared!(otherKey); + expect(dataLoader.setRolloutPlan).toBeCalledTimes(2); + + expect(clientStorage.segments.getRegisteredSegments()).toEqual([]); + expect(sharedClientStorage.segments.getRegisteredSegments()).toEqual([]); + + // Get preloaded data from client-side storage + expect(dataLoader.getRolloutPlan(loggerMock, clientStorage)).toEqual(rolloutPlan); + expect(rolloutPlan).toEqual({ ...expectedRolloutPlan, memberships: undefined, segmentChanges: undefined }); + }); + test('using preloaded data with memberships', () => { - const rolloutPlanData = dataLoader.getRolloutPlan(loggerMock, serverStorage, 
[fullSettings.core.key as string, otherKey]); + const rolloutPlan = dataLoader.getRolloutPlan(loggerMock, serverStorage, { keys: [fullSettings.core.key as string, otherKey] }); // Load client-side storage with preloaded data - const clientStorage = InMemoryStorageCSFactory({ settings: { ...fullSettings, initialRolloutPlan: rolloutPlanData }, onReadyFromCacheCb, onReadyCb }); + const clientStorage = InMemoryStorageCSFactory({ settings: { ...fullSettings, initialRolloutPlan: rolloutPlan }, onReadyFromCacheCb, onReadyCb }); expect(dataLoader.setRolloutPlan).toBeCalledTimes(1); expect(onReadyFromCacheCb).toBeCalledTimes(1); @@ -39,43 +76,43 @@ describe('setRolloutPlan & getRolloutPlan', () => { expect(sharedClientStorage.segments.getRegisteredSegments()).toEqual(['segment1']); // Get preloaded data from client-side storage - expect(dataLoader.getRolloutPlan(loggerMock, clientStorage, [fullSettings.core.key as string, otherKey])).toEqual(rolloutPlanData); - expect(rolloutPlanData).toEqual({ - since: 123, - flags: [{ name: 'split1' }], - rbSince: 321, - rbSegments: [{ name: 'rbs1' }], - memberships: { - [fullSettings.core.key as string]: { ms: { k: [{ n: 'segment1' }] }, ls: { k: [] } }, - [otherKey]: { ms: { k: [{ n: 'segment1' }] }, ls: { k: [] } } - }, - segments: undefined - }); + expect(dataLoader.getRolloutPlan(loggerMock, clientStorage, { keys: [fullSettings.core.key as string, otherKey] })).toEqual(rolloutPlan); + expect(rolloutPlan).toEqual({ ...expectedRolloutPlan, segmentChanges: undefined }); }); test('using preloaded data with segments', () => { - const rolloutPlanData = dataLoader.getRolloutPlan(loggerMock, serverStorage); + const rolloutPlan = dataLoader.getRolloutPlan(loggerMock, serverStorage, { exposeSegments: true }); // Load client-side storage with preloaded data - const clientStorage = InMemoryStorageCSFactory({ settings: { ...fullSettings, initialRolloutPlan: rolloutPlanData }, onReadyFromCacheCb, onReadyCb }); + const clientStorage = 
InMemoryStorageCSFactory({ settings: { ...fullSettings, initialRolloutPlan: rolloutPlan }, onReadyFromCacheCb, onReadyCb }); expect(dataLoader.setRolloutPlan).toBeCalledTimes(1); expect(onReadyFromCacheCb).toBeCalledTimes(1); // Shared client storage const sharedClientStorage = clientStorage.shared!(otherKey); expect(dataLoader.setRolloutPlan).toBeCalledTimes(2); + expect(clientStorage.segments.getRegisteredSegments()).toEqual(['segment1']); expect(sharedClientStorage.segments.getRegisteredSegments()).toEqual(['segment1']); - expect(rolloutPlanData).toEqual({ - since: 123, - flags: [{ name: 'split1' }], - rbSince: 321, - rbSegments: [{ name: 'rbs1' }], - memberships: undefined, - segments: { - segment1: [fullSettings.core.key as string, otherKey] - } - }); + expect(rolloutPlan).toEqual({ ...expectedRolloutPlan, memberships: undefined }); + }); + + test('using preloaded data with memberships and segments', () => { + const rolloutPlan = dataLoader.getRolloutPlan(loggerMock, serverStorage, { keys: [fullSettings.core.key as string], exposeSegments: true }); + + // Load client-side storage with preloaded data + const clientStorage = InMemoryStorageCSFactory({ settings: { ...fullSettings, initialRolloutPlan: rolloutPlan }, onReadyFromCacheCb, onReadyCb }); + expect(dataLoader.setRolloutPlan).toBeCalledTimes(1); + expect(onReadyFromCacheCb).toBeCalledTimes(1); + + // Shared client storage + const sharedClientStorage = clientStorage.shared!(otherKey); + expect(dataLoader.setRolloutPlan).toBeCalledTimes(2); + + expect(clientStorage.segments.getRegisteredSegments()).toEqual(['segment1']); // main client membership is set via the rollout plan `memberships` field + expect(sharedClientStorage.segments.getRegisteredSegments()).toEqual(['segment1']); // shared client membership is set via the rollout plan `segmentChanges` field + + expect(rolloutPlan).toEqual({ ...expectedRolloutPlan, memberships: { [fullSettings.core.key as string]: 
expectedRolloutPlan.memberships![fullSettings.core.key as string] } }); }); }); diff --git a/src/storages/dataLoader.ts b/src/storages/dataLoader.ts index 860bde59..f8741af3 100644 --- a/src/storages/dataLoader.ts +++ b/src/storages/dataLoader.ts @@ -2,41 +2,37 @@ import SplitIO from '../../types/splitio'; import { IRBSegmentsCacheSync, ISegmentsCacheSync, ISplitsCacheSync, IStorageSync } from './types'; import { setToArray } from '../utils/lang/sets'; import { getMatching } from '../utils/key'; -import { IMembershipsResponse, IMySegmentsResponse, IRBSegment, ISplit } from '../dtos/types'; +import { IMembershipsResponse, IMySegmentsResponse, ISegmentChangesResponse, ISplitChangesResponse } from '../dtos/types'; import { ILogger } from '../logger/types'; +import { isObject } from '../utils/lang'; export type RolloutPlan = { /** - * Change number of feature flags. + * Feature flags and rule-based segments. */ - since: number; + splitChanges: ISplitChangesResponse; /** - * List of feature flags. - */ - flags: ISplit[]; - /** - * Change number of rule-based segments. - */ - rbSince?: number; - /** - * List of rule-based segments. - */ - rbSegments?: IRBSegment[]; - /** - * Optional map of user keys to their memberships. + * Optional map of matching keys to their memberships. */ memberships?: { - [key: string]: IMembershipsResponse; + [matchingKey: string]: IMembershipsResponse; }; /** - * Optional map of standard segments to their list of keys. + * Optional list of standard segments. * This property is ignored if `memberships` is provided. */ - segments?: { - [segmentName: string]: string[]; - }; + segmentChanges?: ISegmentChangesResponse[]; }; +/** + * Validates if the given rollout plan is valid. 
+ */ +function validateRolloutPlan(rolloutPlan: unknown): rolloutPlan is RolloutPlan { + if (isObject(rolloutPlan) && isObject((rolloutPlan as any).splitChanges)) return true; + + return false; +} + /** * Sets the given synchronous storage with the provided rollout plan snapshot. * If `matchingKey` is provided, the storage is handled as a client-side storage (segments and largeSegments are instances of MySegmentsCache). @@ -44,32 +40,35 @@ export type RolloutPlan = { */ export function setRolloutPlan(log: ILogger, rolloutPlan: RolloutPlan, storage: { splits?: ISplitsCacheSync, rbSegments?: IRBSegmentsCacheSync, segments: ISegmentsCacheSync, largeSegments?: ISegmentsCacheSync }, matchingKey?: string) { // Do not load data if current rollout plan is empty - if (Object.keys(rolloutPlan).length === 0) return; + if (!validateRolloutPlan(rolloutPlan)) { + log.error('storage: invalid rollout plan provided'); + return; + } const { splits, rbSegments, segments, largeSegments } = storage; + const { splitChanges: { ff, rbs } } = rolloutPlan; log.debug(`storage: set feature flags and segments${matchingKey ? 
` for key ${matchingKey}` : ''}`); - if (splits) { + if (splits && ff) { splits.clear(); - splits.update(rolloutPlan.flags || [], [], rolloutPlan.since || -1); + splits.update(ff.d, [], ff.t); } - if (rbSegments) { + if (rbSegments && rbs) { rbSegments.clear(); - rbSegments.update(rolloutPlan.rbSegments || [], [], rolloutPlan.rbSince || -1); + rbSegments.update(rbs.d, [], rbs.t); } - const segmentsData = rolloutPlan.segments || {}; + const segmentChanges = rolloutPlan.segmentChanges; if (matchingKey) { // add memberships data (client-side) let memberships = rolloutPlan.memberships && rolloutPlan.memberships[matchingKey]; - if (!memberships && segmentsData) { + if (!memberships && segmentChanges) { memberships = { ms: { - k: Object.keys(segmentsData).filter(segmentName => { - const segmentKeys = segmentsData[segmentName]; - return segmentKeys.indexOf(matchingKey) > -1; - }).map(segmentName => ({ n: segmentName })) + k: segmentChanges.filter(segment => { + return segment.added.indexOf(matchingKey) > -1; + }).map(segment => ({ n: segment.name })) } }; } @@ -79,10 +78,11 @@ export function setRolloutPlan(log: ILogger, rolloutPlan: RolloutPlan, storage: if (memberships.ls && largeSegments) largeSegments.resetSegments(memberships.ls!); } } else { // add segments data (server-side) - Object.keys(segmentsData).forEach(segmentName => { - const segmentKeys = segmentsData[segmentName]; - segments.update(segmentName, segmentKeys, [], -1); - }); + if (segmentChanges) { + segmentChanges.forEach(segment => { + segments.update(segment.name, segment.added, segment.removed, segment.till); + }); + } } } @@ -91,21 +91,32 @@ export function setRolloutPlan(log: ILogger, rolloutPlan: RolloutPlan, storage: * If `keys` are provided, the memberships for those keys is returned, to protect segments data. * Otherwise, the segments data is returned. 
*/ -export function getRolloutPlan(log: ILogger, storage: IStorageSync, keys?: SplitIO.SplitKey[]): RolloutPlan { +export function getRolloutPlan(log: ILogger, storage: IStorageSync, options: SplitIO.RolloutPlanOptions = {}): RolloutPlan { - log.debug(`storage: get feature flags and segments${keys ? ` for keys ${keys}` : ''}`); + const { keys, exposeSegments } = options; + const { splits, segments, rbSegments } = storage; + + log.debug(`storage: get feature flags${keys ? `, and memberships for keys ${keys}` : ''}${exposeSegments ? ', and segments' : ''}`); return { - since: storage.splits.getChangeNumber(), - flags: storage.splits.getAll(), - rbSince: storage.rbSegments.getChangeNumber(), - rbSegments: storage.rbSegments.getAll(), - segments: keys ? - undefined : // @ts-ignore accessing private prop - Object.keys(storage.segments.segmentCache).reduce((prev, cur) => { // @ts-ignore accessing private prop - prev[cur] = setToArray(storage.segments.segmentCache[cur] as Set); - return prev; - }, {}), + splitChanges: { + ff: { + t: splits.getChangeNumber(), + d: splits.getAll(), + }, + rbs: { + t: rbSegments.getChangeNumber(), + d: rbSegments.getAll(), + } + }, + segmentChanges: exposeSegments ? // @ts-ignore accessing private prop + Object.keys(segments.segmentCache).map(segmentName => ({ + name: segmentName, // @ts-ignore + added: setToArray(segments.segmentCache[segmentName] as Set), + removed: [], + till: segments.getChangeNumber(segmentName)! + })) : + undefined, memberships: keys ? keys.reduce>((prev, key) => { if (storage.shared) { diff --git a/types/splitio.d.ts b/types/splitio.d.ts index 3ffc3db6..3a9fe72d 100644 --- a/types/splitio.d.ts +++ b/types/splitio.d.ts @@ -1031,6 +1031,23 @@ declare namespace SplitIO { * A JSON-serializable plain object that defines the format of rollout plan data to preload the SDK cache with feature flags and segments. */ type RolloutPlan = Object; + /** + * Options for the `factory.getRolloutPlan` method. 
+ */ + type RolloutPlanOptions = { + /** + * Optional list of keys to generate the rollout plan snapshot with the memberships of the given keys. + * + * @defaultValue `undefined` + */ + keys?: SplitKey[]; + /** + * Optional flag to expose segments data in the rollout plan snapshot. + * + * @defaultValue `false` + */ + exposeSegments?: boolean; + }; /** * Impression listener interface. This is the interface that needs to be implemented * by the element you provide to the SDK as impression listener. @@ -1580,7 +1597,7 @@ declare namespace SplitIO { * @param keys - Optional list of keys to generate the rollout plan snapshot with the memberships of the given keys, rather than the complete segments data. * @returns The current snapshot of the SDK rollout plan. */ - getRolloutPlan(keys?: SplitKey[]): RolloutPlan; + getRolloutPlan(options?: RolloutPlanOptions): RolloutPlan; } /** * This represents the interface for the SDK instance for server-side with asynchronous storage. From c20e74f76d0b336ea2bb756a1f222135b6cf1397 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Thu, 4 Sep 2025 12:03:19 -0300 Subject: [PATCH 20/30] refactor: do not mutate FF definitions when parsing matchers --- src/evaluator/convertions/index.ts | 10 ++++++++ src/evaluator/matchersTransform/index.ts | 7 +++--- src/storages/__tests__/dataLoader.spec.ts | 4 ++-- src/storages/dataLoader.ts | 29 ++++++++++------------- 4 files changed, 28 insertions(+), 22 deletions(-) diff --git a/src/evaluator/convertions/index.ts b/src/evaluator/convertions/index.ts index 7d7384d7..acad8017 100644 --- a/src/evaluator/convertions/index.ts +++ b/src/evaluator/convertions/index.ts @@ -1,3 +1,5 @@ +import { IBetweenMatcherData } from '../../dtos/types'; + export function zeroSinceHH(millisSinceEpoch: number): number { return new Date(millisSinceEpoch).setUTCHours(0, 0, 0, 0); } @@ -5,3 +7,11 @@ export function zeroSinceHH(millisSinceEpoch: number): number { export function zeroSinceSS(millisSinceEpoch: number): 
number { return new Date(millisSinceEpoch).setUTCSeconds(0, 0); } + +export function betweenDateTimeTransform(betweenMatcherData: IBetweenMatcherData): IBetweenMatcherData { + return { + dataType: betweenMatcherData.dataType, + start: zeroSinceSS(betweenMatcherData.start), + end: zeroSinceSS(betweenMatcherData.end) + }; +} diff --git a/src/evaluator/matchersTransform/index.ts b/src/evaluator/matchersTransform/index.ts index 6219c4dc..075ea9f0 100644 --- a/src/evaluator/matchersTransform/index.ts +++ b/src/evaluator/matchersTransform/index.ts @@ -3,7 +3,7 @@ import { matcherTypes, matcherTypesMapper, matcherDataTypes } from '../matchers/ import { segmentTransform } from './segment'; import { whitelistTransform } from './whitelist'; import { numericTransform } from './unaryNumeric'; -import { zeroSinceHH, zeroSinceSS } from '../convertions'; +import { zeroSinceHH, zeroSinceSS, betweenDateTimeTransform } from '../convertions'; import { IBetweenMatcherData, IInLargeSegmentMatcherData, IInSegmentMatcherData, ISplitMatcher, IUnaryNumericMatcherData } from '../../dtos/types'; import { IMatcherDto } from '../types'; @@ -32,7 +32,7 @@ export function matchersTransform(matchers: ISplitMatcher[]): IMatcherDto[] { let type = matcherTypesMapper(matcherType); // As default input data type we use string (even for ALL_KEYS) let dataType = matcherDataTypes.STRING; - let value = undefined; + let value; if (type === matcherTypes.IN_SEGMENT) { value = segmentTransform(userDefinedSegmentMatcherData as IInSegmentMatcherData); @@ -60,8 +60,7 @@ export function matchersTransform(matchers: ISplitMatcher[]): IMatcherDto[] { dataType = matcherDataTypes.NUMBER; if (value.dataType === 'DATETIME') { - value.start = zeroSinceSS(value.start); - value.end = zeroSinceSS(value.end); + value = betweenDateTimeTransform(value); dataType = matcherDataTypes.DATETIME; } } else if (type === matcherTypes.BETWEEN_SEMVER) { diff --git a/src/storages/__tests__/dataLoader.spec.ts 
b/src/storages/__tests__/dataLoader.spec.ts index 41dbde30..0225d3be 100644 --- a/src/storages/__tests__/dataLoader.spec.ts +++ b/src/storages/__tests__/dataLoader.spec.ts @@ -21,8 +21,8 @@ describe('getRolloutPlan & setRolloutPlan (client-side)', () => { const expectedRolloutPlan = { splitChanges: { - ff: { d: [{ name: 'split1' }], t: 123 }, - rbs: { d: [{ name: 'rbs1' }], t: 321 } + ff: { d: [{ name: 'split1' }], t: 123, s: -1 }, + rbs: { d: [{ name: 'rbs1' }], t: 321, s: -1 } }, memberships: { [fullSettings.core.key as string]: { ms: { k: [{ n: 'segment1' }] }, ls: { k: [] } }, diff --git a/src/storages/dataLoader.ts b/src/storages/dataLoader.ts index f8741af3..62f74d5e 100644 --- a/src/storages/dataLoader.ts +++ b/src/storages/dataLoader.ts @@ -102,10 +102,12 @@ export function getRolloutPlan(log: ILogger, storage: IStorageSync, options: Spl splitChanges: { ff: { t: splits.getChangeNumber(), + s: -1, d: splits.getAll(), }, rbs: { t: rbSegments.getChangeNumber(), + s: -1, d: rbSegments.getAll(), } }, @@ -119,27 +121,22 @@ export function getRolloutPlan(log: ILogger, storage: IStorageSync, options: Spl undefined, memberships: keys ? keys.reduce>((prev, key) => { - if (storage.shared) { - // Client-side segments - // @ts-ignore accessing private prop - const sharedStorage = storage.shared(key); - prev[getMatching(key)] = { - ms: { - // @ts-ignore accessing private prop + const matchingKey = getMatching(key); + if (storage.shared) { // Client-side segments + const sharedStorage = storage.shared(matchingKey); + prev[matchingKey] = { + ms: { // @ts-ignore k: Object.keys(sharedStorage.segments.segmentCache).map(segmentName => ({ n: segmentName })), }, - ls: sharedStorage.largeSegments ? { - // @ts-ignore accessing private prop + ls: sharedStorage.largeSegments ? 
{ // @ts-ignore k: Object.keys(sharedStorage.largeSegments.segmentCache).map(segmentName => ({ n: segmentName })), } : undefined }; - } else { - prev[getMatching(key)] = { - ms: { - // Server-side segments - // @ts-ignore accessing private prop - k: Object.keys(storage.segments.segmentCache).reduce((prev, segmentName) => { // @ts-ignore accessing private prop - return storage.segments.segmentCache[segmentName].has(key) ? + } else { // Server-side segments + prev[matchingKey] = { + ms: { // @ts-ignore + k: Object.keys(storage.segments.segmentCache).reduce((prev, segmentName) => { // @ts-ignore + return storage.segments.segmentCache[segmentName].has(matchingKey) ? prev!.concat({ n: segmentName }) : prev; }, []) From c65b3d026f3edd4a4fc618771243e99fec5f4ca6 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Thu, 4 Sep 2025 14:26:51 -0300 Subject: [PATCH 21/30] refactor: call setRolloutPlan outside storage, to generalize to any storage --- src/sdkClient/sdkClientMethodCS.ts | 9 +++- src/sdkFactory/index.ts | 12 ++++- src/storages/__tests__/dataLoader.spec.ts | 59 ++++++++++------------ src/storages/inLocalStorage/index.ts | 6 +-- src/storages/inMemory/InMemoryStorageCS.ts | 51 +++++-------------- 5 files changed, 61 insertions(+), 76 deletions(-) diff --git a/src/sdkClient/sdkClientMethodCS.ts b/src/sdkClient/sdkClientMethodCS.ts index ebc755a1..280e9509 100644 --- a/src/sdkClient/sdkClientMethodCS.ts +++ b/src/sdkClient/sdkClientMethodCS.ts @@ -9,13 +9,16 @@ import { RETRIEVE_CLIENT_DEFAULT, NEW_SHARED_CLIENT, RETRIEVE_CLIENT_EXISTING, L import { SDK_SEGMENTS_ARRIVED } from '../readiness/constants'; import { ISdkFactoryContext } from '../sdkFactory/types'; import { buildInstanceId } from './identity'; +import { isConsumerMode } from '../utils/settingsValidation/mode'; +import { setRolloutPlan } from '../storages/dataLoader'; +import { ISegmentsCacheSync } from '../storages/types'; /** * Factory of client method for the client-side API variant where TT is ignored. 
* Therefore, clients don't have a bound TT for the track method. */ export function sdkClientMethodCSFactory(params: ISdkFactoryContext): (key?: SplitIO.SplitKey) => SplitIO.IBrowserClient { - const { clients, storage, syncManager, sdkReadinessManager, settings: { core: { key }, log } } = params; + const { clients, storage, syncManager, sdkReadinessManager, settings: { core: { key }, log, initialRolloutPlan, mode } } = params; const mainClientInstance = clientCSDecorator( log, @@ -56,6 +59,10 @@ export function sdkClientMethodCSFactory(params: ISdkFactoryContext): (key?: Spl sharedSdkReadiness.readinessManager.segments.emit(SDK_SEGMENTS_ARRIVED); }); + if (sharedStorage && initialRolloutPlan && !isConsumerMode(mode)) { + setRolloutPlan(log, initialRolloutPlan, { segments: sharedStorage.segments as ISegmentsCacheSync, largeSegments: sharedStorage.largeSegments as ISegmentsCacheSync }, matchingKey); + } + // 3 possibilities: // - Standalone mode: both syncManager and sharedSyncManager are defined // - Consumer mode: both syncManager and sharedSyncManager are undefined diff --git a/src/sdkFactory/index.ts b/src/sdkFactory/index.ts index 47cf69c3..decbfa9a 100644 --- a/src/sdkFactory/index.ts +++ b/src/sdkFactory/index.ts @@ -14,6 +14,10 @@ import { strategyOptimizedFactory } from '../trackers/strategy/strategyOptimized import { strategyNoneFactory } from '../trackers/strategy/strategyNone'; import { uniqueKeysTrackerFactory } from '../trackers/uniqueKeysTracker'; import { DEBUG, OPTIMIZED } from '../utils/constants'; +import { setRolloutPlan } from '../storages/dataLoader'; +import { isConsumerMode } from '../utils/settingsValidation/mode'; +import { IStorageSync } from '../storages/types'; +import { getMatching } from '../utils/key'; /** * Modular SDK factory @@ -24,7 +28,7 @@ export function sdkFactory(params: ISdkFactoryParams): SplitIO.ISDK | SplitIO.IA syncManagerFactory, SignalListener, impressionsObserverFactory, integrationsManagerFactory, sdkManagerFactory, 
sdkClientMethodFactory, filterAdapterFactory, lazyInit } = params; - const { log, sync: { impressionsMode } } = settings; + const { log, sync: { impressionsMode }, initialRolloutPlan, mode, core: { key } } = settings; // @TODO handle non-recoverable errors, such as, global `fetch` not available, invalid SDK Key, etc. // On non-recoverable errors, we should mark the SDK as destroyed and not start synchronization. @@ -57,7 +61,11 @@ export function sdkFactory(params: ISdkFactoryParams): SplitIO.ISDK | SplitIO.IA } }); - // @TODO add support for dataloader: `if (params.dataLoader) params.dataLoader(storage);` + if (initialRolloutPlan && !isConsumerMode(mode)) { + setRolloutPlan(log, initialRolloutPlan, storage as IStorageSync, key && getMatching(key)); + if ((storage as IStorageSync).splits.getChangeNumber() > -1) readiness.splits.emit(SDK_SPLITS_CACHE_LOADED); + } + const clients: Record = {}; const telemetryTracker = telemetryTrackerFactory(storage.telemetry, platform.now); const integrationsManager = integrationsManagerFactory && integrationsManagerFactory({ settings, storage, telemetryTracker }); diff --git a/src/storages/__tests__/dataLoader.spec.ts b/src/storages/__tests__/dataLoader.spec.ts index 0225d3be..9dca7faf 100644 --- a/src/storages/__tests__/dataLoader.spec.ts +++ b/src/storages/__tests__/dataLoader.spec.ts @@ -4,13 +4,9 @@ import { fullSettings } from '../../utils/settingsValidation/__tests__/settings. 
import { loggerMock } from '../../logger/__tests__/sdkLogger.mock'; import { IRBSegment, ISplit } from '../../dtos/types'; -import * as dataLoader from '../dataLoader'; +import { setRolloutPlan, getRolloutPlan } from '../dataLoader'; describe('getRolloutPlan & setRolloutPlan (client-side)', () => { - jest.spyOn(dataLoader, 'setRolloutPlan'); - const onReadyFromCacheCb = jest.fn(); - const onReadyCb = jest.fn(); - const otherKey = 'otherKey'; // @ts-expect-error Load server-side storage @@ -41,56 +37,54 @@ describe('getRolloutPlan & setRolloutPlan (client-side)', () => { }); test('using preloaded data (no memberships, no segments)', () => { - const rolloutPlan = dataLoader.getRolloutPlan(loggerMock, serverStorage); + const rolloutPlan = getRolloutPlan(loggerMock, serverStorage); - // Load client-side storage with preloaded data - const clientStorage = InMemoryStorageCSFactory({ settings: { ...fullSettings, initialRolloutPlan: rolloutPlan }, onReadyFromCacheCb, onReadyCb }); - expect(dataLoader.setRolloutPlan).toBeCalledTimes(1); - expect(onReadyFromCacheCb).toBeCalledTimes(1); + // @ts-expect-error Load client-side storage with preloaded data + const clientStorage = InMemoryStorageCSFactory({ settings: fullSettings }); + setRolloutPlan(loggerMock, rolloutPlan, clientStorage, fullSettings.core.key as string); // Shared client storage const sharedClientStorage = clientStorage.shared!(otherKey); - expect(dataLoader.setRolloutPlan).toBeCalledTimes(2); + setRolloutPlan(loggerMock, rolloutPlan, { segments: sharedClientStorage.segments, largeSegments: sharedClientStorage.largeSegments }, otherKey); expect(clientStorage.segments.getRegisteredSegments()).toEqual([]); expect(sharedClientStorage.segments.getRegisteredSegments()).toEqual([]); // Get preloaded data from client-side storage - expect(dataLoader.getRolloutPlan(loggerMock, clientStorage)).toEqual(rolloutPlan); + expect(getRolloutPlan(loggerMock, clientStorage)).toEqual(rolloutPlan); expect(rolloutPlan).toEqual({ 
...expectedRolloutPlan, memberships: undefined, segmentChanges: undefined }); }); test('using preloaded data with memberships', () => { - const rolloutPlan = dataLoader.getRolloutPlan(loggerMock, serverStorage, { keys: [fullSettings.core.key as string, otherKey] }); + const rolloutPlan = getRolloutPlan(loggerMock, serverStorage, { keys: [fullSettings.core.key as string, otherKey] }); - // Load client-side storage with preloaded data - const clientStorage = InMemoryStorageCSFactory({ settings: { ...fullSettings, initialRolloutPlan: rolloutPlan }, onReadyFromCacheCb, onReadyCb }); - expect(dataLoader.setRolloutPlan).toBeCalledTimes(1); - expect(onReadyFromCacheCb).toBeCalledTimes(1); + // @ts-expect-error Load client-side storage with preloaded data + const clientStorage = InMemoryStorageCSFactory({ settings: fullSettings }); + setRolloutPlan(loggerMock, rolloutPlan, clientStorage, fullSettings.core.key as string); // Shared client storage const sharedClientStorage = clientStorage.shared!(otherKey); - expect(dataLoader.setRolloutPlan).toBeCalledTimes(2); + setRolloutPlan(loggerMock, rolloutPlan, { segments: sharedClientStorage.segments, largeSegments: sharedClientStorage.largeSegments }, otherKey); expect(clientStorage.segments.getRegisteredSegments()).toEqual(['segment1']); expect(sharedClientStorage.segments.getRegisteredSegments()).toEqual(['segment1']); - // Get preloaded data from client-side storage - expect(dataLoader.getRolloutPlan(loggerMock, clientStorage, { keys: [fullSettings.core.key as string, otherKey] })).toEqual(rolloutPlan); - expect(rolloutPlan).toEqual({ ...expectedRolloutPlan, segmentChanges: undefined }); + // @TODO requires internal storage cache for `shared` storages + // // Get preloaded data from client-side storage + // expect(getRolloutPlan(loggerMock, clientStorage, { keys: [fullSettings.core.key as string, otherKey] })).toEqual(rolloutPlan); + // expect(rolloutPlan).toEqual({ ...expectedRolloutPlan, segmentChanges: undefined }); }); 
test('using preloaded data with segments', () => { - const rolloutPlan = dataLoader.getRolloutPlan(loggerMock, serverStorage, { exposeSegments: true }); + const rolloutPlan = getRolloutPlan(loggerMock, serverStorage, { exposeSegments: true }); - // Load client-side storage with preloaded data - const clientStorage = InMemoryStorageCSFactory({ settings: { ...fullSettings, initialRolloutPlan: rolloutPlan }, onReadyFromCacheCb, onReadyCb }); - expect(dataLoader.setRolloutPlan).toBeCalledTimes(1); - expect(onReadyFromCacheCb).toBeCalledTimes(1); + // @ts-expect-error Load client-side storage with preloaded data + const clientStorage = InMemoryStorageCSFactory({ settings: fullSettings }); + setRolloutPlan(loggerMock, rolloutPlan, clientStorage, fullSettings.core.key as string); // Shared client storage const sharedClientStorage = clientStorage.shared!(otherKey); - expect(dataLoader.setRolloutPlan).toBeCalledTimes(2); + setRolloutPlan(loggerMock, rolloutPlan, { segments: sharedClientStorage.segments, largeSegments: sharedClientStorage.largeSegments }, otherKey); expect(clientStorage.segments.getRegisteredSegments()).toEqual(['segment1']); expect(sharedClientStorage.segments.getRegisteredSegments()).toEqual(['segment1']); @@ -99,16 +93,15 @@ describe('getRolloutPlan & setRolloutPlan (client-side)', () => { }); test('using preloaded data with memberships and segments', () => { - const rolloutPlan = dataLoader.getRolloutPlan(loggerMock, serverStorage, { keys: [fullSettings.core.key as string], exposeSegments: true }); + const rolloutPlan = getRolloutPlan(loggerMock, serverStorage, { keys: [fullSettings.core.key as string], exposeSegments: true }); - // Load client-side storage with preloaded data - const clientStorage = InMemoryStorageCSFactory({ settings: { ...fullSettings, initialRolloutPlan: rolloutPlan }, onReadyFromCacheCb, onReadyCb }); - expect(dataLoader.setRolloutPlan).toBeCalledTimes(1); - expect(onReadyFromCacheCb).toBeCalledTimes(1); + // @ts-expect-error Load 
client-side storage with preloaded data + const clientStorage = InMemoryStorageCSFactory({ settings: fullSettings }); + setRolloutPlan(loggerMock, rolloutPlan, clientStorage, fullSettings.core.key as string); // Shared client storage const sharedClientStorage = clientStorage.shared!(otherKey); - expect(dataLoader.setRolloutPlan).toBeCalledTimes(2); + setRolloutPlan(loggerMock, rolloutPlan, { segments: sharedClientStorage.segments, largeSegments: sharedClientStorage.largeSegments }, otherKey); expect(clientStorage.segments.getRegisteredSegments()).toEqual(['segment1']); // main client membership is set via the rollout plan `memberships` field expect(sharedClientStorage.segments.getRegisteredSegments()).toEqual(['segment1']); // shared client membership is set via the rollout plan `segmentChanges` field diff --git a/src/storages/inLocalStorage/index.ts b/src/storages/inLocalStorage/index.ts index 03d5bfc1..8924b84d 100644 --- a/src/storages/inLocalStorage/index.ts +++ b/src/storages/inLocalStorage/index.ts @@ -26,9 +26,9 @@ export function InLocalStorage(options: SplitIO.InLocalStorageOptions = {}): ISt function InLocalStorageCSFactory(params: IStorageFactoryParams): IStorageSync { - // Fallback to InMemoryStorage if LocalStorage API is not available or preloaded data is provided - if (!isLocalStorageAvailable() || params.settings.initialRolloutPlan) { - params.settings.log.warn(LOG_PREFIX + 'LocalStorage API is unavailable or `initialRolloutPlan` is provided. Falling back to default MEMORY storage'); + // Fallback to InMemoryStorage if LocalStorage API is not available + if (!isLocalStorageAvailable()) { + params.settings.log.warn(LOG_PREFIX + 'LocalStorage API is unavailable. 
Falling back to default MEMORY storage'); return InMemoryStorageCSFactory(params); } diff --git a/src/storages/inMemory/InMemoryStorageCS.ts b/src/storages/inMemory/InMemoryStorageCS.ts index e6b5becc..5ae8351c 100644 --- a/src/storages/inMemory/InMemoryStorageCS.ts +++ b/src/storages/inMemory/InMemoryStorageCS.ts @@ -7,8 +7,6 @@ import { ImpressionCountsCacheInMemory } from './ImpressionCountsCacheInMemory'; import { LOCALHOST_MODE, STORAGE_MEMORY } from '../../utils/constants'; import { shouldRecordTelemetry, TelemetryCacheInMemory } from './TelemetryCacheInMemory'; import { UniqueKeysCacheInMemoryCS } from './UniqueKeysCacheInMemoryCS'; -import { getMatching } from '../../utils/key'; -import { setRolloutPlan } from '../dataLoader'; import { RBSegmentsCacheInMemory } from './RBSegmentsCacheInMemory'; /** @@ -17,9 +15,7 @@ import { RBSegmentsCacheInMemory } from './RBSegmentsCacheInMemory'; * @param params - parameters required by EventsCacheSync */ export function InMemoryStorageCSFactory(params: IStorageFactoryParams): IStorageSync { - const { settings: { log, scheduler: { impressionsQueueSize, eventsQueueSize }, sync: { __splitFiltersValidation }, initialRolloutPlan }, onReadyFromCacheCb } = params; - - const storages: Record = {}; + const { settings: { scheduler: { impressionsQueueSize, eventsQueueSize }, sync: { __splitFiltersValidation } } } = params; const splits = new SplitsCacheInMemory(__splitFiltersValidation); const rbSegments = new RBSegmentsCacheInMemory(); @@ -40,31 +36,20 @@ export function InMemoryStorageCSFactory(params: IStorageFactoryParams): IStorag destroy() { }, // When using shared instantiation with MEMORY we reuse everything but segments (they are unique per key) - shared(matchingKey: string) { - if (!storages[matchingKey]) { - const segments = new MySegmentsCacheInMemory(); - const largeSegments = new MySegmentsCacheInMemory(); - - if (initialRolloutPlan) { - setRolloutPlan(log, initialRolloutPlan, { segments, largeSegments }, 
matchingKey); - } - - storages[matchingKey] = { - splits: this.splits, - rbSegments: this.rbSegments, - segments, - largeSegments, - impressions: this.impressions, - impressionCounts: this.impressionCounts, - events: this.events, - telemetry: this.telemetry, - uniqueKeys: this.uniqueKeys, - - destroy() { } - }; - } + shared() { + return { + splits: this.splits, + rbSegments: this.rbSegments, + segments: new MySegmentsCacheInMemory(), + largeSegments: new MySegmentsCacheInMemory(), + impressions: this.impressions, + impressionCounts: this.impressionCounts, + events: this.events, + telemetry: this.telemetry, + uniqueKeys: this.uniqueKeys, - return storages[matchingKey]; + destroy() { } + }; }, }; @@ -78,14 +63,6 @@ export function InMemoryStorageCSFactory(params: IStorageFactoryParams): IStorag storage.uniqueKeys.track = noopTrack; } - const matchingKey = getMatching(params.settings.core.key); - storages[matchingKey] = storage; - - if (initialRolloutPlan) { - setRolloutPlan(log, initialRolloutPlan, storage, matchingKey); - if (splits.getChangeNumber() > -1) onReadyFromCacheCb(); - } - return storage; } From 9ddadd63fb02eb7a611ed6e992335f75aa572566 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 5 Sep 2025 14:39:10 -0300 Subject: [PATCH 22/30] refactor: mode rollout plan validation --- src/sdkClient/sdkClientMethodCS.ts | 5 +- src/sdkFactory/index.ts | 5 +- src/storages/__tests__/dataLoader.spec.ts | 60 +++++++++++++------- src/storages/dataLoader.ts | 22 ++++--- src/storages/inLocalStorage/validateCache.ts | 4 +- src/utils/settingsValidation/index.ts | 4 ++ 6 files changed, 63 insertions(+), 37 deletions(-) diff --git a/src/sdkClient/sdkClientMethodCS.ts b/src/sdkClient/sdkClientMethodCS.ts index 280e9509..c1f16676 100644 --- a/src/sdkClient/sdkClientMethodCS.ts +++ b/src/sdkClient/sdkClientMethodCS.ts @@ -9,7 +9,6 @@ import { RETRIEVE_CLIENT_DEFAULT, NEW_SHARED_CLIENT, RETRIEVE_CLIENT_EXISTING, L import { SDK_SEGMENTS_ARRIVED } from 
'../readiness/constants'; import { ISdkFactoryContext } from '../sdkFactory/types'; import { buildInstanceId } from './identity'; -import { isConsumerMode } from '../utils/settingsValidation/mode'; import { setRolloutPlan } from '../storages/dataLoader'; import { ISegmentsCacheSync } from '../storages/types'; @@ -18,7 +17,7 @@ import { ISegmentsCacheSync } from '../storages/types'; * Therefore, clients don't have a bound TT for the track method. */ export function sdkClientMethodCSFactory(params: ISdkFactoryContext): (key?: SplitIO.SplitKey) => SplitIO.IBrowserClient { - const { clients, storage, syncManager, sdkReadinessManager, settings: { core: { key }, log, initialRolloutPlan, mode } } = params; + const { clients, storage, syncManager, sdkReadinessManager, settings: { core: { key }, log, initialRolloutPlan } } = params; const mainClientInstance = clientCSDecorator( log, @@ -59,7 +58,7 @@ export function sdkClientMethodCSFactory(params: ISdkFactoryContext): (key?: Spl sharedSdkReadiness.readinessManager.segments.emit(SDK_SEGMENTS_ARRIVED); }); - if (sharedStorage && initialRolloutPlan && !isConsumerMode(mode)) { + if (sharedStorage && initialRolloutPlan) { setRolloutPlan(log, initialRolloutPlan, { segments: sharedStorage.segments as ISegmentsCacheSync, largeSegments: sharedStorage.largeSegments as ISegmentsCacheSync }, matchingKey); } diff --git a/src/sdkFactory/index.ts b/src/sdkFactory/index.ts index decbfa9a..994a529d 100644 --- a/src/sdkFactory/index.ts +++ b/src/sdkFactory/index.ts @@ -15,7 +15,6 @@ import { strategyNoneFactory } from '../trackers/strategy/strategyNone'; import { uniqueKeysTrackerFactory } from '../trackers/uniqueKeysTracker'; import { DEBUG, OPTIMIZED } from '../utils/constants'; import { setRolloutPlan } from '../storages/dataLoader'; -import { isConsumerMode } from '../utils/settingsValidation/mode'; import { IStorageSync } from '../storages/types'; import { getMatching } from '../utils/key'; @@ -28,7 +27,7 @@ export function 
sdkFactory(params: ISdkFactoryParams): SplitIO.ISDK | SplitIO.IA syncManagerFactory, SignalListener, impressionsObserverFactory, integrationsManagerFactory, sdkManagerFactory, sdkClientMethodFactory, filterAdapterFactory, lazyInit } = params; - const { log, sync: { impressionsMode }, initialRolloutPlan, mode, core: { key } } = settings; + const { log, sync: { impressionsMode }, initialRolloutPlan, core: { key } } = settings; // @TODO handle non-recoverable errors, such as, global `fetch` not available, invalid SDK Key, etc. // On non-recoverable errors, we should mark the SDK as destroyed and not start synchronization. @@ -61,7 +60,7 @@ export function sdkFactory(params: ISdkFactoryParams): SplitIO.ISDK | SplitIO.IA } }); - if (initialRolloutPlan && !isConsumerMode(mode)) { + if (initialRolloutPlan) { setRolloutPlan(log, initialRolloutPlan, storage as IStorageSync, key && getMatching(key)); if ((storage as IStorageSync).splits.getChangeNumber() > -1) readiness.splits.emit(SDK_SPLITS_CACHE_LOADED); } diff --git a/src/storages/__tests__/dataLoader.spec.ts b/src/storages/__tests__/dataLoader.spec.ts index 9dca7faf..c8589353 100644 --- a/src/storages/__tests__/dataLoader.spec.ts +++ b/src/storages/__tests__/dataLoader.spec.ts @@ -4,34 +4,54 @@ import { fullSettings } from '../../utils/settingsValidation/__tests__/settings. 
import { loggerMock } from '../../logger/__tests__/sdkLogger.mock'; import { IRBSegment, ISplit } from '../../dtos/types'; -import { setRolloutPlan, getRolloutPlan } from '../dataLoader'; +import { validateRolloutPlan, setRolloutPlan, getRolloutPlan } from '../dataLoader'; + +const otherKey = 'otherKey'; +const expectedRolloutPlan = { + splitChanges: { + ff: { d: [{ name: 'split1' }], t: 123, s: -1 }, + rbs: { d: [{ name: 'rbs1' }], t: 321, s: -1 } + }, + memberships: { + [fullSettings.core.key as string]: { ms: { k: [{ n: 'segment1' }] }, ls: { k: [] } }, + [otherKey]: { ms: { k: [{ n: 'segment1' }] }, ls: { k: [] } } + }, + segmentChanges: [{ + name: 'segment1', + added: [fullSettings.core.key as string, otherKey], + removed: [], + till: 123 + }] +}; + +describe('validateRolloutPlan', () => { + afterEach(() => { + loggerMock.mockClear(); + }); -describe('getRolloutPlan & setRolloutPlan (client-side)', () => { - const otherKey = 'otherKey'; + test('valid rollout plan and mode', () => { + expect(validateRolloutPlan(loggerMock, { mode: 'standalone', initialRolloutPlan: expectedRolloutPlan } as any)).toEqual(expectedRolloutPlan); + expect(loggerMock.error).not.toHaveBeenCalled(); + }); + + test('invalid rollout plan', () => { + expect(validateRolloutPlan(loggerMock, { mode: 'standalone', initialRolloutPlan: {} } as any)).toBeUndefined(); + expect(loggerMock.error).toHaveBeenCalledWith('storage: invalid rollout plan provided'); + }); + test('invalid mode', () => { + expect(validateRolloutPlan(loggerMock, { mode: 'consumer', initialRolloutPlan: expectedRolloutPlan } as any)).toBeUndefined(); + expect(loggerMock.warn).toHaveBeenCalledWith('storage: initial rollout plan is ignored in consumer mode'); + }); +}); + +describe('getRolloutPlan & setRolloutPlan (client-side)', () => { // @ts-expect-error Load server-side storage const serverStorage = InMemoryStorageFactory({ settings: fullSettings }); serverStorage.splits.update([{ name: 'split1' } as ISplit], [], 123); 
serverStorage.rbSegments.update([{ name: 'rbs1' } as IRBSegment], [], 321); serverStorage.segments.update('segment1', [fullSettings.core.key as string, otherKey], [], 123); - const expectedRolloutPlan = { - splitChanges: { - ff: { d: [{ name: 'split1' }], t: 123, s: -1 }, - rbs: { d: [{ name: 'rbs1' }], t: 321, s: -1 } - }, - memberships: { - [fullSettings.core.key as string]: { ms: { k: [{ n: 'segment1' }] }, ls: { k: [] } }, - [otherKey]: { ms: { k: [{ n: 'segment1' }] }, ls: { k: [] } } - }, - segmentChanges: [{ - name: 'segment1', - added: [fullSettings.core.key as string, otherKey], - removed: [], - till: 123 - }] - }; - afterEach(() => { jest.clearAllMocks(); }); diff --git a/src/storages/dataLoader.ts b/src/storages/dataLoader.ts index 62f74d5e..f61be538 100644 --- a/src/storages/dataLoader.ts +++ b/src/storages/dataLoader.ts @@ -5,6 +5,7 @@ import { getMatching } from '../utils/key'; import { IMembershipsResponse, IMySegmentsResponse, ISegmentChangesResponse, ISplitChangesResponse } from '../dtos/types'; import { ILogger } from '../logger/types'; import { isObject } from '../utils/lang'; +import { isConsumerMode } from '../utils/settingsValidation/mode'; export type RolloutPlan = { /** @@ -27,10 +28,18 @@ export type RolloutPlan = { /** * Validates if the given rollout plan is valid. 
*/ -function validateRolloutPlan(rolloutPlan: unknown): rolloutPlan is RolloutPlan { - if (isObject(rolloutPlan) && isObject((rolloutPlan as any).splitChanges)) return true; +export function validateRolloutPlan(log: ILogger, settings: SplitIO.ISettings): RolloutPlan | undefined { + const { mode, initialRolloutPlan } = settings; - return false; + if (isConsumerMode(mode)) { + log.warn('storage: initial rollout plan is ignored in consumer mode'); + return; + } + + if (isObject(initialRolloutPlan) && isObject((initialRolloutPlan as any).splitChanges)) return initialRolloutPlan as RolloutPlan; + + log.error('storage: invalid rollout plan provided'); + return; } /** @@ -39,12 +48,6 @@ function validateRolloutPlan(rolloutPlan: unknown): rolloutPlan is RolloutPlan { * Otherwise, the storage is handled as a server-side storage (segments is an instance of SegmentsCache). */ export function setRolloutPlan(log: ILogger, rolloutPlan: RolloutPlan, storage: { splits?: ISplitsCacheSync, rbSegments?: IRBSegmentsCacheSync, segments: ISegmentsCacheSync, largeSegments?: ISegmentsCacheSync }, matchingKey?: string) { - // Do not load data if current rollout plan is empty - if (!validateRolloutPlan(rolloutPlan)) { - log.error('storage: invalid rollout plan provided'); - return; - } - const { splits, rbSegments, segments, largeSegments } = storage; const { splitChanges: { ff, rbs } } = rolloutPlan; @@ -79,6 +82,7 @@ export function setRolloutPlan(log: ILogger, rolloutPlan: RolloutPlan, storage: } } else { // add segments data (server-side) if (segmentChanges) { + segments.clear(); segmentChanges.forEach(segment => { segments.update(segment.name, segment.added, segment.removed, segment.till); }); diff --git a/src/storages/inLocalStorage/validateCache.ts b/src/storages/inLocalStorage/validateCache.ts index 93d3144c..3fa54ec6 100644 --- a/src/storages/inLocalStorage/validateCache.ts +++ b/src/storages/inLocalStorage/validateCache.ts @@ -17,7 +17,7 @@ const MILLIS_IN_A_DAY = 86400000; * 
@returns `true` if cache should be cleared, `false` otherwise */ function validateExpiration(options: SplitIO.InLocalStorageOptions, settings: ISettings, keys: KeyBuilderCS, currentTimestamp: number, isThereCache: boolean) { - const { log } = settings; + const { log, initialRolloutPlan } = settings; // Check expiration const lastUpdatedTimestamp = parseInt(localStorage.getItem(keys.buildLastUpdatedKey()) as string, 10); @@ -41,7 +41,7 @@ function validateExpiration(options: SplitIO.InLocalStorageOptions, settings: IS } catch (e) { log.error(LOG_PREFIX + e); } - if (isThereCache) { + if (isThereCache && !initialRolloutPlan) { log.info(LOG_PREFIX + 'SDK key, flags filter criteria, or flags spec version has changed. Cleaning up cache'); return true; } diff --git a/src/utils/settingsValidation/index.ts b/src/utils/settingsValidation/index.ts index 3c7ecfe7..1c300ed6 100644 --- a/src/utils/settingsValidation/index.ts +++ b/src/utils/settingsValidation/index.ts @@ -7,6 +7,7 @@ import { ISettingsValidationParams } from './types'; import { ISettings } from '../../types'; import { validateKey } from '../inputValidation/key'; import { ERROR_MIN_CONFIG_PARAM, LOG_PREFIX_CLIENT_INSTANTIATION } from '../../logger/constants'; +import { validateRolloutPlan } from '../../storages/dataLoader'; // Exported for telemetry export const base = { @@ -152,6 +153,9 @@ export function settingsValidation(config: unknown, validationParams: ISettingsV // @ts-ignore, modify readonly prop if (storage) withDefaults.storage = storage(withDefaults); + // @ts-ignore, modify readonly prop + if (withDefaults.initialRolloutPlan) withDefaults.initialRolloutPlan = validateRolloutPlan(log, withDefaults); + // Validate key and TT (for client-side) const maybeKey = withDefaults.core.key; if (validationParams.acceptKey) { From 590daa2ad21ba9cf9c1fe32afcd145c3f53d1a2c Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 5 Sep 2025 14:44:57 -0300 Subject: [PATCH 23/30] Separate getRolloutPlan and 
setRolloutPlan for bundle size reduction --- src/sdkClient/sdkClientMethodCS.ts | 2 +- src/sdkFactory/index.ts | 2 +- src/storages/__tests__/dataLoader.spec.ts | 3 +- src/storages/dataLoader.ts | 157 ---------------------- src/storages/getRolloutPlan.ts | 73 ++++++++++ src/storages/setRolloutPlan.ts | 71 ++++++++++ src/storages/types.ts | 20 ++- src/types.ts | 2 +- src/utils/settingsValidation/index.ts | 2 +- 9 files changed, 169 insertions(+), 163 deletions(-) delete mode 100644 src/storages/dataLoader.ts create mode 100644 src/storages/getRolloutPlan.ts create mode 100644 src/storages/setRolloutPlan.ts diff --git a/src/sdkClient/sdkClientMethodCS.ts b/src/sdkClient/sdkClientMethodCS.ts index c1f16676..b68481a9 100644 --- a/src/sdkClient/sdkClientMethodCS.ts +++ b/src/sdkClient/sdkClientMethodCS.ts @@ -9,7 +9,7 @@ import { RETRIEVE_CLIENT_DEFAULT, NEW_SHARED_CLIENT, RETRIEVE_CLIENT_EXISTING, L import { SDK_SEGMENTS_ARRIVED } from '../readiness/constants'; import { ISdkFactoryContext } from '../sdkFactory/types'; import { buildInstanceId } from './identity'; -import { setRolloutPlan } from '../storages/dataLoader'; +import { setRolloutPlan } from '../storages/setRolloutPlan'; import { ISegmentsCacheSync } from '../storages/types'; /** diff --git a/src/sdkFactory/index.ts b/src/sdkFactory/index.ts index 994a529d..d1dcac43 100644 --- a/src/sdkFactory/index.ts +++ b/src/sdkFactory/index.ts @@ -14,7 +14,7 @@ import { strategyOptimizedFactory } from '../trackers/strategy/strategyOptimized import { strategyNoneFactory } from '../trackers/strategy/strategyNone'; import { uniqueKeysTrackerFactory } from '../trackers/uniqueKeysTracker'; import { DEBUG, OPTIMIZED } from '../utils/constants'; -import { setRolloutPlan } from '../storages/dataLoader'; +import { setRolloutPlan } from '../storages/setRolloutPlan'; import { IStorageSync } from '../storages/types'; import { getMatching } from '../utils/key'; diff --git a/src/storages/__tests__/dataLoader.spec.ts 
b/src/storages/__tests__/dataLoader.spec.ts index c8589353..f6afd300 100644 --- a/src/storages/__tests__/dataLoader.spec.ts +++ b/src/storages/__tests__/dataLoader.spec.ts @@ -4,7 +4,8 @@ import { fullSettings } from '../../utils/settingsValidation/__tests__/settings. import { loggerMock } from '../../logger/__tests__/sdkLogger.mock'; import { IRBSegment, ISplit } from '../../dtos/types'; -import { validateRolloutPlan, setRolloutPlan, getRolloutPlan } from '../dataLoader'; +import { validateRolloutPlan, setRolloutPlan } from '../setRolloutPlan'; +import { getRolloutPlan } from '../getRolloutPlan'; const otherKey = 'otherKey'; const expectedRolloutPlan = { diff --git a/src/storages/dataLoader.ts b/src/storages/dataLoader.ts deleted file mode 100644 index f61be538..00000000 --- a/src/storages/dataLoader.ts +++ /dev/null @@ -1,157 +0,0 @@ -import SplitIO from '../../types/splitio'; -import { IRBSegmentsCacheSync, ISegmentsCacheSync, ISplitsCacheSync, IStorageSync } from './types'; -import { setToArray } from '../utils/lang/sets'; -import { getMatching } from '../utils/key'; -import { IMembershipsResponse, IMySegmentsResponse, ISegmentChangesResponse, ISplitChangesResponse } from '../dtos/types'; -import { ILogger } from '../logger/types'; -import { isObject } from '../utils/lang'; -import { isConsumerMode } from '../utils/settingsValidation/mode'; - -export type RolloutPlan = { - /** - * Feature flags and rule-based segments. - */ - splitChanges: ISplitChangesResponse; - /** - * Optional map of matching keys to their memberships. - */ - memberships?: { - [matchingKey: string]: IMembershipsResponse; - }; - /** - * Optional list of standard segments. - * This property is ignored if `memberships` is provided. - */ - segmentChanges?: ISegmentChangesResponse[]; -}; - -/** - * Validates if the given rollout plan is valid. 
- */ -export function validateRolloutPlan(log: ILogger, settings: SplitIO.ISettings): RolloutPlan | undefined { - const { mode, initialRolloutPlan } = settings; - - if (isConsumerMode(mode)) { - log.warn('storage: initial rollout plan is ignored in consumer mode'); - return; - } - - if (isObject(initialRolloutPlan) && isObject((initialRolloutPlan as any).splitChanges)) return initialRolloutPlan as RolloutPlan; - - log.error('storage: invalid rollout plan provided'); - return; -} - -/** - * Sets the given synchronous storage with the provided rollout plan snapshot. - * If `matchingKey` is provided, the storage is handled as a client-side storage (segments and largeSegments are instances of MySegmentsCache). - * Otherwise, the storage is handled as a server-side storage (segments is an instance of SegmentsCache). - */ -export function setRolloutPlan(log: ILogger, rolloutPlan: RolloutPlan, storage: { splits?: ISplitsCacheSync, rbSegments?: IRBSegmentsCacheSync, segments: ISegmentsCacheSync, largeSegments?: ISegmentsCacheSync }, matchingKey?: string) { - const { splits, rbSegments, segments, largeSegments } = storage; - const { splitChanges: { ff, rbs } } = rolloutPlan; - - log.debug(`storage: set feature flags and segments${matchingKey ? 
` for key ${matchingKey}` : ''}`); - - if (splits && ff) { - splits.clear(); - splits.update(ff.d, [], ff.t); - } - - if (rbSegments && rbs) { - rbSegments.clear(); - rbSegments.update(rbs.d, [], rbs.t); - } - - const segmentChanges = rolloutPlan.segmentChanges; - if (matchingKey) { // add memberships data (client-side) - let memberships = rolloutPlan.memberships && rolloutPlan.memberships[matchingKey]; - if (!memberships && segmentChanges) { - memberships = { - ms: { - k: segmentChanges.filter(segment => { - return segment.added.indexOf(matchingKey) > -1; - }).map(segment => ({ n: segment.name })) - } - }; - } - - if (memberships) { - if (memberships.ms) segments.resetSegments(memberships.ms!); - if (memberships.ls && largeSegments) largeSegments.resetSegments(memberships.ls!); - } - } else { // add segments data (server-side) - if (segmentChanges) { - segments.clear(); - segmentChanges.forEach(segment => { - segments.update(segment.name, segment.added, segment.removed, segment.till); - }); - } - } -} - -/** - * Gets the rollout plan snapshot from the given synchronous storage. - * If `keys` are provided, the memberships for those keys is returned, to protect segments data. - * Otherwise, the segments data is returned. - */ -export function getRolloutPlan(log: ILogger, storage: IStorageSync, options: SplitIO.RolloutPlanOptions = {}): RolloutPlan { - - const { keys, exposeSegments } = options; - const { splits, segments, rbSegments } = storage; - - log.debug(`storage: get feature flags${keys ? `, and memberships for keys ${keys}` : ''}${exposeSegments ? ', and segments' : ''}`); - - return { - splitChanges: { - ff: { - t: splits.getChangeNumber(), - s: -1, - d: splits.getAll(), - }, - rbs: { - t: rbSegments.getChangeNumber(), - s: -1, - d: rbSegments.getAll(), - } - }, - segmentChanges: exposeSegments ? 
// @ts-ignore accessing private prop - Object.keys(segments.segmentCache).map(segmentName => ({ - name: segmentName, // @ts-ignore - added: setToArray(segments.segmentCache[segmentName] as Set), - removed: [], - till: segments.getChangeNumber(segmentName)! - })) : - undefined, - memberships: keys ? - keys.reduce>((prev, key) => { - const matchingKey = getMatching(key); - if (storage.shared) { // Client-side segments - const sharedStorage = storage.shared(matchingKey); - prev[matchingKey] = { - ms: { // @ts-ignore - k: Object.keys(sharedStorage.segments.segmentCache).map(segmentName => ({ n: segmentName })), - }, - ls: sharedStorage.largeSegments ? { // @ts-ignore - k: Object.keys(sharedStorage.largeSegments.segmentCache).map(segmentName => ({ n: segmentName })), - } : undefined - }; - } else { // Server-side segments - prev[matchingKey] = { - ms: { // @ts-ignore - k: Object.keys(storage.segments.segmentCache).reduce((prev, segmentName) => { // @ts-ignore - return storage.segments.segmentCache[segmentName].has(matchingKey) ? - prev!.concat({ n: segmentName }) : - prev; - }, []) - }, - ls: { - k: [] - } - }; - } - return prev; - }, {}) : - undefined - }; -} diff --git a/src/storages/getRolloutPlan.ts b/src/storages/getRolloutPlan.ts new file mode 100644 index 00000000..db6333aa --- /dev/null +++ b/src/storages/getRolloutPlan.ts @@ -0,0 +1,73 @@ +import SplitIO from '../../types/splitio'; +import { IStorageSync } from './types'; +import { setToArray } from '../utils/lang/sets'; +import { getMatching } from '../utils/key'; +import { ILogger } from '../logger/types'; +import { RolloutPlan } from './types'; +import { IMembershipsResponse, IMySegmentsResponse } from '../dtos/types'; + +/** + * Gets the rollout plan snapshot from the given synchronous storage. + * If `keys` are provided, the memberships for those keys are returned, to protect segments data. + * Otherwise, the segments data is returned. 
+ */ +export function getRolloutPlan(log: ILogger, storage: IStorageSync, options: SplitIO.RolloutPlanOptions = {}): RolloutPlan { + + const { keys, exposeSegments } = options; + const { splits, segments, rbSegments } = storage; + + log.debug(`storage: get feature flags${keys ? `, and memberships for keys ${keys}` : ''}${exposeSegments ? ', and segments' : ''}`); + + return { + splitChanges: { + ff: { + t: splits.getChangeNumber(), + s: -1, + d: splits.getAll(), + }, + rbs: { + t: rbSegments.getChangeNumber(), + s: -1, + d: rbSegments.getAll(), + } + }, + segmentChanges: exposeSegments ? // @ts-ignore accessing private prop + Object.keys(segments.segmentCache).map(segmentName => ({ + name: segmentName, // @ts-ignore + added: setToArray(segments.segmentCache[segmentName] as Set), + removed: [], + till: segments.getChangeNumber(segmentName)! + })) : + undefined, + memberships: keys ? + keys.reduce>((prev, key) => { + const matchingKey = getMatching(key); + if (storage.shared) { // Client-side segments + const sharedStorage = storage.shared(matchingKey); + prev[matchingKey] = { + ms: { // @ts-ignore + k: Object.keys(sharedStorage.segments.segmentCache).map(segmentName => ({ n: segmentName })), + }, + ls: sharedStorage.largeSegments ? { // @ts-ignore + k: Object.keys(sharedStorage.largeSegments.segmentCache).map(segmentName => ({ n: segmentName })), + } : undefined + }; + } else { // Server-side segments + prev[matchingKey] = { + ms: { // @ts-ignore + k: Object.keys(storage.segments.segmentCache).reduce((prev, segmentName) => { // @ts-ignore + return storage.segments.segmentCache[segmentName].has(matchingKey) ? 
+ prev!.concat({ n: segmentName }) : + prev; + }, []) + }, + ls: { + k: [] + } + }; + } + return prev; + }, {}) : + undefined + }; +} diff --git a/src/storages/setRolloutPlan.ts b/src/storages/setRolloutPlan.ts new file mode 100644 index 00000000..a8529231 --- /dev/null +++ b/src/storages/setRolloutPlan.ts @@ -0,0 +1,71 @@ +import SplitIO from '../../types/splitio'; +import { IRBSegmentsCacheSync, ISegmentsCacheSync, ISplitsCacheSync } from './types'; +import { ILogger } from '../logger/types'; +import { isObject } from '../utils/lang'; +import { isConsumerMode } from '../utils/settingsValidation/mode'; +import { RolloutPlan } from './types'; + +/** + * Validates if the given rollout plan is valid. + */ +export function validateRolloutPlan(log: ILogger, settings: SplitIO.ISettings): RolloutPlan | undefined { + const { mode, initialRolloutPlan } = settings; + + if (isConsumerMode(mode)) { + log.warn('storage: initial rollout plan is ignored in consumer mode'); + return; + } + + if (isObject(initialRolloutPlan) && isObject((initialRolloutPlan as any).splitChanges)) return initialRolloutPlan as RolloutPlan; + + log.error('storage: invalid rollout plan provided'); + return; +} + +/** + * Sets the given synchronous storage with the provided rollout plan snapshot. + * If `matchingKey` is provided, the storage is handled as a client-side storage (segments and largeSegments are instances of MySegmentsCache). + * Otherwise, the storage is handled as a server-side storage (segments is an instance of SegmentsCache). + */ +export function setRolloutPlan(log: ILogger, rolloutPlan: RolloutPlan, storage: { splits?: ISplitsCacheSync, rbSegments?: IRBSegmentsCacheSync, segments: ISegmentsCacheSync, largeSegments?: ISegmentsCacheSync }, matchingKey?: string) { + const { splits, rbSegments, segments, largeSegments } = storage; + const { splitChanges: { ff, rbs } } = rolloutPlan; + + log.debug(`storage: set feature flags and segments${matchingKey ? 
` for key ${matchingKey}` : ''}`); + + if (splits && ff) { + splits.clear(); + splits.update(ff.d, [], ff.t); + } + + if (rbSegments && rbs) { + rbSegments.clear(); + rbSegments.update(rbs.d, [], rbs.t); + } + + const segmentChanges = rolloutPlan.segmentChanges; + if (matchingKey) { // add memberships data (client-side) + let memberships = rolloutPlan.memberships && rolloutPlan.memberships[matchingKey]; + if (!memberships && segmentChanges) { + memberships = { + ms: { + k: segmentChanges.filter(segment => { + return segment.added.indexOf(matchingKey) > -1; + }).map(segment => ({ n: segment.name })) + } + }; + } + + if (memberships) { + if (memberships.ms) segments.resetSegments(memberships.ms!); + if (memberships.ls && largeSegments) largeSegments.resetSegments(memberships.ls!); + } + } else { // add segments data (server-side) + if (segmentChanges) { + segments.clear(); + segmentChanges.forEach(segment => { + segments.update(segment.name, segment.added, segment.removed, segment.till); + }); + } + } +} diff --git a/src/storages/types.ts b/src/storages/types.ts index 53b049ed..219e9a55 100644 --- a/src/storages/types.ts +++ b/src/storages/types.ts @@ -1,5 +1,5 @@ import SplitIO from '../../types/splitio'; -import { MaybeThenable, ISplit, IRBSegment, IMySegmentsResponse } from '../dtos/types'; +import { MaybeThenable, ISplit, IRBSegment, IMySegmentsResponse, IMembershipsResponse, ISegmentChangesResponse, ISplitChangesResponse } from '../dtos/types'; import { MySegmentsData } from '../sync/polling/types'; import { EventDataType, HttpErrors, HttpLatencies, ImpressionDataType, LastSync, Method, MethodExceptions, MethodLatencies, MultiMethodExceptions, MultiMethodLatencies, MultiConfigs, OperationType, StoredEventWithMetadata, StoredImpressionWithMetadata, StreamingEvent, UniqueKeysPayloadCs, UniqueKeysPayloadSs, TelemetryUsageStatsPayload, UpdatesFromSSEEnum } from '../sync/submitters/types'; import { ISettings } from '../types'; @@ -520,3 +520,21 @@ export type 
IStorageAsyncFactory = SplitIO.StorageAsyncFactory & { readonly type: SplitIO.StorageType, (params: IStorageFactoryParams): IStorageAsync } + +export type RolloutPlan = { + /** + * Feature flags and rule-based segments. + */ + splitChanges: ISplitChangesResponse; + /** + * Optional map of matching keys to their memberships. + */ + memberships?: { + [matchingKey: string]: IMembershipsResponse; + }; + /** + * Optional list of standard segments. + * This property is ignored if `memberships` is provided. + */ + segmentChanges?: ISegmentChangesResponse[]; +}; diff --git a/src/types.ts b/src/types.ts index be4132a1..ad3fa04c 100644 --- a/src/types.ts +++ b/src/types.ts @@ -1,7 +1,7 @@ import SplitIO from '../types/splitio'; import { ISplitFiltersValidation } from './dtos/types'; import { ILogger } from './logger/types'; -import { RolloutPlan } from './storages/dataLoader'; +import { RolloutPlan } from './storages/types'; /** * SplitIO.ISettings interface extended with private properties for internal use diff --git a/src/utils/settingsValidation/index.ts b/src/utils/settingsValidation/index.ts index 1c300ed6..2dc63018 100644 --- a/src/utils/settingsValidation/index.ts +++ b/src/utils/settingsValidation/index.ts @@ -7,7 +7,7 @@ import { ISettingsValidationParams } from './types'; import { ISettings } from '../../types'; import { validateKey } from '../inputValidation/key'; import { ERROR_MIN_CONFIG_PARAM, LOG_PREFIX_CLIENT_INSTANTIATION } from '../../logger/constants'; -import { validateRolloutPlan } from '../../storages/dataLoader'; +import { validateRolloutPlan } from '../../storages/setRolloutPlan'; // Exported for telemetry export const base = { From 4ee373f48a6a2c0ab84a71c53ebe73ad0e706b66 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 5 Sep 2025 14:49:14 -0300 Subject: [PATCH 24/30] Polishing --- src/dtos/types.ts | 2 +- src/storages/__tests__/dataLoader.spec.ts | 1 + src/storages/getRolloutPlan.ts | 1 + 3 files changed, 3 insertions(+), 1 deletion(-) 
diff --git a/src/dtos/types.ts b/src/dtos/types.ts index a72b751b..78d62de4 100644 --- a/src/dtos/types.ts +++ b/src/dtos/types.ts @@ -259,7 +259,7 @@ export interface ISegmentChangesResponse { name: string, added: string[], removed: string[], - since?: number, + since: number, till: number } diff --git a/src/storages/__tests__/dataLoader.spec.ts b/src/storages/__tests__/dataLoader.spec.ts index f6afd300..3f1de562 100644 --- a/src/storages/__tests__/dataLoader.spec.ts +++ b/src/storages/__tests__/dataLoader.spec.ts @@ -21,6 +21,7 @@ const expectedRolloutPlan = { name: 'segment1', added: [fullSettings.core.key as string, otherKey], removed: [], + since: -1, till: 123 }] }; diff --git a/src/storages/getRolloutPlan.ts b/src/storages/getRolloutPlan.ts index db6333aa..d4ac25d8 100644 --- a/src/storages/getRolloutPlan.ts +++ b/src/storages/getRolloutPlan.ts @@ -36,6 +36,7 @@ export function getRolloutPlan(log: ILogger, storage: IStorageSync, options: Spl name: segmentName, // @ts-ignore added: setToArray(segments.segmentCache[segmentName] as Set), removed: [], + since: -1, till: segments.getChangeNumber(segmentName)! 
})) : undefined, From 0d1ba870d2329d07e0eee480735622aad93f1901 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 5 Sep 2025 15:01:01 -0300 Subject: [PATCH 25/30] refactor: add getAll method to segment caches and refactor datetime transformation to not mutate FF definitions --- src/evaluator/convertions/index.ts | 10 +++++++ src/evaluator/matchersTransform/index.ts | 7 ++--- .../inLocalStorage/RBSegmentsCacheInLocal.ts | 4 +++ .../inMemory/RBSegmentsCacheInMemory.ts | 4 +++ src/storages/types.ts | 28 ++++++++++++++++--- 5 files changed, 45 insertions(+), 8 deletions(-) diff --git a/src/evaluator/convertions/index.ts b/src/evaluator/convertions/index.ts index 7d7384d7..acad8017 100644 --- a/src/evaluator/convertions/index.ts +++ b/src/evaluator/convertions/index.ts @@ -1,3 +1,5 @@ +import { IBetweenMatcherData } from '../../dtos/types'; + export function zeroSinceHH(millisSinceEpoch: number): number { return new Date(millisSinceEpoch).setUTCHours(0, 0, 0, 0); } @@ -5,3 +7,11 @@ export function zeroSinceHH(millisSinceEpoch: number): number { export function zeroSinceSS(millisSinceEpoch: number): number { return new Date(millisSinceEpoch).setUTCSeconds(0, 0); } + +export function betweenDateTimeTransform(betweenMatcherData: IBetweenMatcherData): IBetweenMatcherData { + return { + dataType: betweenMatcherData.dataType, + start: zeroSinceSS(betweenMatcherData.start), + end: zeroSinceSS(betweenMatcherData.end) + }; +} diff --git a/src/evaluator/matchersTransform/index.ts b/src/evaluator/matchersTransform/index.ts index 6219c4dc..075ea9f0 100644 --- a/src/evaluator/matchersTransform/index.ts +++ b/src/evaluator/matchersTransform/index.ts @@ -3,7 +3,7 @@ import { matcherTypes, matcherTypesMapper, matcherDataTypes } from '../matchers/ import { segmentTransform } from './segment'; import { whitelistTransform } from './whitelist'; import { numericTransform } from './unaryNumeric'; -import { zeroSinceHH, zeroSinceSS } from '../convertions'; +import { zeroSinceHH, 
zeroSinceSS, betweenDateTimeTransform } from '../convertions'; import { IBetweenMatcherData, IInLargeSegmentMatcherData, IInSegmentMatcherData, ISplitMatcher, IUnaryNumericMatcherData } from '../../dtos/types'; import { IMatcherDto } from '../types'; @@ -32,7 +32,7 @@ export function matchersTransform(matchers: ISplitMatcher[]): IMatcherDto[] { let type = matcherTypesMapper(matcherType); // As default input data type we use string (even for ALL_KEYS) let dataType = matcherDataTypes.STRING; - let value = undefined; + let value; if (type === matcherTypes.IN_SEGMENT) { value = segmentTransform(userDefinedSegmentMatcherData as IInSegmentMatcherData); @@ -60,8 +60,7 @@ export function matchersTransform(matchers: ISplitMatcher[]): IMatcherDto[] { dataType = matcherDataTypes.NUMBER; if (value.dataType === 'DATETIME') { - value.start = zeroSinceSS(value.start); - value.end = zeroSinceSS(value.end); + value = betweenDateTimeTransform(value); dataType = matcherDataTypes.DATETIME; } } else if (type === matcherTypes.BETWEEN_SEMVER) { diff --git a/src/storages/inLocalStorage/RBSegmentsCacheInLocal.ts b/src/storages/inLocalStorage/RBSegmentsCacheInLocal.ts index 37f6ad8e..cfc68cf5 100644 --- a/src/storages/inLocalStorage/RBSegmentsCacheInLocal.ts +++ b/src/storages/inLocalStorage/RBSegmentsCacheInLocal.ts @@ -105,6 +105,10 @@ export class RBSegmentsCacheInLocal implements IRBSegmentsCacheSync { return item && JSON.parse(item); } + getAll(): IRBSegment[] { + return this.getNames().map(key => this.get(key)!); + } + contains(names: Set): boolean { const namesArray = setToArray(names); const namesInStorage = this.getNames(); diff --git a/src/storages/inMemory/RBSegmentsCacheInMemory.ts b/src/storages/inMemory/RBSegmentsCacheInMemory.ts index 568b0deb..2b876202 100644 --- a/src/storages/inMemory/RBSegmentsCacheInMemory.ts +++ b/src/storages/inMemory/RBSegmentsCacheInMemory.ts @@ -51,6 +51,10 @@ export class RBSegmentsCacheInMemory implements IRBSegmentsCacheSync { return 
this.cache[name] || null; } + getAll(): IRBSegment[] { + return this.getNames().map(key => this.get(key)!); + } + contains(names: Set): boolean { const namesArray = setToArray(names); const namesInStorage = this.getNames(); diff --git a/src/storages/types.ts b/src/storages/types.ts index 8e93daca..2737da40 100644 --- a/src/storages/types.ts +++ b/src/storages/types.ts @@ -1,5 +1,5 @@ import SplitIO from '../../types/splitio'; -import { MaybeThenable, ISplit, IRBSegment, IMySegmentsResponse } from '../dtos/types'; +import { MaybeThenable, ISplit, IRBSegment, IMySegmentsResponse, IMembershipsResponse, ISegmentChangesResponse, ISplitChangesResponse } from '../dtos/types'; import { MySegmentsData } from '../sync/polling/types'; import { EventDataType, HttpErrors, HttpLatencies, ImpressionDataType, LastSync, Method, MethodExceptions, MethodLatencies, MultiMethodExceptions, MultiMethodLatencies, MultiConfigs, OperationType, StoredEventWithMetadata, StoredImpressionWithMetadata, StreamingEvent, UniqueKeysPayloadCs, UniqueKeysPayloadSs, TelemetryUsageStatsPayload, UpdatesFromSSEEnum } from '../sync/submitters/types'; import { ISettings } from '../types'; @@ -235,6 +235,7 @@ export interface IRBSegmentsCacheSync extends IRBSegmentsCacheBase { update(toAdd: IRBSegment[], toRemove: IRBSegment[], changeNumber: number): boolean, get(name: string): IRBSegment | null, getChangeNumber(): number, + getAll(): IRBSegment[], clear(): void, contains(names: Set): boolean, // Used only for smart pausing in client-side standalone. 
Returns true if the storage contains a RBSegment using segments or large segments matchers @@ -465,7 +466,7 @@ export interface IStorageBase< telemetry?: TTelemetryCache, uniqueKeys: TUniqueKeysCache, destroy(): void | Promise, - shared?: (matchingKey: string, onReadyCb: (error?: any) => void) => this + shared?: (matchingKey: string, onReadyCb?: (error?: any) => void) => this } export interface IStorageSync extends IStorageBase< @@ -496,8 +497,6 @@ export interface IStorageAsync extends IStorageBase< /** StorageFactory */ -export type DataLoader = (storage: IStorageSync, matchingKey: string) => void - export interface IStorageFactoryParams { settings: ISettings, /** @@ -505,6 +504,9 @@ export interface IStorageFactoryParams { * It is meant for emitting SDK_READY event in consumer mode, and waiting before using the storage in the synchronizer. */ onReadyCb: (error?: any) => void, + /** + * For emitting SDK_READY_FROM_CACHE event in consumer mode with Redis to allow immediate evaluations + */ onReadyFromCacheCb: () => void, } @@ -518,3 +520,21 @@ export type IStorageAsyncFactory = SplitIO.StorageAsyncFactory & { readonly type: SplitIO.StorageType, (params: IStorageFactoryParams): IStorageAsync } + +export type RolloutPlan = { + /** + * Feature flags and rule-based segments. + */ + splitChanges: ISplitChangesResponse; + /** + * Optional map of matching keys to their memberships. + */ + memberships?: { + [matchingKey: string]: IMembershipsResponse; + }; + /** + * Optional list of standard segments. + * This property is ignored if `memberships` is provided. 
+ */ + segmentChanges?: ISegmentChangesResponse[]; +}; From 3d5c5e4a9d36a64d591488718703d44b75316fd6 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 5 Sep 2025 15:07:33 -0300 Subject: [PATCH 26/30] Fix types --- src/storages/types.ts | 22 +++------------------- 1 file changed, 3 insertions(+), 19 deletions(-) diff --git a/src/storages/types.ts b/src/storages/types.ts index 2737da40..97664de5 100644 --- a/src/storages/types.ts +++ b/src/storages/types.ts @@ -1,5 +1,5 @@ import SplitIO from '../../types/splitio'; -import { MaybeThenable, ISplit, IRBSegment, IMySegmentsResponse, IMembershipsResponse, ISegmentChangesResponse, ISplitChangesResponse } from '../dtos/types'; +import { MaybeThenable, ISplit, IRBSegment, IMySegmentsResponse } from '../dtos/types'; import { MySegmentsData } from '../sync/polling/types'; import { EventDataType, HttpErrors, HttpLatencies, ImpressionDataType, LastSync, Method, MethodExceptions, MethodLatencies, MultiMethodExceptions, MultiMethodLatencies, MultiConfigs, OperationType, StoredEventWithMetadata, StoredImpressionWithMetadata, StreamingEvent, UniqueKeysPayloadCs, UniqueKeysPayloadSs, TelemetryUsageStatsPayload, UpdatesFromSSEEnum } from '../sync/submitters/types'; import { ISettings } from '../types'; @@ -497,6 +497,8 @@ export interface IStorageAsync extends IStorageBase< /** StorageFactory */ +export type DataLoader = (storage: IStorageSync, matchingKey: string) => void + export interface IStorageFactoryParams { settings: ISettings, /** @@ -520,21 +522,3 @@ export type IStorageAsyncFactory = SplitIO.StorageAsyncFactory & { readonly type: SplitIO.StorageType, (params: IStorageFactoryParams): IStorageAsync } - -export type RolloutPlan = { - /** - * Feature flags and rule-based segments. - */ - splitChanges: ISplitChangesResponse; - /** - * Optional map of matching keys to their memberships. - */ - memberships?: { - [matchingKey: string]: IMembershipsResponse; - }; - /** - * Optional list of standard segments. 
- * This property is ignored if `memberships` is provided. - */ - segmentChanges?: ISegmentChangesResponse[]; -}; From 9ddac790e62d660be07c8939d5f1b1ba76b40c42 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 5 Sep 2025 15:21:12 -0300 Subject: [PATCH 27/30] Add data loader utils: getRolloutPlan, setRolloutPlan, validateRolloutPlan --- src/storages/__tests__/dataLoader.spec.ts | 133 +++++++++++++++ src/storages/dataLoader.ts | 55 ------ src/storages/getRolloutPlan.ts | 74 +++++++++ src/storages/setRolloutPlan.ts | 71 ++++++++ src/storages/types.ts | 20 ++- src/types.ts | 37 +---- .../__tests__/preloadedData.spec.ts | 157 ------------------ src/utils/inputValidation/index.ts | 1 - src/utils/inputValidation/preloadedData.ts | 57 ------- types/splitio.d.ts | 38 ++++- 10 files changed, 335 insertions(+), 308 deletions(-) create mode 100644 src/storages/__tests__/dataLoader.spec.ts delete mode 100644 src/storages/dataLoader.ts create mode 100644 src/storages/getRolloutPlan.ts create mode 100644 src/storages/setRolloutPlan.ts delete mode 100644 src/utils/inputValidation/__tests__/preloadedData.spec.ts delete mode 100644 src/utils/inputValidation/preloadedData.ts diff --git a/src/storages/__tests__/dataLoader.spec.ts b/src/storages/__tests__/dataLoader.spec.ts new file mode 100644 index 00000000..3f1de562 --- /dev/null +++ b/src/storages/__tests__/dataLoader.spec.ts @@ -0,0 +1,133 @@ +import { InMemoryStorageFactory } from '../inMemory/InMemoryStorage'; +import { InMemoryStorageCSFactory } from '../inMemory/InMemoryStorageCS'; +import { fullSettings } from '../../utils/settingsValidation/__tests__/settings.mocks'; +import { loggerMock } from '../../logger/__tests__/sdkLogger.mock'; +import { IRBSegment, ISplit } from '../../dtos/types'; + +import { validateRolloutPlan, setRolloutPlan } from '../setRolloutPlan'; +import { getRolloutPlan } from '../getRolloutPlan'; + +const otherKey = 'otherKey'; +const expectedRolloutPlan = { + splitChanges: { + ff: { d: [{ name: 
'split1' }], t: 123, s: -1 }, + rbs: { d: [{ name: 'rbs1' }], t: 321, s: -1 } + }, + memberships: { + [fullSettings.core.key as string]: { ms: { k: [{ n: 'segment1' }] }, ls: { k: [] } }, + [otherKey]: { ms: { k: [{ n: 'segment1' }] }, ls: { k: [] } } + }, + segmentChanges: [{ + name: 'segment1', + added: [fullSettings.core.key as string, otherKey], + removed: [], + since: -1, + till: 123 + }] +}; + +describe('validateRolloutPlan', () => { + afterEach(() => { + loggerMock.mockClear(); + }); + + test('valid rollout plan and mode', () => { + expect(validateRolloutPlan(loggerMock, { mode: 'standalone', initialRolloutPlan: expectedRolloutPlan } as any)).toEqual(expectedRolloutPlan); + expect(loggerMock.error).not.toHaveBeenCalled(); + }); + + test('invalid rollout plan', () => { + expect(validateRolloutPlan(loggerMock, { mode: 'standalone', initialRolloutPlan: {} } as any)).toBeUndefined(); + expect(loggerMock.error).toHaveBeenCalledWith('storage: invalid rollout plan provided'); + }); + + test('invalid mode', () => { + expect(validateRolloutPlan(loggerMock, { mode: 'consumer', initialRolloutPlan: expectedRolloutPlan } as any)).toBeUndefined(); + expect(loggerMock.warn).toHaveBeenCalledWith('storage: initial rollout plan is ignored in consumer mode'); + }); +}); + +describe('getRolloutPlan & setRolloutPlan (client-side)', () => { + // @ts-expect-error Load server-side storage + const serverStorage = InMemoryStorageFactory({ settings: fullSettings }); + serverStorage.splits.update([{ name: 'split1' } as ISplit], [], 123); + serverStorage.rbSegments.update([{ name: 'rbs1' } as IRBSegment], [], 321); + serverStorage.segments.update('segment1', [fullSettings.core.key as string, otherKey], [], 123); + + afterEach(() => { + jest.clearAllMocks(); + }); + + test('using preloaded data (no memberships, no segments)', () => { + const rolloutPlan = getRolloutPlan(loggerMock, serverStorage); + + // @ts-expect-error Load client-side storage with preloaded data + const clientStorage 
= InMemoryStorageCSFactory({ settings: fullSettings }); + setRolloutPlan(loggerMock, rolloutPlan, clientStorage, fullSettings.core.key as string); + + // Shared client storage + const sharedClientStorage = clientStorage.shared!(otherKey); + setRolloutPlan(loggerMock, rolloutPlan, { segments: sharedClientStorage.segments, largeSegments: sharedClientStorage.largeSegments }, otherKey); + + expect(clientStorage.segments.getRegisteredSegments()).toEqual([]); + expect(sharedClientStorage.segments.getRegisteredSegments()).toEqual([]); + + // Get preloaded data from client-side storage + expect(getRolloutPlan(loggerMock, clientStorage)).toEqual(rolloutPlan); + expect(rolloutPlan).toEqual({ ...expectedRolloutPlan, memberships: undefined, segmentChanges: undefined }); + }); + + test('using preloaded data with memberships', () => { + const rolloutPlan = getRolloutPlan(loggerMock, serverStorage, { keys: [fullSettings.core.key as string, otherKey] }); + + // @ts-expect-error Load client-side storage with preloaded data + const clientStorage = InMemoryStorageCSFactory({ settings: fullSettings }); + setRolloutPlan(loggerMock, rolloutPlan, clientStorage, fullSettings.core.key as string); + + // Shared client storage + const sharedClientStorage = clientStorage.shared!(otherKey); + setRolloutPlan(loggerMock, rolloutPlan, { segments: sharedClientStorage.segments, largeSegments: sharedClientStorage.largeSegments }, otherKey); + + expect(clientStorage.segments.getRegisteredSegments()).toEqual(['segment1']); + expect(sharedClientStorage.segments.getRegisteredSegments()).toEqual(['segment1']); + + // @TODO requires internal storage cache for `shared` storages + // // Get preloaded data from client-side storage + // expect(getRolloutPlan(loggerMock, clientStorage, { keys: [fullSettings.core.key as string, otherKey] })).toEqual(rolloutPlan); + // expect(rolloutPlan).toEqual({ ...expectedRolloutPlan, segmentChanges: undefined }); + }); + + test('using preloaded data with segments', () => { 
+ const rolloutPlan = getRolloutPlan(loggerMock, serverStorage, { exposeSegments: true }); + + // @ts-expect-error Load client-side storage with preloaded data + const clientStorage = InMemoryStorageCSFactory({ settings: fullSettings }); + setRolloutPlan(loggerMock, rolloutPlan, clientStorage, fullSettings.core.key as string); + + // Shared client storage + const sharedClientStorage = clientStorage.shared!(otherKey); + setRolloutPlan(loggerMock, rolloutPlan, { segments: sharedClientStorage.segments, largeSegments: sharedClientStorage.largeSegments }, otherKey); + + expect(clientStorage.segments.getRegisteredSegments()).toEqual(['segment1']); + expect(sharedClientStorage.segments.getRegisteredSegments()).toEqual(['segment1']); + + expect(rolloutPlan).toEqual({ ...expectedRolloutPlan, memberships: undefined }); + }); + + test('using preloaded data with memberships and segments', () => { + const rolloutPlan = getRolloutPlan(loggerMock, serverStorage, { keys: [fullSettings.core.key as string], exposeSegments: true }); + + // @ts-expect-error Load client-side storage with preloaded data + const clientStorage = InMemoryStorageCSFactory({ settings: fullSettings }); + setRolloutPlan(loggerMock, rolloutPlan, clientStorage, fullSettings.core.key as string); + + // Shared client storage + const sharedClientStorage = clientStorage.shared!(otherKey); + setRolloutPlan(loggerMock, rolloutPlan, { segments: sharedClientStorage.segments, largeSegments: sharedClientStorage.largeSegments }, otherKey); + + expect(clientStorage.segments.getRegisteredSegments()).toEqual(['segment1']); // main client membership is set via the rollout plan `memberships` field + expect(sharedClientStorage.segments.getRegisteredSegments()).toEqual(['segment1']); // shared client membership is set via the rollout plan `segmentChanges` field + + expect(rolloutPlan).toEqual({ ...expectedRolloutPlan, memberships: { [fullSettings.core.key as string]: expectedRolloutPlan.memberships![fullSettings.core.key as 
string] } }); + }); +}); diff --git a/src/storages/dataLoader.ts b/src/storages/dataLoader.ts deleted file mode 100644 index 49522bce..00000000 --- a/src/storages/dataLoader.ts +++ /dev/null @@ -1,55 +0,0 @@ -import { PreloadedData } from '../types'; -import { DataLoader, ISegmentsCacheSync, ISplitsCacheSync } from './types'; - -// This value might be eventually set via a config parameter -const DEFAULT_CACHE_EXPIRATION_IN_MILLIS = 864000000; // 10 days - -/** - * Factory of client-side storage loader - * - * @param preloadedData - validated data following the format proposed in https://github.com/godaddy/split-javascript-data-loader - * and extended with a `mySegmentsData` property. - * @returns function to preload the storage - */ -export function dataLoaderFactory(preloadedData: PreloadedData): DataLoader { - - /** - * Storage-agnostic adaptation of `loadDataIntoLocalStorage` function - * (https://github.com/godaddy/split-javascript-data-loader/blob/master/src/load-data.js) - * - * @param storage - object containing `splits` and `segments` cache (client-side variant) - * @param userId - user key string of the provided MySegmentsCache - */ - // @TODO extend to support SegmentsCache (server-side variant) by making `userId` optional and adding the corresponding logic. - // @TODO extend to load data on shared mySegments storages. Be specific when emitting SDK_READY_FROM_CACHE on shared clients. Maybe the serializer should provide the `useSegments` flag. 
- return function loadData(storage: { splits: ISplitsCacheSync, segments: ISegmentsCacheSync }, userId: string) { - // Do not load data if current preloadedData is empty - if (Object.keys(preloadedData).length === 0) return; - - const { lastUpdated = -1, segmentsData = {}, since = -1, splitsData = {} } = preloadedData; - - const storedSince = storage.splits.getChangeNumber(); - const expirationTimestamp = Date.now() - DEFAULT_CACHE_EXPIRATION_IN_MILLIS; - - // Do not load data if current localStorage data is more recent, - // or if its `lastUpdated` timestamp is older than the given `expirationTimestamp`, - if (storedSince > since || lastUpdated < expirationTimestamp) return; - - // cleaning up the localStorage data, since some cached splits might need be part of the preloaded data - storage.splits.clear(); - - // splitsData in an object where the property is the split name and the pertaining value is a stringified json of its data - storage.splits.update(Object.keys(splitsData).map(splitName => JSON.parse(splitsData[splitName])), [], since); - - // add mySegments data - let mySegmentsData = preloadedData.mySegmentsData && preloadedData.mySegmentsData[userId]; - if (!mySegmentsData) { - // segmentsData in an object where the property is the segment name and the pertaining value is a stringified object that contains the `added` array of userIds - mySegmentsData = Object.keys(segmentsData).filter(segmentName => { - const userIds = JSON.parse(segmentsData[segmentName]).added; - return Array.isArray(userIds) && userIds.indexOf(userId) > -1; - }); - } - storage.segments.resetSegments({ k: mySegmentsData.map(s => ({ n: s })) }); - }; -} diff --git a/src/storages/getRolloutPlan.ts b/src/storages/getRolloutPlan.ts new file mode 100644 index 00000000..d4ac25d8 --- /dev/null +++ b/src/storages/getRolloutPlan.ts @@ -0,0 +1,74 @@ +import SplitIO from '../../types/splitio'; +import { IStorageSync } from './types'; +import { setToArray } from '../utils/lang/sets'; +import { 
getMatching } from '../utils/key'; +import { ILogger } from '../logger/types'; +import { RolloutPlan } from './types'; +import { IMembershipsResponse, IMySegmentsResponse } from '../dtos/types'; + +/** + * Gets the rollout plan snapshot from the given synchronous storage. + * If `keys` are provided, the memberships for those keys is returned, to protect segments data. + * Otherwise, the segments data is returned. + */ +export function getRolloutPlan(log: ILogger, storage: IStorageSync, options: SplitIO.RolloutPlanOptions = {}): RolloutPlan { + + const { keys, exposeSegments } = options; + const { splits, segments, rbSegments } = storage; + + log.debug(`storage: get feature flags${keys ? `, and memberships for keys ${keys}` : ''}${exposeSegments ? ', and segments' : ''}`); + + return { + splitChanges: { + ff: { + t: splits.getChangeNumber(), + s: -1, + d: splits.getAll(), + }, + rbs: { + t: rbSegments.getChangeNumber(), + s: -1, + d: rbSegments.getAll(), + } + }, + segmentChanges: exposeSegments ? // @ts-ignore accessing private prop + Object.keys(segments.segmentCache).map(segmentName => ({ + name: segmentName, // @ts-ignore + added: setToArray(segments.segmentCache[segmentName] as Set), + removed: [], + since: -1, + till: segments.getChangeNumber(segmentName)! + })) : + undefined, + memberships: keys ? + keys.reduce>((prev, key) => { + const matchingKey = getMatching(key); + if (storage.shared) { // Client-side segments + const sharedStorage = storage.shared(matchingKey); + prev[matchingKey] = { + ms: { // @ts-ignore + k: Object.keys(sharedStorage.segments.segmentCache).map(segmentName => ({ n: segmentName })), + }, + ls: sharedStorage.largeSegments ? 
{ // @ts-ignore + k: Object.keys(sharedStorage.largeSegments.segmentCache).map(segmentName => ({ n: segmentName })), + } : undefined + }; + } else { // Server-side segments + prev[matchingKey] = { + ms: { // @ts-ignore + k: Object.keys(storage.segments.segmentCache).reduce((prev, segmentName) => { // @ts-ignore + return storage.segments.segmentCache[segmentName].has(matchingKey) ? + prev!.concat({ n: segmentName }) : + prev; + }, []) + }, + ls: { + k: [] + } + }; + } + return prev; + }, {}) : + undefined + }; +} diff --git a/src/storages/setRolloutPlan.ts b/src/storages/setRolloutPlan.ts new file mode 100644 index 00000000..a8529231 --- /dev/null +++ b/src/storages/setRolloutPlan.ts @@ -0,0 +1,71 @@ +import SplitIO from '../../types/splitio'; +import { IRBSegmentsCacheSync, ISegmentsCacheSync, ISplitsCacheSync } from './types'; +import { ILogger } from '../logger/types'; +import { isObject } from '../utils/lang'; +import { isConsumerMode } from '../utils/settingsValidation/mode'; +import { RolloutPlan } from './types'; + +/** + * Validates if the given rollout plan is valid. + */ +export function validateRolloutPlan(log: ILogger, settings: SplitIO.ISettings): RolloutPlan | undefined { + const { mode, initialRolloutPlan } = settings; + + if (isConsumerMode(mode)) { + log.warn('storage: initial rollout plan is ignored in consumer mode'); + return; + } + + if (isObject(initialRolloutPlan) && isObject((initialRolloutPlan as any).splitChanges)) return initialRolloutPlan as RolloutPlan; + + log.error('storage: invalid rollout plan provided'); + return; +} + +/** + * Sets the given synchronous storage with the provided rollout plan snapshot. + * If `matchingKey` is provided, the storage is handled as a client-side storage (segments and largeSegments are instances of MySegmentsCache). + * Otherwise, the storage is handled as a server-side storage (segments is an instance of SegmentsCache). 
+ */ +export function setRolloutPlan(log: ILogger, rolloutPlan: RolloutPlan, storage: { splits?: ISplitsCacheSync, rbSegments?: IRBSegmentsCacheSync, segments: ISegmentsCacheSync, largeSegments?: ISegmentsCacheSync }, matchingKey?: string) { + const { splits, rbSegments, segments, largeSegments } = storage; + const { splitChanges: { ff, rbs } } = rolloutPlan; + + log.debug(`storage: set feature flags and segments${matchingKey ? ` for key ${matchingKey}` : ''}`); + + if (splits && ff) { + splits.clear(); + splits.update(ff.d, [], ff.t); + } + + if (rbSegments && rbs) { + rbSegments.clear(); + rbSegments.update(rbs.d, [], rbs.t); + } + + const segmentChanges = rolloutPlan.segmentChanges; + if (matchingKey) { // add memberships data (client-side) + let memberships = rolloutPlan.memberships && rolloutPlan.memberships[matchingKey]; + if (!memberships && segmentChanges) { + memberships = { + ms: { + k: segmentChanges.filter(segment => { + return segment.added.indexOf(matchingKey) > -1; + }).map(segment => ({ n: segment.name })) + } + }; + } + + if (memberships) { + if (memberships.ms) segments.resetSegments(memberships.ms!); + if (memberships.ls && largeSegments) largeSegments.resetSegments(memberships.ls!); + } + } else { // add segments data (server-side) + if (segmentChanges) { + segments.clear(); + segmentChanges.forEach(segment => { + segments.update(segment.name, segment.added, segment.removed, segment.till); + }); + } + } +} diff --git a/src/storages/types.ts b/src/storages/types.ts index 97664de5..b1fa8081 100644 --- a/src/storages/types.ts +++ b/src/storages/types.ts @@ -1,5 +1,5 @@ import SplitIO from '../../types/splitio'; -import { MaybeThenable, ISplit, IRBSegment, IMySegmentsResponse } from '../dtos/types'; +import { MaybeThenable, ISplit, IRBSegment, IMySegmentsResponse, IMembershipsResponse, ISegmentChangesResponse, ISplitChangesResponse } from '../dtos/types'; import { MySegmentsData } from '../sync/polling/types'; import { EventDataType, HttpErrors, 
HttpLatencies, ImpressionDataType, LastSync, Method, MethodExceptions, MethodLatencies, MultiMethodExceptions, MultiMethodLatencies, MultiConfigs, OperationType, StoredEventWithMetadata, StoredImpressionWithMetadata, StreamingEvent, UniqueKeysPayloadCs, UniqueKeysPayloadSs, TelemetryUsageStatsPayload, UpdatesFromSSEEnum } from '../sync/submitters/types'; import { ISettings } from '../types'; @@ -522,3 +522,21 @@ export type IStorageAsyncFactory = SplitIO.StorageAsyncFactory & { readonly type: SplitIO.StorageType, (params: IStorageFactoryParams): IStorageAsync } + +export type RolloutPlan = { + /** + * Feature flags and rule-based segments. + */ + splitChanges: ISplitChangesResponse; + /** + * Optional map of matching keys to their memberships. + */ + memberships?: { + [matchingKey: string]: IMembershipsResponse; + }; + /** + * Optional list of standard segments. + * This property is ignored if `memberships` is provided. + */ + segmentChanges?: ISegmentChangesResponse[]; +}; diff --git a/src/types.ts b/src/types.ts index bdb0933c..ad3fa04c 100644 --- a/src/types.ts +++ b/src/types.ts @@ -1,6 +1,7 @@ import SplitIO from '../types/splitio'; import { ISplitFiltersValidation } from './dtos/types'; import { ILogger } from './logger/types'; +import { RolloutPlan } from './storages/types'; /** * SplitIO.ISettings interface extended with private properties for internal use @@ -10,6 +11,7 @@ export interface ISettings extends SplitIO.ISettings { __splitFiltersValidation: ISplitFiltersValidation; }; readonly log: ILogger; + readonly initialRolloutPlan?: RolloutPlan; } /** @@ -42,38 +44,3 @@ export interface IBasicClient extends SplitIO.IBasicClient { isClientSide?: boolean; key?: SplitIO.SplitKey; } -/** - * Defines the format of rollout plan data to preload the factory storage (cache). - */ -export interface PreloadedData { - /** - * Timestamp of the last moment the data was synchronized with Split servers. 
- * If this value is older than 10 days ago (expiration time policy), the data is not used to update the storage content. - */ - // @TODO configurable expiration time policy? - lastUpdated: number; - /** - * Change number of the preloaded data. - * If this value is older than the current changeNumber at the storage, the data is not used to update the storage content. - */ - since: number; - /** - * Map of feature flags to their stringified definitions. - */ - splitsData: { - [splitName: string]: string; - }; - /** - * Optional map of user keys to their list of segments. - */ - mySegmentsData?: { - [key: string]: string[]; - }; - /** - * Optional map of segments to their stringified definitions. - * This property is ignored if `mySegmentsData` was provided. - */ - segmentsData?: { - [segmentName: string]: string; - }; -} diff --git a/src/utils/inputValidation/__tests__/preloadedData.spec.ts b/src/utils/inputValidation/__tests__/preloadedData.spec.ts deleted file mode 100644 index 79f1d1a4..00000000 --- a/src/utils/inputValidation/__tests__/preloadedData.spec.ts +++ /dev/null @@ -1,157 +0,0 @@ -import { loggerMock } from '../../../logger/__tests__/sdkLogger.mock'; - -// Import the module mocking the logger. 
-import { validatePreloadedData } from '../preloadedData'; - -const method = 'some_method'; -const testCases = [ - // valid inputs - { - input: { lastUpdated: 10, since: 10, splitsData: {} }, - output: true, - warn: `${method}: preloadedData.splitsData doesn't contain feature flag definitions.` - }, - { - input: { lastUpdated: 10, since: 10, splitsData: { 'some_split': 'SPLIT DEFINITION' } }, - output: true - }, - { - input: { lastUpdated: 10, since: 10, splitsData: { 'some_split': 'SPLIT DEFINITION' }, mySegmentsData: { 'some_key': [] } }, - output: true - }, - { - input: { lastUpdated: 10, since: 10, splitsData: { 'some_split': 'SPLIT DEFINITION' }, mySegmentsData: { 'some_key': [] } }, - output: true - }, - { - input: { lastUpdated: 10, since: 10, splitsData: { 'some_split': 'SPLIT DEFINITION' }, mySegmentsData: {} }, - output: true - }, - { - input: { lastUpdated: 10, since: 10, splitsData: { 'some_split': 'SPLIT DEFINITION' }, mySegmentsData: { some_key: [] } }, - output: true - }, - { - input: { lastUpdated: 10, since: 10, splitsData: { 'some_split': 'SPLIT DEFINITION' }, mySegmentsData: { some_key: ['some_segment'] } }, - output: true - }, - { - input: { lastUpdated: 10, since: 10, splitsData: { 'some_split': 'SPLIT DEFINITION' }, segmentsData: {} }, - output: true - }, - { - input: { lastUpdated: 10, since: 10, splitsData: { 'some_split': 'SPLIT DEFINITION' }, segmentsData: { some_segment: 'SEGMENT DEFINITION' } }, - output: true - }, - { - input: { lastUpdated: 10, since: 10, splitsData: { 'some_split': 'SPLIT DEFINITION' }, mySegmentsData: { some_key: ['some_segment'], some_other_key: ['some_segment'] }, segmentsData: { some_segment: 'SEGMENT DEFINITION', some_other_segment: 'SEGMENT DEFINITION' } }, - output: true - }, - { - // should be true, even using objects for strings and numbers or having extra properties - input: { ignoredProperty: 'IGNORED', lastUpdated: new Number(10), since: new Number(10), splitsData: { 'some_split': new String('SPLIT 
DEFINITION') }, mySegmentsData: { some_key: [new String('some_segment')] }, segmentsData: { some_segment: new String('SEGMENT DEFINITION') } }, - output: true - }, - - // invalid inputs - { - // should be false if preloadedData is not an object - input: undefined, - output: false, - error: `${method}: preloadedData must be an object.` - }, - { - // should be false if preloadedData is not an object - input: [], - output: false, - error: `${method}: preloadedData must be an object.` - }, - { - // should be false if lastUpdated property is invalid - input: { lastUpdated: undefined, since: 10, splitsData: {} }, - output: false, - error: `${method}: preloadedData.lastUpdated must be a positive number.` - }, - { - // should be false if lastUpdated property is invalid - input: { lastUpdated: -1, since: 10, splitsData: {} }, - output: false, - error: `${method}: preloadedData.lastUpdated must be a positive number.` - }, - { - // should be false if since property is invalid - input: { lastUpdated: 10, since: undefined, splitsData: {} }, - output: false, - error: `${method}: preloadedData.since must be a positive number.` - }, - { - // should be false if since property is invalid - input: { lastUpdated: 10, since: -1, splitsData: {} }, - output: false, - error: `${method}: preloadedData.since must be a positive number.` - }, - { - // should be false if splitsData property is invalid - input: { lastUpdated: 10, since: 10, splitsData: undefined }, - output: false, - error: `${method}: preloadedData.splitsData must be a map of feature flag names to their stringified definitions.` - }, - { - // should be false if splitsData property is invalid - input: { lastUpdated: 10, since: 10, splitsData: ['DEFINITION'] }, - output: false, - error: `${method}: preloadedData.splitsData must be a map of feature flag names to their stringified definitions.` - }, - { - // should be false if splitsData property is invalid - input: { lastUpdated: 10, since: 10, splitsData: { some_split: undefined 
} }, - output: false, - error: `${method}: preloadedData.splitsData must be a map of feature flag names to their stringified definitions.` - }, - { - // should be false if mySegmentsData property is invalid - input: { lastUpdated: 10, since: 10, splitsData: { some_split: 'DEFINITION' }, mySegmentsData: ['DEFINITION'] }, - output: false, - error: `${method}: preloadedData.mySegmentsData must be a map of user keys to their list of segment names.` - }, - { - // should be false if mySegmentsData property is invalid - input: { lastUpdated: 10, since: 10, splitsData: { some_split: 'DEFINITION' }, mySegmentsData: { some_key: undefined } }, - output: false, - error: `${method}: preloadedData.mySegmentsData must be a map of user keys to their list of segment names.` - }, - { - // should be false if segmentsData property is invalid - input: { lastUpdated: 10, since: 10, splitsData: { some_split: 'DEFINITION' }, segmentsData: ['DEFINITION'] }, - output: false, - error: `${method}: preloadedData.segmentsData must be a map of segment names to their stringified definitions.` - }, - { - // should be false if segmentsData property is invalid - input: { lastUpdated: 10, since: 10, splitsData: { some_split: 'DEFINITION' }, segmentsData: { some_segment: undefined } }, - output: false, - error: `${method}: preloadedData.segmentsData must be a map of segment names to their stringified definitions.` - } -]; - -test('INPUT VALIDATION for preloadedData', () => { - - for (let i = 0; i < testCases.length; i++) { - const testCase = testCases[i]; - expect(validatePreloadedData(loggerMock, testCase.input, method)).toBe(testCase.output); - - if (testCase.error) { - expect(loggerMock.error.mock.calls[0]).toEqual([testCase.error]); // Should log the error for the invalid preloadedData. - loggerMock.error.mockClear(); - } else { - expect(loggerMock.error).not.toBeCalled(); // Should not log any error. 
- } - - if (testCase.warn) { - expect(loggerMock.warn.mock.calls[0]).toEqual([testCase.warn]); // Should log the warning for the given preloadedData. - loggerMock.warn.mockClear(); - } else { - expect(loggerMock.warn).not.toBeCalled(); // Should not log any warning. - } - } -}); diff --git a/src/utils/inputValidation/index.ts b/src/utils/inputValidation/index.ts index 96cf4be6..eac9777d 100644 --- a/src/utils/inputValidation/index.ts +++ b/src/utils/inputValidation/index.ts @@ -10,5 +10,4 @@ export { validateTrafficType } from './trafficType'; export { validateIfNotDestroyed, validateIfOperational } from './isOperational'; export { validateSplitExistence } from './splitExistence'; export { validateTrafficTypeExistence } from './trafficTypeExistence'; -export { validatePreloadedData } from './preloadedData'; export { validateEvaluationOptions } from './eventProperties'; diff --git a/src/utils/inputValidation/preloadedData.ts b/src/utils/inputValidation/preloadedData.ts deleted file mode 100644 index f07ee432..00000000 --- a/src/utils/inputValidation/preloadedData.ts +++ /dev/null @@ -1,57 +0,0 @@ -import { isObject, isString, isFiniteNumber } from '../lang'; -import { validateSplit } from './split'; -import { ILogger } from '../../logger/types'; - -function validateTimestampData(log: ILogger, maybeTimestamp: any, method: string, item: string) { - if (isFiniteNumber(maybeTimestamp) && maybeTimestamp > -1) return true; - log.error(`${method}: preloadedData.${item} must be a positive number.`); - return false; -} - -function validateSplitsData(log: ILogger, maybeSplitsData: any, method: string) { - if (isObject(maybeSplitsData)) { - const splitNames = Object.keys(maybeSplitsData); - if (splitNames.length === 0) log.warn(`${method}: preloadedData.splitsData doesn't contain feature flag definitions.`); - // @TODO in the future, consider handling the possibility of having parsed definitions of splits - if (splitNames.every(splitName => validateSplit(log, splitName, 
method) && isString(maybeSplitsData[splitName]))) return true; - } - log.error(`${method}: preloadedData.splitsData must be a map of feature flag names to their stringified definitions.`); - return false; -} - -function validateMySegmentsData(log: ILogger, maybeMySegmentsData: any, method: string) { - if (isObject(maybeMySegmentsData)) { - const userKeys = Object.keys(maybeMySegmentsData); - if (userKeys.every(userKey => { - const segmentNames = maybeMySegmentsData[userKey]; - // an empty list is valid - return Array.isArray(segmentNames) && segmentNames.every(segmentName => isString(segmentName)); - })) return true; - } - log.error(`${method}: preloadedData.mySegmentsData must be a map of user keys to their list of segment names.`); - return false; -} - -function validateSegmentsData(log: ILogger, maybeSegmentsData: any, method: string) { - if (isObject(maybeSegmentsData)) { - const segmentNames = Object.keys(maybeSegmentsData); - if (segmentNames.every(segmentName => isString(maybeSegmentsData[segmentName]))) return true; - } - log.error(`${method}: preloadedData.segmentsData must be a map of segment names to their stringified definitions.`); - return false; -} - -export function validatePreloadedData(log: ILogger, maybePreloadedData: any, method: string) { - if (!isObject(maybePreloadedData)) { - log.error(`${method}: preloadedData must be an object.`); - } else if ( - validateTimestampData(log, maybePreloadedData.lastUpdated, method, 'lastUpdated') && - validateTimestampData(log, maybePreloadedData.since, method, 'since') && - validateSplitsData(log, maybePreloadedData.splitsData, method) && - (!maybePreloadedData.mySegmentsData || validateMySegmentsData(log, maybePreloadedData.mySegmentsData, method)) && - (!maybePreloadedData.segmentsData || validateSegmentsData(log, maybePreloadedData.segmentsData, method)) - ) { - return true; - } - return false; -} diff --git a/types/splitio.d.ts b/types/splitio.d.ts index e85ab01b..3a9fe72d 100644 --- a/types/splitio.d.ts 
+++ b/types/splitio.d.ts @@ -350,6 +350,11 @@ interface IClientSideSyncSharedSettings extends IClientSideSharedSettings, ISync * @see {@link https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK#localhost-mode} */ features?: SplitIO.MockedFeaturesMap; + /** + * Rollout plan object (i.e., feature flags and segment definitions) to initialize the SDK storage with. If provided and valid, the SDK will be ready from cache immediately. + * This object is derived from calling the Node.js SDK’s `getRolloutPlan` method. + */ + initialRolloutPlan?: SplitIO.RolloutPlan; /** * SDK Startup settings. */ @@ -555,6 +560,7 @@ declare namespace SplitIO { eventsFirstPushWindow: number; }; readonly storage: StorageSyncFactory | StorageAsyncFactory | StorageOptions; + readonly initialRolloutPlan?: SplitIO.RolloutPlan; readonly urls: { events: string; sdk: string; @@ -1020,7 +1026,28 @@ declare namespace SplitIO { type: NodeSyncStorage | NodeAsyncStorage | BrowserStorage; prefix?: string; options?: Object; - } + }; + /** + * A JSON-serializable plain object that defines the format of rollout plan data to preload the SDK cache with feature flags and segments. + */ + type RolloutPlan = Object; + /** + * Options for the `factory.getRolloutPlan` method. + */ + type RolloutPlanOptions = { + /** + * Optional list of keys to generate the rollout plan snapshot with the memberships of the given keys. + * + * @defaultValue `undefined` + */ + keys?: SplitKey[]; + /** + * Optional flag to expose segments data in the rollout plan snapshot. + * + * @defaultValue `false` + */ + exposeSegments?: boolean; + }; /** * Impression listener interface. This is the interface that needs to be implemented * by the element you provide to the SDK as impression listener. @@ -1043,7 +1070,7 @@ declare namespace SplitIO { type IntegrationFactory = { readonly type: string; (params: any): (Integration | void); - } + }; /** * A pair of user key and it's trafficType, required for tracking valid Split events. 
*/ @@ -1564,6 +1591,13 @@ declare namespace SplitIO { * @returns The manager instance. */ manager(): IManager; + /** + * Returns the current snapshot of the SDK rollout plan in cache. + * + * @param keys - Optional list of keys to generate the rollout plan snapshot with the memberships of the given keys, rather than the complete segments data. + * @returns The current snapshot of the SDK rollout plan. + */ + getRolloutPlan(options?: RolloutPlanOptions): RolloutPlan; } /** * This represents the interface for the SDK instance for server-side with asynchronous storage. From 705057d0a6f83d78c2288ea3f0020e43d4266309 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 5 Sep 2025 16:17:51 -0300 Subject: [PATCH 28/30] rc --- CHANGES.txt | 2 +- package-lock.json | 4 ++-- package.json | 2 +- src/storages/getRolloutPlan.ts | 2 -- 4 files changed, 4 insertions(+), 6 deletions(-) diff --git a/CHANGES.txt b/CHANGES.txt index 50aef19e..4952c979 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -1,4 +1,4 @@ -2.5.0 (August XX, 2025) +2.5.0 (September 9, 2025) - Added `factory.getRolloutPlan()` method for standalone server-side SDKs, which returns the rollout plan snapshot from the storage. - Added `initialRolloutPlan` configuration option for standalone client-side SDKs, which allows preloading the SDK storage with a snapshot of the rollout plan. 
diff --git a/package-lock.json b/package-lock.json index f9c7ba15..67e40526 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "@splitsoftware/splitio-commons", - "version": "2.5.0-rc.0", + "version": "2.5.0-rc.1", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "@splitsoftware/splitio-commons", - "version": "2.5.0-rc.0", + "version": "2.5.0-rc.1", "license": "Apache-2.0", "dependencies": { "@types/ioredis": "^4.28.0", diff --git a/package.json b/package.json index 47c53107..ce1cc17d 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@splitsoftware/splitio-commons", - "version": "2.5.0-rc.0", + "version": "2.5.0-rc.1", "description": "Split JavaScript SDK common components", "main": "cjs/index.js", "module": "esm/index.js", diff --git a/src/storages/getRolloutPlan.ts b/src/storages/getRolloutPlan.ts index d4ac25d8..80061426 100644 --- a/src/storages/getRolloutPlan.ts +++ b/src/storages/getRolloutPlan.ts @@ -8,8 +8,6 @@ import { IMembershipsResponse, IMySegmentsResponse } from '../dtos/types'; /** * Gets the rollout plan snapshot from the given synchronous storage. - * If `keys` are provided, the memberships for those keys is returned, to protect segments data. - * Otherwise, the segments data is returned. 
*/ export function getRolloutPlan(log: ILogger, storage: IStorageSync, options: SplitIO.RolloutPlanOptions = {}): RolloutPlan { From 09263b2854480dba3394a64023288ab3d3b881f4 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Wed, 10 Sep 2025 12:08:05 -0300 Subject: [PATCH 29/30] Stable version --- CHANGES.txt | 2 +- package-lock.json | 4 ++-- package.json | 2 +- src/storages/getRolloutPlan.ts | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/CHANGES.txt b/CHANGES.txt index 4952c979..4a80a5e8 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -1,4 +1,4 @@ -2.5.0 (September 9, 2025) +2.5.0 (September 10, 2025) - Added `factory.getRolloutPlan()` method for standalone server-side SDKs, which returns the rollout plan snapshot from the storage. - Added `initialRolloutPlan` configuration option for standalone client-side SDKs, which allows preloading the SDK storage with a snapshot of the rollout plan. diff --git a/package-lock.json b/package-lock.json index 67e40526..14125ac1 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "@splitsoftware/splitio-commons", - "version": "2.5.0-rc.1", + "version": "2.5.0", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "@splitsoftware/splitio-commons", - "version": "2.5.0-rc.1", + "version": "2.5.0", "license": "Apache-2.0", "dependencies": { "@types/ioredis": "^4.28.0", diff --git a/package.json b/package.json index ce1cc17d..155f650a 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@splitsoftware/splitio-commons", - "version": "2.5.0-rc.1", + "version": "2.5.0", "description": "Split JavaScript SDK common components", "main": "cjs/index.js", "module": "esm/index.js", diff --git a/src/storages/getRolloutPlan.ts b/src/storages/getRolloutPlan.ts index 80061426..40e6ea84 100644 --- a/src/storages/getRolloutPlan.ts +++ b/src/storages/getRolloutPlan.ts @@ -14,7 +14,7 @@ export function getRolloutPlan(log: ILogger, storage: IStorageSync, options: Spl 
const { keys, exposeSegments } = options; const { splits, segments, rbSegments } = storage; - log.debug(`storage: get feature flags${keys ? `, and memberships for keys ${keys}` : ''}${exposeSegments ? ', and segments' : ''}`); + log.debug(`storage: get feature flags${keys ? `, and memberships for keys: ${keys}` : ''}${exposeSegments ? ', and segments' : ''}`); return { splitChanges: { From cf45f70792dbe1b15e1f6633e176936ed8887df7 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Wed, 10 Sep 2025 16:13:05 -0300 Subject: [PATCH 30/30] Fix type definition comment --- types/splitio.d.ts | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/types/splitio.d.ts b/types/splitio.d.ts index 3a9fe72d..2680f8ef 100644 --- a/types/splitio.d.ts +++ b/types/splitio.d.ts @@ -1594,7 +1594,14 @@ declare namespace SplitIO { /** * Returns the current snapshot of the SDK rollout plan in cache. * - * @param keys - Optional list of keys to generate the rollout plan snapshot with the memberships of the given keys, rather than the complete segments data. + * Wait for the SDK client to be ready before calling this method. + * + * ```js + * await factory.client().ready(); + * const rolloutPlan = factory.getRolloutPlan(); + * ``` + * + * @param options - An object of type RolloutPlanOptions for advanced options. * @returns The current snapshot of the SDK rollout plan. */ getRolloutPlan(options?: RolloutPlanOptions): RolloutPlan;