From 121ec76b4deb3303f3ed5cc4a943c2253fcea98a Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Tue, 28 Apr 2026 18:59:18 -0300 Subject: [PATCH] Rename split to definition --- .../__tests__/evaluate-feature.spec.ts | 6 +- .../__tests__/evaluate-features.spec.ts | 42 ++-- src/evaluator/index.ts | 22 +- .../matchers/__tests__/dependency.spec.ts | 4 +- .../matchers/__tests__/prerequisites.spec.ts | 4 +- .../matchers/__tests__/rbsegment.spec.ts | 8 +- .../__tests__/readinessManager.spec.ts | 56 ++--- .../__tests__/sdkReadinessManager.spec.ts | 10 +- src/readiness/constants.ts | 6 +- src/readiness/readinessManager.ts | 44 ++-- src/readiness/types.ts | 20 +- src/sdkClient/__tests__/trackMethod.spec.ts | 4 +- src/sdkClient/trackMethod.ts | 4 +- src/sdkFactory/__tests__/index.spec.ts | 2 +- src/sdkFactory/index.ts | 10 +- .../__tests__/index.asyncCache.spec.ts | 12 +- .../__tests__/index.syncCache.spec.ts | 6 +- src/sdkManager/index.ts | 12 +- src/storages/AbstractDefinitionsCacheAsync.ts | 63 +++++ src/storages/AbstractDefinitionsCacheSync.ts | 95 +++++++ src/storages/AbstractSplitsCacheAsync.ts | 63 ----- src/storages/AbstractSplitsCacheSync.ts | 95 ------- src/storages/KeyBuilder.ts | 12 +- src/storages/KeyBuilderCS.ts | 12 +- src/storages/KeyBuilderSS.ts | 4 +- src/storages/__tests__/KeyBuilder.spec.ts | 12 +- src/storages/__tests__/dataLoader.spec.ts | 2 +- src/storages/__tests__/testUtils.ts | 2 +- src/storages/getRolloutPlan.ts | 6 +- .../inLocalStorage/DefinitionsCacheInLocal.ts | 234 ++++++++++++++++++ .../inLocalStorage/RBSegmentsCacheInLocal.ts | 12 +- .../inLocalStorage/SplitsCacheInLocal.ts | 234 ------------------ ...pec.ts => DefinitionsCacheInLocal.spec.ts} | 128 +++++----- .../__tests__/validateCache.spec.ts | 48 ++-- src/storages/inLocalStorage/index.ts | 12 +- src/storages/inLocalStorage/validateCache.ts | 8 +- .../inMemory/DefinitionsCacheInMemory.ts | 130 ++++++++++ src/storages/inMemory/InMemoryStorage.ts | 8 +- 
src/storages/inMemory/InMemoryStorageCS.ts | 10 +- .../inMemory/RBSegmentsCacheInMemory.ts | 2 +- .../inMemory/SegmentsCacheInMemory.ts | 2 +- src/storages/inMemory/SplitsCacheInMemory.ts | 130 ---------- .../inMemory/TelemetryCacheInMemory.ts | 6 +- .../DefinitionsCacheInMemory.spec.ts | 190 ++++++++++++++ .../__tests__/SplitsCacheInMemory.spec.ts | 190 -------------- ...eInRedis.ts => DefinitionsCacheInRedis.ts} | 134 +++++----- ...pec.ts => DefinitionsCacheInRedis.spec.ts} | 122 ++++----- src/storages/inRedis/index.ts | 4 +- .../pluggable/DefinitionsCachePluggable.ts | 230 +++++++++++++++++ .../pluggable/SplitsCachePluggable.ts | 230 ----------------- ...c.ts => DefinitionsCachePluggable.spec.ts} | 118 ++++----- .../pluggable/__tests__/index.spec.ts | 6 +- src/storages/pluggable/index.ts | 4 +- src/storages/setRolloutPlan.ts | 12 +- src/storages/types.ts | 38 +-- src/sync/__tests__/syncManagerOnline.spec.ts | 10 +- .../offline/syncTasks/fromObjectSyncTask.ts | 16 +- .../polling/fetchers/splitChangesFetcher.ts | 2 +- src/sync/polling/pollingManagerCS.ts | 8 +- .../polling/syncTasks/definitionsSyncTask.ts | 2 +- src/sync/polling/types.ts | 2 +- .../definitionChangesUpdater.spec.ts | 76 +++--- .../__tests__/mySegmentsUpdater.spec.ts | 6 +- .../updaters/definitionChangesUpdater.ts | 22 +- .../polling/updaters/mySegmentsUpdater.ts | 2 +- ...teWorker.ts => DefinitionsUpdateWorker.ts} | 32 +-- ...pec.ts => DefinitionsUpdateWorker.spec.ts} | 90 +++---- src/sync/streaming/pushManager.ts | 14 +- src/sync/syncManagerOnline.ts | 6 +- .../__tests__/trafficTypeExistence.spec.ts | 4 +- .../inputValidation/trafficTypeExistence.ts | 4 +- 71 files changed, 1588 insertions(+), 1588 deletions(-) create mode 100644 src/storages/AbstractDefinitionsCacheAsync.ts create mode 100644 src/storages/AbstractDefinitionsCacheSync.ts delete mode 100644 src/storages/AbstractSplitsCacheAsync.ts delete mode 100644 src/storages/AbstractSplitsCacheSync.ts create mode 100644 
src/storages/inLocalStorage/DefinitionsCacheInLocal.ts delete mode 100644 src/storages/inLocalStorage/SplitsCacheInLocal.ts rename src/storages/inLocalStorage/__tests__/{SplitsCacheInLocal.spec.ts => DefinitionsCacheInLocal.spec.ts} (55%) create mode 100644 src/storages/inMemory/DefinitionsCacheInMemory.ts delete mode 100644 src/storages/inMemory/SplitsCacheInMemory.ts create mode 100644 src/storages/inMemory/__tests__/DefinitionsCacheInMemory.spec.ts delete mode 100644 src/storages/inMemory/__tests__/SplitsCacheInMemory.spec.ts rename src/storages/inRedis/{SplitsCacheInRedis.ts => DefinitionsCacheInRedis.ts} (56%) rename src/storages/inRedis/__tests__/{SplitsCacheInRedis.spec.ts => DefinitionsCacheInRedis.spec.ts} (57%) create mode 100644 src/storages/pluggable/DefinitionsCachePluggable.ts delete mode 100644 src/storages/pluggable/SplitsCachePluggable.ts rename src/storages/pluggable/__tests__/{SplitsCachePluggable.spec.ts => DefinitionsCachePluggable.spec.ts} (52%) rename src/sync/streaming/UpdateWorkers/{SplitsUpdateWorker.ts => DefinitionsUpdateWorker.ts} (75%) rename src/sync/streaming/UpdateWorkers/__tests__/{SplitsUpdateWorker.spec.ts => DefinitionsUpdateWorker.spec.ts} (69%) diff --git a/src/evaluator/__tests__/evaluate-feature.spec.ts b/src/evaluator/__tests__/evaluate-feature.spec.ts index 500bf5ca..65c5e0a4 100644 --- a/src/evaluator/__tests__/evaluate-feature.spec.ts +++ b/src/evaluator/__tests__/evaluate-feature.spec.ts @@ -16,8 +16,8 @@ const splitsMock: Record = { }; const mockStorage = { - splits: { - getSplit(name: string) { + definitions: { + get(name: string) { if (name === 'throw_exception') throw new Error('Error'); if (splitsMock[name]) return splitsMock[name]; @@ -43,7 +43,7 @@ test('EVALUATOR / should return label exception, treatment control and config nu // This validation is async because the only exception possible when retrieving a Split would happen with Async storages. 
const evaluation = await evaluationPromise; - expect(evaluation).toEqual(expectedOutput); // If there was an error on the getSplits we should get the results for exception. + expect(evaluation).toEqual(expectedOutput); // If there was an error on the get method, we should get the results for exception. }); diff --git a/src/evaluator/__tests__/evaluate-features.spec.ts b/src/evaluator/__tests__/evaluate-features.spec.ts index d4225cc2..9f807be3 100644 --- a/src/evaluator/__tests__/evaluate-features.spec.ts +++ b/src/evaluator/__tests__/evaluate-features.spec.ts @@ -22,20 +22,20 @@ const flagSetsMock: Record> = { }; const mockStorage = { - splits: { - getSplit(name: string) { + definitions: { + get(name: string) { if (name === 'throw_exception') throw new Error('Error'); if (splitsMock[name]) return splitsMock[name]; return null; }, - getSplits(names: string[]) { + getMany(names: string[]) { return names.reduce((acc, name) => { - acc[name] = this.getSplit(name); + acc[name] = this.get(name); return acc; }, {} as Record); }, - getNamesByFlagSets(flagSets: string[]) { + getNamesBySets(flagSets: string[]) { return flagSets.map(flagset => flagSetsMock[flagset] || new Set()); } } @@ -59,7 +59,7 @@ test('EVALUATOR - Multiple evaluations at once / should return label exception, mockStorage, ); - expect(evaluation).toEqual(expectedOutput); // If there was an error on the `getSplits` we should get the results for exception. + expect(evaluation).toEqual(expectedOutput); // If there was an error on the get method, we should get the results for exception. 
}); @@ -126,7 +126,7 @@ describe('EVALUATOR - Multiple evaluations at once by flag sets', () => { }, }; - const getResultsByFlagsets = (flagSets: string[], storage = mockStorage) => { + const getResultsByFlagSets = (flagSets: string[], storage = mockStorage) => { return evaluateFeaturesByFlagSets( loggerMock, 'fake-key', @@ -139,7 +139,7 @@ describe('EVALUATOR - Multiple evaluations at once by flag sets', () => { test('should return right labels, treatments and configs if storage returns without errors', async () => { - let multipleEvaluationAtOnceByFlagSets = await getResultsByFlagsets(['reg_and_config', 'arch_and_killed']); + let multipleEvaluationAtOnceByFlagSets = await getResultsByFlagSets(['reg_and_config', 'arch_and_killed']); // assert evaluationWithConfig expect(multipleEvaluationAtOnceByFlagSets['config']).toEqual(expectedOutput['config']); // If the split is retrieved successfully we should get the right evaluation result, label and config. @@ -158,10 +158,10 @@ describe('EVALUATOR - Multiple evaluations at once by flag sets', () => { // assert not_existent_split not in evaluation if it is not related to defined flag sets expect(multipleEvaluationAtOnceByFlagSets['not_existent_split']).toEqual(undefined); - multipleEvaluationAtOnceByFlagSets = await getResultsByFlagsets([]); + multipleEvaluationAtOnceByFlagSets = await getResultsByFlagSets([]); expect(multipleEvaluationAtOnceByFlagSets).toEqual({}); - multipleEvaluationAtOnceByFlagSets = await getResultsByFlagsets(['reg_and_config']); + multipleEvaluationAtOnceByFlagSets = await getResultsByFlagSets(['reg_and_config']); expect(multipleEvaluationAtOnceByFlagSets['config']).toEqual(expectedOutput['config']); expect(multipleEvaluationAtOnceByFlagSets['regular']).toEqual({ ...expectedOutput['config'], config: null }); expect(multipleEvaluationAtOnceByFlagSets['killed']).toEqual(undefined); @@ -169,28 +169,28 @@ describe('EVALUATOR - Multiple evaluations at once by flag sets', () => { }); test('should log a 
warning if evaluating with flag sets that doesn\'t contain cached feature flags', async () => { - const getSplitsSpy = jest.spyOn(mockStorage.splits, 'getSplits'); + const getManySpy = jest.spyOn(mockStorage.definitions, 'getMany'); - // No flag set contains cached feature flags -> getSplits method is not called - expect(getResultsByFlagsets(['inexistent_set1', 'inexistent_set2'])).toEqual({}); - expect(getSplitsSpy).not.toHaveBeenCalled(); + // No flag set contains cached feature flags -> getMany method is not called + expect(getResultsByFlagSets(['inexistent_set1', 'inexistent_set2'])).toEqual({}); + expect(getManySpy).not.toHaveBeenCalled(); expect(loggerMock.warn.mock.calls).toEqual([ [WARN_FLAGSET_WITHOUT_FLAGS, ['method-name', 'inexistent_set1']], [WARN_FLAGSET_WITHOUT_FLAGS, ['method-name', 'inexistent_set2']], ]); - // One flag set contains cached feature flags -> getSplits method is called - expect(getResultsByFlagsets(['inexistent_set3', 'reg_and_config'])).toEqual(getResultsByFlagsets(['reg_and_config'])); - expect(getSplitsSpy).toHaveBeenLastCalledWith(['regular', 'config']); + // One flag set contains cached feature flags -> getMany method is called + expect(getResultsByFlagSets(['inexistent_set3', 'reg_and_config'])).toEqual(getResultsByFlagSets(['reg_and_config'])); + expect(getManySpy).toHaveBeenLastCalledWith(['regular', 'config']); expect(loggerMock.warn).toHaveBeenLastCalledWith(WARN_FLAGSET_WITHOUT_FLAGS, ['method-name', 'inexistent_set3']); - getSplitsSpy.mockRestore(); + getManySpy.mockRestore(); loggerMock.warn.mockClear(); // Should support async storage too - expect(await getResultsByFlagsets(['inexistent_set1', 'inexistent_set2'], { - splits: { - getNamesByFlagSets(flagSets: string[]) { return Promise.resolve(flagSets.map(flagset => flagSetsMock[flagset] || new Set())); } + expect(await getResultsByFlagSets(['inexistent_set1', 'inexistent_set2'], { + definitions: { + getNamesBySets(flagSets: string[]) { return 
Promise.resolve(flagSets.map(flagset => flagSetsMock[flagset] || new Set())); } } } as unknown as IStorageSync)).toEqual({}); expect(loggerMock.warn.mock.calls).toEqual([ diff --git a/src/evaluator/index.ts b/src/evaluator/index.ts index bc0a1e07..e5000527 100644 --- a/src/evaluator/index.ts +++ b/src/evaluator/index.ts @@ -41,9 +41,9 @@ export function evaluateFeature( let definition; try { - definition = storage.splits.getSplit(definitionName); + definition = storage.definitions.get(definitionName); } catch (e) { - // Exception on sync `getSplit` storage. Not possible ATM with InMemory and InLocal storages. + // Exception on sync storage. Not possible ATM with InMemory and InLocal storages. return EVALUATION_EXCEPTION; } @@ -56,7 +56,7 @@ export function evaluateFeature( storage, options, )).catch( - // Exception on async `getSplit` storage. For example, when the storage is redis or + // Exception on async storage. For example, when the storage is redis or // pluggable and there is a connection issue and we can't retrieve the split to be evaluated () => EVALUATION_EXCEPTION ); @@ -83,16 +83,16 @@ export function evaluateFeatures( let definitions; try { - definitions = storage.splits.getSplits(definitionNames); + definitions = storage.definitions.getMany(definitionNames); } catch (e) { - // Exception on sync `getSplits` storage. Not possible ATM with InMemory and InLocal storages. + // Exception on sync storage. Not possible ATM with InMemory and InLocal storages. return treatmentsException(definitionNames); } return thenable(definitions) ? definitions.then(definitions => getEvaluations(log, key, definitionNames, definitions, attributes, storage, options)) .catch(() => { - // Exception on async `getSplits` storage. For example, when the storage is redis or + // Exception on async storage. 
For example, when the storage is redis or // pluggable and there is a connection issue and we can't retrieve the split to be evaluated return treatmentsException(definitionNames); }) : @@ -128,7 +128,7 @@ export function evaluateFeaturesByFlagSets( // get features by flag sets try { - storedFlagNames = storage.splits.getNamesByFlagSets(flagSets); + storedFlagNames = storage.definitions.getNamesBySets(flagSets); } catch (e) { // return empty evaluations return {}; @@ -156,7 +156,7 @@ function getEvaluation( const split = engineParser(log, definition, storage); const evaluation = split.getTreatment(key, attributes, evaluateFeature); - // If the storage is async and the evaluated flag uses segments or dependencies, evaluation is thenable + // If the storage is async and the evaluated definition uses segments or dependencies, evaluation is thenable if (thenable(evaluation)) { return evaluation.then(result => { result.changeNumber = definition.changeNumber; @@ -183,7 +183,7 @@ function getEvaluations( log: ILogger, key: SplitIO.SplitKey, definitionNames: string[], - splits: Record, + definitions: Record, attributes: SplitIO.Attributes | undefined, storage: IStorageSync | IStorageAsync, options?: SplitIO.EvaluationOptions, @@ -194,7 +194,7 @@ function getEvaluations( const evaluation = getEvaluation( log, key, - splits[definitionName], + definitions[definitionName], attributes, storage, options @@ -218,7 +218,7 @@ export function evaluateDefaultTreatment( let definition; try { - definition = storage.splits.getSplit(definitionName); + definition = storage.definitions.get(definitionName); } catch (e) { return EVALUATION_EXCEPTION; } diff --git a/src/evaluator/matchers/__tests__/dependency.spec.ts b/src/evaluator/matchers/__tests__/dependency.spec.ts index c9463e18..d768df69 100644 --- a/src/evaluator/matchers/__tests__/dependency.spec.ts +++ b/src/evaluator/matchers/__tests__/dependency.spec.ts @@ -13,8 +13,8 @@ const STORED_SPLITS: Record = { }; const mockStorage = { - 
splits: { - getSplit: (name: string) => STORED_SPLITS[name] + definitions: { + get: (name: string) => STORED_SPLITS[name] } }; diff --git a/src/evaluator/matchers/__tests__/prerequisites.spec.ts b/src/evaluator/matchers/__tests__/prerequisites.spec.ts index 7b1a55cd..dfaf3a3e 100644 --- a/src/evaluator/matchers/__tests__/prerequisites.spec.ts +++ b/src/evaluator/matchers/__tests__/prerequisites.spec.ts @@ -11,8 +11,8 @@ const STORED_SPLITS: Record = { }; const mockStorage = { - splits: { - getSplit: (name: string) => STORED_SPLITS[name] + definitions: { + get: (name: string) => STORED_SPLITS[name] } } as IStorageSync; diff --git a/src/evaluator/matchers/__tests__/rbsegment.spec.ts b/src/evaluator/matchers/__tests__/rbsegment.spec.ts index f1048d17..1463eb21 100644 --- a/src/evaluator/matchers/__tests__/rbsegment.spec.ts +++ b/src/evaluator/matchers/__tests__/rbsegment.spec.ts @@ -178,8 +178,8 @@ const STORED_RBSEGMENTS: Record = { const mockStorageSync = { isSync: true, - splits: { - getSplit(name: string) { + definitions: { + get(name: string) { return STORED_SPLITS[name]; } }, @@ -202,8 +202,8 @@ const mockStorageSync = { const mockStorageAsync = { isSync: false, - splits: { - getSplit(name: string) { + definitions: { + get(name: string) { return Promise.resolve(STORED_SPLITS[name]); } }, diff --git a/src/readiness/__tests__/readinessManager.spec.ts b/src/readiness/__tests__/readinessManager.spec.ts index 2ece8380..23336059 100644 --- a/src/readiness/__tests__/readinessManager.spec.ts +++ b/src/readiness/__tests__/readinessManager.spec.ts @@ -1,7 +1,7 @@ import { readinessManagerFactory } from '../readinessManager'; import { EventEmitter } from '../../utils/MinEvents'; import { IReadinessManager } from '../types'; -import { SDK_READY, SDK_UPDATE, SDK_SPLITS_ARRIVED, SDK_SEGMENTS_ARRIVED, SDK_READY_FROM_CACHE, SDK_SPLITS_CACHE_LOADED, SDK_READY_TIMED_OUT, FLAGS_UPDATE, SEGMENTS_UPDATE } from '../constants'; +import { SDK_READY, SDK_UPDATE, SDK_DEFINITIONS_ARRIVED, 
SDK_SEGMENTS_ARRIVED, SDK_READY_FROM_CACHE, SDK_DEFINITIONS_CACHE_LOADED, SDK_READY_TIMED_OUT, FLAGS_UPDATE, SEGMENTS_UPDATE } from '../constants'; import { ISettings } from '../../types'; import { SdkUpdateMetadata, SdkReadyMetadata } from '../../../types/splitio'; @@ -52,7 +52,7 @@ test('READINESS MANAGER / Share splits but segments (without timeout enabled)', // Simulate state transitions setTimeout(() => { - readinessManager.splits.emit(SDK_SPLITS_ARRIVED); + readinessManager.definitions.emit(SDK_DEFINITIONS_ARRIVED); }, 1000 * Math.random()); setTimeout(() => { readinessManager.segments.emit(SDK_SEGMENTS_ARRIVED); @@ -80,11 +80,11 @@ test('READINESS MANAGER / Ready event should be fired once', () => { counter++; }); - readinessManager.splits.emit(SDK_SPLITS_ARRIVED); + readinessManager.definitions.emit(SDK_DEFINITIONS_ARRIVED); readinessManager.segments.emit(SDK_SEGMENTS_ARRIVED); - readinessManager.splits.emit(SDK_SPLITS_ARRIVED); + readinessManager.definitions.emit(SDK_DEFINITIONS_ARRIVED); readinessManager.segments.emit(SDK_SEGMENTS_ARRIVED); - readinessManager.splits.emit(SDK_SPLITS_ARRIVED); + readinessManager.definitions.emit(SDK_DEFINITIONS_ARRIVED); readinessManager.segments.emit(SDK_SEGMENTS_ARRIVED); expect(counter).toBe(2); // should be called once @@ -100,13 +100,13 @@ test('READINESS MANAGER / Ready from cache event should be fired once', (done) = counter++; }); - readinessManager.splits.emit(SDK_SPLITS_CACHE_LOADED, { initialCacheLoad: false, lastUpdateTimestamp: undefined }); - readinessManager.splits.emit(SDK_SPLITS_CACHE_LOADED, { initialCacheLoad: false, lastUpdateTimestamp: undefined }); + readinessManager.definitions.emit(SDK_DEFINITIONS_CACHE_LOADED, { initialCacheLoad: false, lastUpdateTimestamp: undefined }); + readinessManager.definitions.emit(SDK_DEFINITIONS_CACHE_LOADED, { initialCacheLoad: false, lastUpdateTimestamp: undefined }); setTimeout(() => { - readinessManager.splits.emit(SDK_SPLITS_CACHE_LOADED, { initialCacheLoad: false, 
lastUpdateTimestamp: undefined }); + readinessManager.definitions.emit(SDK_DEFINITIONS_CACHE_LOADED, { initialCacheLoad: false, lastUpdateTimestamp: undefined }); }, 0); - readinessManager.splits.emit(SDK_SPLITS_CACHE_LOADED, { initialCacheLoad: false, lastUpdateTimestamp: undefined }); - readinessManager.splits.emit(SDK_SPLITS_CACHE_LOADED, { initialCacheLoad: false, lastUpdateTimestamp: undefined }); + readinessManager.definitions.emit(SDK_DEFINITIONS_CACHE_LOADED, { initialCacheLoad: false, lastUpdateTimestamp: undefined }); + readinessManager.definitions.emit(SDK_DEFINITIONS_CACHE_LOADED, { initialCacheLoad: false, lastUpdateTimestamp: undefined }); setTimeout(() => { expect(counter).toBe(1); // should be called only once @@ -128,12 +128,12 @@ test('READINESS MANAGER / Update event should be fired after the Ready event', ( isReady && counter++; }); - readinessManager.splits.emit(SDK_SPLITS_ARRIVED); + readinessManager.definitions.emit(SDK_DEFINITIONS_ARRIVED); readinessManager.segments.emit(SDK_SEGMENTS_ARRIVED); - readinessManager.splits.emit(SDK_SPLITS_ARRIVED); + readinessManager.definitions.emit(SDK_DEFINITIONS_ARRIVED); readinessManager.segments.emit(SDK_SEGMENTS_ARRIVED); - readinessManager.splits.emit(SDK_SPLITS_ARRIVED); + readinessManager.definitions.emit(SDK_DEFINITIONS_ARRIVED); readinessManager.segments.emit(SDK_SEGMENTS_ARRIVED); expect(counter).toBe(5); // should count 1 ready plus 4 updates @@ -153,7 +153,7 @@ test('READINESS MANAGER / Segment updates should not be propagated', (done) => { throw new Error('should not be called'); }); - readinessManager.splits.emit(SDK_SPLITS_ARRIVED); + readinessManager.definitions.emit(SDK_DEFINITIONS_ARRIVED); readinessManager2.segments.emit(SDK_SEGMENTS_ARRIVED); readinessManager2.segments.emit(SDK_SEGMENTS_ARRIVED); readinessManager2.segments.emit(SDK_SEGMENTS_ARRIVED); @@ -181,7 +181,7 @@ describe('READINESS MANAGER / Timeout event', () => { }); setTimeout(() => { - 
readinessManager.splits.emit(SDK_SPLITS_ARRIVED); + readinessManager.definitions.emit(SDK_DEFINITIONS_ARRIVED); readinessManager.segments.emit(SDK_SEGMENTS_ARRIVED); }, settingsWithTimeout.startup.readyTimeout + 20); }); @@ -235,7 +235,7 @@ test('READINESS MANAGER / Cancel timeout if ready fired', (done) => { }, settingsWithTimeout.startup.readyTimeout * 3); setTimeout(() => { - readinessManager.splits.emit(SDK_SPLITS_ARRIVED); + readinessManager.definitions.emit(SDK_DEFINITIONS_ARRIVED); readinessManager.segments.emit(SDK_SEGMENTS_ARRIVED); }, settingsWithTimeout.startup.readyTimeout * 0.8); }); @@ -252,7 +252,7 @@ test('READINESS MANAGER / Destroy after it was ready but before timedout', () => let lastUpdate = readinessManager.lastUpdate(); expect(lastUpdate).toBe(0); - readinessManager.splits.emit(SDK_SPLITS_ARRIVED); + readinessManager.definitions.emit(SDK_DEFINITIONS_ARRIVED); readinessManager.segments.emit(SDK_SEGMENTS_ARRIVED); // ready state expect(readinessManager.lastUpdate()).toBeGreaterThan(lastUpdate); @@ -291,7 +291,7 @@ test('READINESS MANAGER / Destroy before it was ready and timedout', (done) => { }, settingsWithTimeout.startup.readyTimeout * 0.5); setTimeout(() => { - readinessManager.splits.emit(SDK_SPLITS_ARRIVED); + readinessManager.definitions.emit(SDK_DEFINITIONS_ARRIVED); readinessManager.segments.emit(SDK_SEGMENTS_ARRIVED); // ready state if the readiness manager wasn't destroyed expect('Calling destroy should have removed the readyTimeout and the test should end now.'); @@ -304,7 +304,7 @@ test('READINESS MANAGER / SDK_UPDATE should emit with metadata', () => { const readinessManager = readinessManagerFactory(EventEmitter, settings); // SDK_READY - readinessManager.splits.emit(SDK_SPLITS_ARRIVED); + readinessManager.definitions.emit(SDK_DEFINITIONS_ARRIVED); readinessManager.segments.emit(SDK_SEGMENTS_ARRIVED); const metadata: SdkUpdateMetadata = { @@ -317,7 +317,7 @@ test('READINESS MANAGER / SDK_UPDATE should emit with metadata', () => { 
receivedMetadata = meta; }); - readinessManager.splits.emit(SDK_SPLITS_ARRIVED, metadata); + readinessManager.definitions.emit(SDK_DEFINITIONS_ARRIVED, metadata); expect(receivedMetadata).toEqual(metadata); }); @@ -326,7 +326,7 @@ test('READINESS MANAGER / SDK_UPDATE should handle undefined metadata', () => { const readinessManager = readinessManagerFactory(EventEmitter, settings); // SDK_READY - readinessManager.splits.emit(SDK_SPLITS_ARRIVED); + readinessManager.definitions.emit(SDK_DEFINITIONS_ARRIVED); readinessManager.segments.emit(SDK_SEGMENTS_ARRIVED); let receivedMetadata: any; @@ -334,7 +334,7 @@ test('READINESS MANAGER / SDK_UPDATE should handle undefined metadata', () => { receivedMetadata = meta; }); - readinessManager.splits.emit(SDK_SPLITS_ARRIVED); + readinessManager.definitions.emit(SDK_DEFINITIONS_ARRIVED); expect(receivedMetadata).toBeUndefined(); }); @@ -343,7 +343,7 @@ test('READINESS MANAGER / SDK_UPDATE should forward metadata from segments', () const readinessManager = readinessManagerFactory(EventEmitter, settings); // SDK_READY - readinessManager.splits.emit(SDK_SPLITS_ARRIVED); + readinessManager.definitions.emit(SDK_DEFINITIONS_ARRIVED); readinessManager.segments.emit(SDK_SEGMENTS_ARRIVED); const metadata: SdkUpdateMetadata = { @@ -371,7 +371,7 @@ test('READINESS MANAGER / SDK_READY_FROM_CACHE should emit with metadata when ca }); // Emit cache loaded event with timestamp - readinessManager.splits.emit(SDK_SPLITS_CACHE_LOADED, { + readinessManager.definitions.emit(SDK_DEFINITIONS_CACHE_LOADED, { initialCacheLoad: false, lastUpdateTimestamp: cacheTimestamp }); @@ -390,7 +390,7 @@ test('READINESS MANAGER / SDK_READY_FROM_CACHE should emit with metadata when SD }); // Make SDK ready without cache first - readinessManager.splits.emit(SDK_SPLITS_ARRIVED); + readinessManager.definitions.emit(SDK_DEFINITIONS_ARRIVED); readinessManager.segments.emit(SDK_SEGMENTS_ARRIVED); expect(receivedMetadata).toBeDefined(); @@ -403,7 +403,7 @@ test('READINESS 
MANAGER / SDK_READY should emit with metadata when ready from ca const cacheTimestamp = Date.now() - 1000 * 60 * 60; // 1 hour ago // First emit cache loaded with timestamp - readinessManager.splits.emit(SDK_SPLITS_CACHE_LOADED, { initialCacheLoad: false, lastUpdateTimestamp: cacheTimestamp }); + readinessManager.definitions.emit(SDK_DEFINITIONS_CACHE_LOADED, { initialCacheLoad: false, lastUpdateTimestamp: cacheTimestamp }); let receivedMetadata: SdkReadyMetadata | undefined; readinessManager.gate.on(SDK_READY, (meta: SdkReadyMetadata) => { @@ -411,7 +411,7 @@ test('READINESS MANAGER / SDK_READY should emit with metadata when ready from ca }); // Make SDK ready - readinessManager.splits.emit(SDK_SPLITS_ARRIVED); + readinessManager.definitions.emit(SDK_DEFINITIONS_ARRIVED); readinessManager.segments.emit(SDK_SEGMENTS_ARRIVED); expect(receivedMetadata).toBeDefined(); @@ -428,7 +428,7 @@ test('READINESS MANAGER / SDK_READY should emit with metadata when ready without }); // Make SDK ready without cache - readinessManager.splits.emit(SDK_SPLITS_ARRIVED); + readinessManager.definitions.emit(SDK_DEFINITIONS_ARRIVED); readinessManager.segments.emit(SDK_SEGMENTS_ARRIVED); expect(receivedMetadata).toBeDefined(); diff --git a/src/readiness/__tests__/sdkReadinessManager.spec.ts b/src/readiness/__tests__/sdkReadinessManager.spec.ts index 01bb5016..54c7d590 100644 --- a/src/readiness/__tests__/sdkReadinessManager.spec.ts +++ b/src/readiness/__tests__/sdkReadinessManager.spec.ts @@ -1,7 +1,7 @@ // @ts-nocheck import { loggerMock } from '../../logger/__tests__/sdkLogger.mock'; import SplitIO from '../../../types/splitio'; -import { SDK_READY, SDK_READY_FROM_CACHE, SDK_READY_TIMED_OUT, SDK_UPDATE, SDK_SPLITS_ARRIVED, SDK_SEGMENTS_ARRIVED, SDK_SPLITS_CACHE_LOADED } from '../constants'; +import { SDK_READY, SDK_READY_FROM_CACHE, SDK_READY_TIMED_OUT, SDK_UPDATE, SDK_DEFINITIONS_ARRIVED, SDK_SEGMENTS_ARRIVED, SDK_DEFINITIONS_CACHE_LOADED } from '../constants'; import { 
sdkReadinessManagerFactory } from '../sdkReadinessManager'; import { IReadinessManager } from '../types'; import { ERROR_CLIENT_LISTENER, CLIENT_READY_FROM_CACHE, CLIENT_READY } from '../../logger/constants'; @@ -21,13 +21,13 @@ const EventEmitterMock = jest.fn(() => ({ // Makes readinessManager emit SDK_READY & update isReady flag function emitReadyEvent(readinessManager: IReadinessManager) { if (readinessManager.gate instanceof EventEmitter) { - readinessManager.splits.emit(SDK_SPLITS_ARRIVED); + readinessManager.definitions.emit(SDK_DEFINITIONS_ARRIVED); readinessManager.segments.emit(SDK_SEGMENTS_ARRIVED); return; } - readinessManager.splits.once.mock.calls[0][1](); - readinessManager.splits.on.mock.calls[0][1](); + readinessManager.definitions.once.mock.calls[0][1](); + readinessManager.definitions.on.mock.calls[0][1](); readinessManager.segments.once.mock.calls[0][1](); readinessManager.segments.on.mock.calls[0][1](); readinessManager.gate.once.mock.calls[0][1](); @@ -148,7 +148,7 @@ describe('SDK Readiness Manager - Promises', () => { const sdkReadinessManager = sdkReadinessManagerFactory(EventEmitter, fullSettings); // make the SDK ready from cache - sdkReadinessManager.readinessManager.splits.emit(SDK_SPLITS_CACHE_LOADED, { initialCacheLoad: false, lastUpdateTimestamp: null }); + sdkReadinessManager.readinessManager.definitions.emit(SDK_DEFINITIONS_CACHE_LOADED, { initialCacheLoad: false, lastUpdateTimestamp: null }); expect(await sdkReadinessManager.sdkStatus.whenReadyFromCache()).toEqual({ initialCacheLoad: false, lastUpdateTimestamp: null }); // validate error log for SDK_READY_FROM_CACHE diff --git a/src/readiness/constants.ts b/src/readiness/constants.ts index 022100cc..3d9944eb 100644 --- a/src/readiness/constants.ts +++ b/src/readiness/constants.ts @@ -1,6 +1,6 @@ -// Splits events: -export const SDK_SPLITS_ARRIVED = 'state::splits-arrived'; -export const SDK_SPLITS_CACHE_LOADED = 'state::splits-cache-loaded'; +// Definitions events: +export const 
SDK_DEFINITIONS_ARRIVED = 'state::splits-arrived'; +export const SDK_DEFINITIONS_CACHE_LOADED = 'state::splits-cache-loaded'; // Segments events: export const SDK_SEGMENTS_ARRIVED = 'state::segments-arrived'; diff --git a/src/readiness/readinessManager.ts b/src/readiness/readinessManager.ts index 95d2fe41..0233b736 100644 --- a/src/readiness/readinessManager.ts +++ b/src/readiness/readinessManager.ts @@ -1,13 +1,13 @@ import { objectAssign } from '../utils/lang/objectAssign'; import { ISettings } from '../types'; import SplitIO, { SdkReadyMetadata } from '../../types/splitio'; -import { SDK_SPLITS_ARRIVED, SDK_SPLITS_CACHE_LOADED, SDK_SEGMENTS_ARRIVED, SDK_READY_TIMED_OUT, SDK_READY_FROM_CACHE, SDK_UPDATE, SDK_READY } from './constants'; -import { IReadinessEventEmitter, IReadinessManager, ISegmentsEventEmitter, ISplitsEventEmitter } from './types'; +import { SDK_DEFINITIONS_ARRIVED, SDK_DEFINITIONS_CACHE_LOADED, SDK_SEGMENTS_ARRIVED, SDK_READY_TIMED_OUT, SDK_READY_FROM_CACHE, SDK_UPDATE, SDK_READY } from './constants'; +import { IReadinessEventEmitter, IReadinessManager, ISegmentsEventEmitter, IDefinitionsEventEmitter } from './types'; -function splitsEventEmitterFactory(EventEmitter: new () => SplitIO.IEventEmitter): ISplitsEventEmitter { - const splitsEventEmitter = objectAssign(new EventEmitter(), { - splitsArrived: false, - splitsCacheLoaded: false, +function definitionsEventEmitterFactory(EventEmitter: new () => SplitIO.IEventEmitter): IDefinitionsEventEmitter { + const definitionsEventEmitter = objectAssign(new EventEmitter(), { + definitionsArrived: false, + definitionsCacheLoaded: false, hasInit: false, initCallbacks: [] }); @@ -15,10 +15,10 @@ function splitsEventEmitterFactory(EventEmitter: new () => SplitIO.IEventEmitter // `isSplitKill` condition avoids an edge-case of wrongly emitting SDK_READY if: // - `/memberships` fetch and SPLIT_KILL occurs before `/splitChanges` fetch, and // - storage has cached splits (for which case 
`splitsStorage.killLocally` can return true) - splitsEventEmitter.on(SDK_SPLITS_ARRIVED, (metadata: SplitIO.SdkUpdateMetadata, isSplitKill: boolean) => { if (!isSplitKill) splitsEventEmitter.splitsArrived = true; }); - splitsEventEmitter.once(SDK_SPLITS_CACHE_LOADED, () => { splitsEventEmitter.splitsCacheLoaded = true; }); + definitionsEventEmitter.on(SDK_DEFINITIONS_ARRIVED, (metadata: SplitIO.SdkUpdateMetadata, isSplitKill: boolean) => { if (!isSplitKill) definitionsEventEmitter.definitionsArrived = true; }); + definitionsEventEmitter.once(SDK_DEFINITIONS_CACHE_LOADED, () => { definitionsEventEmitter.definitionsCacheLoaded = true; }); - return splitsEventEmitter; + return definitionsEventEmitter; } function segmentsEventEmitterFactory(EventEmitter: new () => SplitIO.IEventEmitter): ISegmentsEventEmitter { @@ -37,7 +37,7 @@ function segmentsEventEmitterFactory(EventEmitter: new () => SplitIO.IEventEmitt export function readinessManagerFactory( EventEmitter: new () => SplitIO.IEventEmitter, settings: ISettings, - splits: ISplitsEventEmitter = splitsEventEmitterFactory(EventEmitter), + definitions: IDefinitionsEventEmitter = definitionsEventEmitterFactory(EventEmitter), isShared?: boolean ): IReadinessManager { @@ -59,8 +59,8 @@ export function readinessManagerFactory( // emit SDK_READY_FROM_CACHE let isReadyFromCache = false; - if (splits.splitsCacheLoaded) isReadyFromCache = true; // ready from cache, but doesn't emit SDK_READY_FROM_CACHE - else splits.once(SDK_SPLITS_CACHE_LOADED, checkIsReadyFromCache); + if (definitions.definitionsCacheLoaded) isReadyFromCache = true; // ready from cache, but doesn't emit SDK_READY_FROM_CACHE + else definitions.once(SDK_DEFINITIONS_CACHE_LOADED, checkIsReadyFromCache); // emit SDK_READY_TIMED_OUT let hasTimedout = false; @@ -75,7 +75,7 @@ export function readinessManagerFactory( // emit SDK_READY and SDK_UPDATE let isReady = false; - splits.on(SDK_SPLITS_ARRIVED, checkIsReadyOrUpdate); + definitions.on(SDK_DEFINITIONS_ARRIVED, 
checkIsReadyOrUpdate); segments.on(SDK_SEGMENTS_ARRIVED, checkIsReadyOrUpdate); let isDestroyed = false; @@ -85,8 +85,8 @@ export function readinessManagerFactory( if (readyTimeout > 0 && !isReady) readyTimeoutId = setTimeout(timeout, readyTimeout); } - splits.initCallbacks.push(__init); - if (splits.hasInit) __init(); + definitions.initCallbacks.push(__init); + if (definitions.hasInit) __init(); function checkIsReadyFromCache(cacheMetadata: SdkReadyMetadata) { metadataReady = cacheMetadata; @@ -114,7 +114,7 @@ export function readinessManagerFactory( setTimeout(() => { throw e; }, 0); } } else { - if (splits.splitsArrived && segments.segmentsArrived) { + if (definitions.definitionsArrived && segments.segmentsArrived) { clearTimeout(readyTimeoutId); isReady = true; try { @@ -137,12 +137,12 @@ export function readinessManagerFactory( } return { - splits, + definitions, segments, gate, shared() { - return readinessManagerFactory(EventEmitter, settings, splits, true); + return readinessManagerFactory(EventEmitter, settings, definitions, true); }, // @TODO review/remove next methods when non-recoverable errors are reworked @@ -153,9 +153,9 @@ export function readinessManagerFactory( setDestroyed() { isDestroyed = true; }, init() { - if (splits.hasInit) return; - splits.hasInit = true; - splits.initCallbacks.forEach(cb => cb()); + if (definitions.hasInit) return; + definitions.hasInit = true; + definitions.initCallbacks.forEach(cb => cb()); }, destroy() { @@ -163,7 +163,7 @@ export function readinessManagerFactory( syncLastUpdate(); clearTimeout(readyTimeoutId); - if (!isShared) splits.hasInit = false; + if (!isShared) definitions.hasInit = false; }, isReady() { return isReady; }, diff --git a/src/readiness/types.ts b/src/readiness/types.ts index 34b57a53..99c2e17d 100644 --- a/src/readiness/types.ts +++ b/src/readiness/types.ts @@ -16,17 +16,17 @@ export interface IReadinessEventEmitter extends SplitIO.IEventEmitter { /** Splits data emitter */ -type SDK_SPLITS_ARRIVED 
= 'state::splits-arrived' -type SDK_SPLITS_CACHE_LOADED = 'state::splits-cache-loaded' -type ISplitsEvent = SDK_SPLITS_ARRIVED | SDK_SPLITS_CACHE_LOADED +type SDK_DEFINITIONS_ARRIVED = 'state::splits-arrived' +type SDK_DEFINITIONS_CACHE_LOADED = 'state::splits-cache-loaded' +type IDefinitionsEvent = SDK_DEFINITIONS_ARRIVED | SDK_DEFINITIONS_CACHE_LOADED -export interface ISplitsEventEmitter extends SplitIO.IEventEmitter { - emit(event: ISplitsEvent, ...args: any[]): boolean - on(event: ISplitsEvent, listener: (...args: any[]) => void): this; +export interface IDefinitionsEventEmitter extends SplitIO.IEventEmitter { + emit(event: IDefinitionsEvent, ...args: any[]): boolean + on(event: IDefinitionsEvent, listener: (...args: any[]) => void): this; on(event: SDK_UPDATE, listener: (metadata: SplitIO.SdkUpdateMetadata) => void): this; - once(event: ISplitsEvent, listener: (...args: any[]) => void): this; - splitsArrived: boolean - splitsCacheLoaded: boolean + once(event: IDefinitionsEvent, listener: (...args: any[]) => void): this; + definitionsArrived: boolean + definitionsCacheLoaded: boolean hasInit: boolean, initCallbacks: (() => void)[] } @@ -48,7 +48,7 @@ export interface ISegmentsEventEmitter extends SplitIO.IEventEmitter { export interface IReadinessManager { /** Event emitters */ - splits: ISplitsEventEmitter, + definitions: IDefinitionsEventEmitter, segments: ISegmentsEventEmitter, gate: IReadinessEventEmitter, diff --git a/src/sdkClient/__tests__/trackMethod.spec.ts b/src/sdkClient/__tests__/trackMethod.spec.ts index d64be4b9..ed0c118d 100644 --- a/src/sdkClient/__tests__/trackMethod.spec.ts +++ b/src/sdkClient/__tests__/trackMethod.spec.ts @@ -22,7 +22,7 @@ const telemetryTrackerMock = { const definitionsMock = { getAll: jest.fn(() => []), - getSplit: jest.fn(() => null), + get: jest.fn(() => null), trafficTypeExists: jest.fn(() => true), }; @@ -31,7 +31,7 @@ const trackMethodParams = { eventTracker: eventTrackerMock as any, telemetryTracker: 
telemetryTrackerMock as any, sdkReadinessManager: { readinessManager: readinessManagerMock }, - storage: { splits: definitionsMock }, + storage: { definitions: definitionsMock }, } as unknown as ISdkFactoryContext; describe('trackMethodFactory', () => { diff --git a/src/sdkClient/trackMethod.ts b/src/sdkClient/trackMethod.ts index bfe9bdc9..d8ab35d5 100644 --- a/src/sdkClient/trackMethod.ts +++ b/src/sdkClient/trackMethod.ts @@ -12,7 +12,7 @@ import { ISdkFactoryContext } from '../sdkFactory/types'; * Reusable by FF SDK client, Configs SDK, and thin-client SDK. */ export function trackMethodFactory(params: Pick, warnTTExistence = true) { - const { settings, storage: { splits }, telemetryTracker, eventTracker, sdkReadinessManager: { readinessManager } } = params; + const { settings, storage: { definitions }, telemetryTracker, eventTracker, sdkReadinessManager: { readinessManager } } = params; const { log, mode } = settings; const isAsync = isConsumerMode(mode); @@ -42,7 +42,7 @@ export function trackMethodFactory(params: Pick -1) readiness.splits.emit(SDK_SPLITS_CACHE_LOADED, { initialCacheLoad: false /* Not an initial load, cache exists */ }); + if ((storage as IStorageSync).definitions.getChangeNumber() > -1) readiness.definitions.emit(SDK_DEFINITIONS_CACHE_LOADED, { initialCacheLoad: false /* Not an initial load, cache exists */ }); } const clients: Record void }> = {}; @@ -66,7 +66,7 @@ export function sdkFactory(params: ISdkFactoryParams): SplitIO.ISDK | SplitIO.IA // SDK client and manager const clientMethod = sdkClientMethodFactory(ctx); - const managerInstance = sdkManagerFactory(settings, storage.splits, sdkReadinessManager); + const managerInstance = sdkManagerFactory(settings, storage.definitions, sdkReadinessManager); function init() { diff --git a/src/sdkManager/__tests__/index.asyncCache.spec.ts b/src/sdkManager/__tests__/index.asyncCache.spec.ts index 321cb776..5c4dfe6e 100644 --- a/src/sdkManager/__tests__/index.asyncCache.spec.ts +++ 
b/src/sdkManager/__tests__/index.asyncCache.spec.ts @@ -1,8 +1,8 @@ import splitObject from './mocks/input.json'; import splitView from './mocks/output.json'; import { sdkManagerFactory } from '../index'; -import { SplitsCacheInRedis } from '../../storages/inRedis/SplitsCacheInRedis'; -import { SplitsCachePluggable } from '../../storages/pluggable/SplitsCachePluggable'; +import { DefinitionsCacheInRedis } from '../../storages/inRedis/DefinitionsCacheInRedis'; +import { DefinitionsCachePluggable } from '../../storages/pluggable/DefinitionsCachePluggable'; import { wrapperAdapter } from '../../storages/pluggable/wrapperAdapter'; import { KeyBuilderSS } from '../../storages/KeyBuilderSS'; import { ISdkReadinessManager } from '../../readiness/types'; @@ -31,10 +31,10 @@ describe('Manager with async cache', () => { /** Setup: create manager */ const connection = new RedisAdapter(loggerMock); - const cache = new SplitsCacheInRedis(loggerMock, keys, connection); + const cache = new DefinitionsCacheInRedis(loggerMock, keys, connection); const manager = sdkManagerFactory({ mode: 'consumer', log: loggerMock }, cache, sdkReadinessManagerMock); await cache.clear(); - await cache.addSplit(splitObject as any); + await cache.add(splitObject as any); /** List all splits */ const views = await manager.splits(); @@ -68,14 +68,14 @@ describe('Manager with async cache', () => { expect(await manager.names()).toEqual([]); // If the factory/client is destroyed, `manager.names()` will return empty array either way since the storage is not valid. /** Teardown */ - await cache.removeSplit(splitObject.name); + await cache.remove(splitObject.name); await connection.disconnect(); }); test('handles storage errors', async () => { // passing an empty object as wrapper, to make method calls of splits cache fail returning a rejected promise. 
// @ts-expect-error - const cache = new SplitsCachePluggable(loggerMock, keys, wrapperAdapter(loggerMock, {})); + const cache = new DefinitionsCachePluggable(loggerMock, keys, wrapperAdapter(loggerMock, {})); const manager = sdkManagerFactory({ mode: 'consumer_partial', log: loggerMock }, cache, sdkReadinessManagerMock); expect(await manager.split('some_split')).toEqual(null); diff --git a/src/sdkManager/__tests__/index.syncCache.spec.ts b/src/sdkManager/__tests__/index.syncCache.spec.ts index 3437f008..b53cce86 100644 --- a/src/sdkManager/__tests__/index.syncCache.spec.ts +++ b/src/sdkManager/__tests__/index.syncCache.spec.ts @@ -1,7 +1,7 @@ import splitObject from './mocks/input.json'; import splitView from './mocks/output.json'; import { sdkManagerFactory } from '../index'; -import { SplitsCacheInMemory } from '../../storages/inMemory/SplitsCacheInMemory'; +import { DefinitionsCacheInMemory } from '../../storages/inMemory/DefinitionsCacheInMemory'; import { ISdkReadinessManager } from '../../readiness/types'; import { loggerMock } from '../../logger/__tests__/sdkLogger.mock'; @@ -18,9 +18,9 @@ const sdkReadinessManagerMock = { describe('Manager with sync cache (In Memory)', () => { /** Setup: create manager */ - const cache = new SplitsCacheInMemory(); + const cache = new DefinitionsCacheInMemory(); const manager = sdkManagerFactory({ mode: 'standalone', log: loggerMock }, cache, sdkReadinessManagerMock); - cache.addSplit(splitObject as any); + cache.add(splitObject as any); test('List all splits', () => { diff --git a/src/sdkManager/index.ts b/src/sdkManager/index.ts index 90bf59ff..3e78468c 100644 --- a/src/sdkManager/index.ts +++ b/src/sdkManager/index.ts @@ -2,7 +2,7 @@ import { objectAssign } from '../utils/lang/objectAssign'; import { thenable } from '../utils/promise/thenable'; import { find } from '../utils/lang'; import { validateDefinition, validateDefinitionExistence, validateIfOperational } from '../utils/inputValidation'; -import { 
ISplitsCacheAsync, ISplitsCacheSync } from '../storages/types'; +import { IDefinitionsCacheAsync, IDefinitionsCacheSync } from '../storages/types'; import { ISdkReadinessManager } from '../readiness/types'; import { IDefinition } from '../dtos/types'; import { ISettings } from '../types'; @@ -48,11 +48,11 @@ function objectsToViews(splitObjects: IDefinition[]) { return views; } -export function sdkManagerFactory( +export function sdkManagerFactory( settings: Pick, - splits: TSplitCache, + splits: TDefinitionsCache, { readinessManager, sdkStatus }: ISdkReadinessManager, -): TSplitCache extends ISplitsCacheAsync ? SplitIO.IAsyncManager : SplitIO.IManager { +): TDefinitionsCache extends IDefinitionsCacheAsync ? SplitIO.IAsyncManager : SplitIO.IManager { const { log, mode } = settings; const isAsync = isConsumerMode(mode); @@ -70,7 +70,7 @@ export function sdkManagerFactory null).then(result => { // handle possible rejections when using pluggable storage @@ -103,7 +103,7 @@ export function sdkManagerFactory []) : // handle possible rejections when using pluggable storage diff --git a/src/storages/AbstractDefinitionsCacheAsync.ts b/src/storages/AbstractDefinitionsCacheAsync.ts new file mode 100644 index 00000000..90cbc252 --- /dev/null +++ b/src/storages/AbstractDefinitionsCacheAsync.ts @@ -0,0 +1,63 @@ +import { IDefinitionsCacheAsync } from './types'; +import { IDefinition } from '../dtos/types'; +import { objectAssign } from '../utils/lang/objectAssign'; + +/** + * This class provides a skeletal implementation of the IDefinitionsCacheAsync interface + * to minimize the effort required to implement this interface. 
+ */ +export abstract class AbstractDefinitionsCacheAsync implements IDefinitionsCacheAsync { + + protected abstract add(definition: IDefinition): Promise + protected abstract remove(name: string): Promise + protected abstract setChangeNumber(changeNumber: number): Promise + + update(toAdd: IDefinition[], toRemove: string[], changeNumber: number): Promise { + return Promise.all([ + this.setChangeNumber(changeNumber), + Promise.all(toAdd.map(addedFF => this.add(addedFF))), + Promise.all(toRemove.map(removedFF => this.remove(removedFF))) + ]).then(([, added, removed]) => { + return added.some(result => result) || removed.some(result => result); + }); + } + + abstract get(name: string): Promise + abstract getMany(names: string[]): Promise> + abstract getChangeNumber(): Promise + abstract getAll(): Promise + abstract getNames(): Promise + abstract getNamesBySets(sets: string[]): Promise[]> + abstract trafficTypeExists(trafficType: string): Promise + abstract clear(): Promise + + // @TODO revisit segment-related methods ('usesSegments', 'getRegisteredSegments', 'registerSegments') + // noop, just keeping the interface. This is used by standalone client-side API only, and so only implemented by InMemory and InLocalStorage. + usesSegments(): Promise { + return Promise.resolve(true); + } + + /** + * Kill `name` definition and set `defaultTreatment` and `changeNumber`. + * Used for SPLIT_KILL push notifications. + * + * @returns a promise that is resolved once the definition kill operation is performed. The fulfillment value is a boolean: `true` if the operation succeeded updating the definition or `false` if no definition is updated, + * for instance, if the `changeNumber` is old, or if the definition is not found (e.g., `/splitchanges` hasn't been fetched yet), or if the storage fails to apply the update. + * The promise will never be rejected. 
+ */ + killLocally(name: string, defaultTreatment: string, changeNumber: number): Promise { + return this.get(name).then(definition => { + + if (definition && (!definition.changeNumber || definition.changeNumber < changeNumber)) { + const newDefinition = objectAssign({}, definition); + newDefinition.killed = true; + newDefinition.defaultTreatment = defaultTreatment; + newDefinition.changeNumber = changeNumber; + + return this.add(newDefinition); + } + return false; + }).catch(() => false); + } + +} diff --git a/src/storages/AbstractDefinitionsCacheSync.ts b/src/storages/AbstractDefinitionsCacheSync.ts new file mode 100644 index 00000000..a3ef96b5 --- /dev/null +++ b/src/storages/AbstractDefinitionsCacheSync.ts @@ -0,0 +1,95 @@ +import { IDefinitionsCacheSync, IStorageSync } from './types'; +import { IRBSegment, IDefinition } from '../dtos/types'; +import { objectAssign } from '../utils/lang/objectAssign'; +import { IN_SEGMENT, IN_LARGE_SEGMENT } from '../utils/constants'; + +/** + * This class provides a skeletal implementation of the IDefinitionsCacheSync interface + * to minimize the effort required to implement this interface. 
+ */ +export abstract class AbstractDefinitionsCacheSync implements IDefinitionsCacheSync { + + protected abstract add(definition: IDefinition): boolean + protected abstract remove(name: string): boolean + protected abstract setChangeNumber(changeNumber: number): boolean | void + + update(toAdd: IDefinition[], toRemove: string[], changeNumber: number): boolean { + let updated = toAdd.map(addedFF => this.add(addedFF)).some(result => result); + updated = toRemove.map(removedFF => this.remove(removedFF)).some(result => result) || updated; + this.setChangeNumber(changeNumber); + return updated; + } + + abstract get(name: string): IDefinition | null + + getMany(names: string[]): Record { + const definitions: Record = {}; + names.forEach(name => { + definitions[name] = this.get(name); + }); + return definitions; + } + + abstract getChangeNumber(): number + + getAll(): IDefinition[] { + return this.getNames().map(key => this.get(key) as IDefinition); + } + + abstract getNames(): string[] + + abstract trafficTypeExists(trafficType: string): boolean + + abstract usesSegments(): boolean + + abstract clear(): void + + /** + * Kill `name` definition and set `defaultTreatment` and `changeNumber`. + * Used for SPLIT_KILL push notifications. + * + * @returns `true` if the operation succeeded updating the definition, or `false` if no definition is updated, + * for instance, if the `changeNumber` is old, or if the definition is not found (e.g., `/splitchanges` hasn't been fetched yet), or if the storage fails to apply the update.
+ */ + killLocally(name: string, defaultTreatment: string, changeNumber: number): boolean { + const definition = this.get(name); + + if (definition && (!definition.changeNumber || definition.changeNumber < changeNumber)) { + const newDefinition = objectAssign({}, definition); + newDefinition.killed = true; + newDefinition.defaultTreatment = defaultTreatment; + newDefinition.changeNumber = changeNumber; + + return this.add(newDefinition); + } + return false; + } + + abstract getNamesBySets(sets: string[]): Set[] + +} + +/** + * Given a parsed definition, it returns a boolean flagging if its conditions use segments matchers (rules & whitelists). + * This util is intended to simplify the implementation of `definitionsCache::usesSegments` method + */ +export function usesSegments(ruleEntity: IDefinition | IRBSegment) { + const conditions = ruleEntity.conditions || []; + for (let i = 0; i < conditions.length; i++) { + const matchers = conditions[i].matcherGroup.matchers; + + for (let j = 0; j < matchers.length; j++) { + const matcher = matchers[j].matcherType; + if (matcher === IN_SEGMENT || matcher === IN_LARGE_SEGMENT) return true; + } + } + + const excluded = (ruleEntity as IRBSegment).excluded; + if (excluded && excluded.segments && excluded.segments.length > 0) return true; + + return false; +} + +export function usesSegmentsSync(storage: Pick) { + return storage.definitions.usesSegments() || storage.rbSegments.usesSegments(); +} diff --git a/src/storages/AbstractSplitsCacheAsync.ts b/src/storages/AbstractSplitsCacheAsync.ts deleted file mode 100644 index 9671ff8c..00000000 --- a/src/storages/AbstractSplitsCacheAsync.ts +++ /dev/null @@ -1,63 +0,0 @@ -import { ISplitsCacheAsync } from './types'; -import { IDefinition } from '../dtos/types'; -import { objectAssign } from '../utils/lang/objectAssign'; - -/** - * This class provides a skeletal implementation of the ISplitsCacheAsync interface - * to minimize the effort required to implement this interface. 
- */ -export abstract class AbstractSplitsCacheAsync implements ISplitsCacheAsync { - - protected abstract addSplit(split: IDefinition): Promise - protected abstract removeSplit(name: string): Promise - protected abstract setChangeNumber(changeNumber: number): Promise - - update(toAdd: IDefinition[], toRemove: string[], changeNumber: number): Promise { - return Promise.all([ - this.setChangeNumber(changeNumber), - Promise.all(toAdd.map(addedFF => this.addSplit(addedFF))), - Promise.all(toRemove.map(removedFF => this.removeSplit(removedFF))) - ]).then(([, added, removed]) => { - return added.some(result => result) || removed.some(result => result); - }); - } - - abstract getSplit(name: string): Promise - abstract getSplits(names: string[]): Promise> - abstract getChangeNumber(): Promise - abstract getAll(): Promise - abstract getSplitNames(): Promise - abstract getNamesByFlagSets(flagSets: string[]): Promise[]> - abstract trafficTypeExists(trafficType: string): Promise - abstract clear(): Promise - - // @TODO revisit segment-related methods ('usesSegments', 'getRegisteredSegments', 'registerSegments') - // noop, just keeping the interface. This is used by standalone client-side API only, and so only implemented by InMemory and InLocalStorage. - usesSegments(): Promise { - return Promise.resolve(true); - } - - /** - * Kill `name` split and set `defaultTreatment` and `changeNumber`. - * Used for SPLIT_KILL push notifications. - * - * @returns a promise that is resolved once the split kill operation is performed. The fulfillment value is a boolean: `true` if the operation successed updating the split or `false` if no split is updated, - * for instance, if the `changeNumber` is old, or if the split is not found (e.g., `/splitchanges` hasn't been fetched yet), or if the storage fails to apply the update. - * The promise will never be rejected. 
- */ - killLocally(name: string, defaultTreatment: string, changeNumber: number): Promise { - return this.getSplit(name).then(split => { - - if (split && (!split.changeNumber || split.changeNumber < changeNumber)) { - const newSplit = objectAssign({}, split); - newSplit.killed = true; - newSplit.defaultTreatment = defaultTreatment; - newSplit.changeNumber = changeNumber; - - return this.addSplit(newSplit); - } - return false; - }).catch(() => false); - } - -} diff --git a/src/storages/AbstractSplitsCacheSync.ts b/src/storages/AbstractSplitsCacheSync.ts deleted file mode 100644 index 86cc57ac..00000000 --- a/src/storages/AbstractSplitsCacheSync.ts +++ /dev/null @@ -1,95 +0,0 @@ -import { ISplitsCacheSync, IStorageSync } from './types'; -import { IRBSegment, IDefinition } from '../dtos/types'; -import { objectAssign } from '../utils/lang/objectAssign'; -import { IN_SEGMENT, IN_LARGE_SEGMENT } from '../utils/constants'; - -/** - * This class provides a skeletal implementation of the ISplitsCacheSync interface - * to minimize the effort required to implement this interface. 
- */ -export abstract class AbstractSplitsCacheSync implements ISplitsCacheSync { - - protected abstract addSplit(split: IDefinition): boolean - protected abstract removeSplit(name: string): boolean - protected abstract setChangeNumber(changeNumber: number): boolean | void - - update(toAdd: IDefinition[], toRemove: string[], changeNumber: number): boolean { - let updated = toAdd.map(addedFF => this.addSplit(addedFF)).some(result => result); - updated = toRemove.map(removedFF => this.removeSplit(removedFF)).some(result => result) || updated; - this.setChangeNumber(changeNumber); - return updated; - } - - abstract getSplit(name: string): IDefinition | null - - getSplits(names: string[]): Record { - const splits: Record = {}; - names.forEach(name => { - splits[name] = this.getSplit(name); - }); - return splits; - } - - abstract getChangeNumber(): number - - getAll(): IDefinition[] { - return this.getSplitNames().map(key => this.getSplit(key) as IDefinition); - } - - abstract getSplitNames(): string[] - - abstract trafficTypeExists(trafficType: string): boolean - - abstract usesSegments(): boolean - - abstract clear(): void - - /** - * Kill `name` split and set `defaultTreatment` and `changeNumber`. - * Used for SPLIT_KILL push notifications. - * - * @returns `true` if the operation successed updating the split, or `false` if no split is updated, - * for instance, if the `changeNumber` is old, or if the split is not found (e.g., `/splitchanges` hasn't been fetched yet), or if the storage fails to apply the update. 
- */ - killLocally(name: string, defaultTreatment: string, changeNumber: number): boolean { - const split = this.getSplit(name); - - if (split && (!split.changeNumber || split.changeNumber < changeNumber)) { - const newSplit = objectAssign({}, split); - newSplit.killed = true; - newSplit.defaultTreatment = defaultTreatment; - newSplit.changeNumber = changeNumber; - - return this.addSplit(newSplit); - } - return false; - } - - abstract getNamesByFlagSets(flagSets: string[]): Set[] - -} - -/** - * Given a parsed split, it returns a boolean flagging if its conditions use segments matchers (rules & whitelists). - * This util is intended to simplify the implementation of `splitsCache::usesSegments` method - */ -export function usesSegments(ruleEntity: IDefinition | IRBSegment) { - const conditions = ruleEntity.conditions || []; - for (let i = 0; i < conditions.length; i++) { - const matchers = conditions[i].matcherGroup.matchers; - - for (let j = 0; j < matchers.length; j++) { - const matcher = matchers[j].matcherType; - if (matcher === IN_SEGMENT || matcher === IN_LARGE_SEGMENT) return true; - } - } - - const excluded = (ruleEntity as IRBSegment).excluded; - if (excluded && excluded.segments && excluded.segments.length > 0) return true; - - return false; -} - -export function usesSegmentsSync(storage: Pick) { - return storage.splits.usesSegments() || storage.rbSegments.usesSegments(); -} diff --git a/src/storages/KeyBuilder.ts b/src/storages/KeyBuilder.ts index 4167d860..2aeb400b 100644 --- a/src/storages/KeyBuilder.ts +++ b/src/storages/KeyBuilder.ts @@ -21,19 +21,19 @@ export class KeyBuilder { return `${this.prefix}.trafficType.${trafficType}`; } - buildFlagSetKey(flagSet: string) { - return `${this.prefix}.flagSet.${flagSet}`; + buildSetKey(set: string) { + return `${this.prefix}.flagSet.${set}`; } - buildSplitKey(splitName: string) { - return `${this.prefix}.split.${splitName}`; + buildDefinitionKey(definitionName: string) { + return 
`${this.prefix}.split.${definitionName}`; } - buildSplitsTillKey() { + buildDefinitionsTillKey() { return `${this.prefix}.splits.till`; } - buildSplitKeyPrefix() { + buildDefinitionKeyPrefix() { return `${this.prefix}.split.`; } diff --git a/src/storages/KeyBuilderCS.ts b/src/storages/KeyBuilderCS.ts index deae16af..4d6cf14c 100644 --- a/src/storages/KeyBuilderCS.ts +++ b/src/storages/KeyBuilderCS.ts @@ -9,13 +9,13 @@ export interface MySegmentsKeyBuilder { export class KeyBuilderCS extends KeyBuilder implements MySegmentsKeyBuilder { - protected readonly regexSplitsCacheKey: RegExp; + protected readonly regexDefinitionsCacheKey: RegExp; protected readonly matchingKey: string; constructor(prefix: string, matchingKey: string) { super(prefix); this.matchingKey = matchingKey; - this.regexSplitsCacheKey = new RegExp(`^${prefix}\\.(splits?|trafficType|flagSet)\\.`); + this.regexDefinitionsCacheKey = new RegExp(`^${prefix}\\.(splits?|trafficType|flagSet)\\.`); } /** @@ -35,15 +35,15 @@ export class KeyBuilderCS extends KeyBuilder implements MySegmentsKeyBuilder { return `${this.prefix}.splits.lastUpdated`; } - isSplitsCacheKey(key: string) { - return this.regexSplitsCacheKey.test(key); + isDefinitionsCacheKey(key: string) { + return this.regexDefinitionsCacheKey.test(key); } buildTillKey() { return `${this.prefix}.${this.matchingKey}.segments.till`; } - isSplitKey(key: string) { + isDefinitionKey(key: string) { return startsWith(key, `${this.prefix}.split.`); } @@ -51,7 +51,7 @@ export class KeyBuilderCS extends KeyBuilder implements MySegmentsKeyBuilder { return startsWith(key, `${this.prefix}.rbsegment.`); } - buildSplitsWithSegmentCountKey() { + buildDefinitionsWithSegmentCountKey() { return `${this.prefix}.splits.usingSegments`; } diff --git a/src/storages/KeyBuilderSS.ts b/src/storages/KeyBuilderSS.ts index cf8d2156..33f03c50 100644 --- a/src/storages/KeyBuilderSS.ts +++ b/src/storages/KeyBuilderSS.ts @@ -49,8 +49,8 @@ export class KeyBuilderSS extends KeyBuilder { 
return `${this.prefix}.events`; } - searchPatternForSplitKeys() { - return `${this.buildSplitKeyPrefix()}*`; + searchPatternForDefinitionKeys() { + return `${this.buildDefinitionKeyPrefix()}*`; } searchPatternForRBSegmentKeys() { diff --git a/src/storages/__tests__/KeyBuilder.spec.ts b/src/storages/__tests__/KeyBuilder.spec.ts index bd21fa66..5874ed40 100644 --- a/src/storages/__tests__/KeyBuilder.spec.ts +++ b/src/storages/__tests__/KeyBuilder.spec.ts @@ -9,9 +9,9 @@ test('KEYS / splits keys', () => { const expectedKey = `SPLITIO.split.${splitName}`; const expectedTill = 'SPLITIO.splits.till'; - expect(builder.buildSplitKey(splitName)).toBe(expectedKey); - expect(builder.buildSplitsTillKey()).toBe(expectedTill); - expect(builder.extractKey(builder.buildSplitKey(splitName))).toBe(splitName); + expect(builder.buildDefinitionKey(splitName)).toBe(expectedKey); + expect(builder.buildDefinitionsTillKey()).toBe(expectedTill); + expect(builder.extractKey(builder.buildDefinitionKey(splitName))).toBe(splitName); }); test('KEYS / splits keys with custom prefix', () => { @@ -22,8 +22,8 @@ test('KEYS / splits keys with custom prefix', () => { const expectedKey = `${prefix}.split.${splitName}`; const expectedTill = `${prefix}.splits.till`; - expect(builder.buildSplitKey(splitName)).toBe(expectedKey); - expect(builder.buildSplitsTillKey() === expectedTill).toBe(true); + expect(builder.buildDefinitionKey(splitName)).toBe(expectedKey); + expect(builder.buildDefinitionsTillKey() === expectedTill).toBe(true); }); const prefix = 'SPLITIO'; @@ -65,7 +65,7 @@ test('KEYS / flag set keys', () => { const flagSetName = 'flagset_x'; const expectedKey = `${prefix}.flagSet.${flagSetName}`; - expect(builder.buildFlagSetKey(flagSetName)).toBe(expectedKey); + expect(builder.buildSetKey(flagSetName)).toBe(expectedKey); }); diff --git a/src/storages/__tests__/dataLoader.spec.ts b/src/storages/__tests__/dataLoader.spec.ts index 3a0c93ab..2cc20a71 100644 --- 
a/src/storages/__tests__/dataLoader.spec.ts +++ b/src/storages/__tests__/dataLoader.spec.ts @@ -50,7 +50,7 @@ describe('validateRolloutPlan', () => { describe('getRolloutPlan & setRolloutPlan (client-side)', () => { // @ts-expect-error Load server-side storage const serverStorage = InMemoryStorageFactory({ settings: fullSettings }); - serverStorage.splits.update([{ name: 'split1' } as IDefinition], [], 123); + serverStorage.definitions.update([{ name: 'split1' } as IDefinition], [], 123); serverStorage.rbSegments.update([{ name: 'rbs1' } as IRBSegment], [], 321); serverStorage.segments.update('segment1', [fullSettings.core.key as string, otherKey], [], 123); diff --git a/src/storages/__tests__/testUtils.ts b/src/storages/__tests__/testUtils.ts index 39db48f8..57806035 100644 --- a/src/storages/__tests__/testUtils.ts +++ b/src/storages/__tests__/testUtils.ts @@ -4,7 +4,7 @@ import { IStorageSync, IStorageAsync, IImpressionsCacheSync, IEventsCacheSync } // Assert that instances created by storage factories have the expected interface export function assertStorageInterface(storage: IStorageSync | IStorageAsync) { expect(typeof storage.destroy).toBe('function'); - expect(typeof storage.splits).toBe('object'); + expect(typeof storage.definitions).toBe('object'); expect(typeof storage.segments).toBe('object'); expect(typeof storage.impressions).toBe('object'); expect(typeof storage.events).toBe('object'); diff --git a/src/storages/getRolloutPlan.ts b/src/storages/getRolloutPlan.ts index 40e6ea84..56f843f8 100644 --- a/src/storages/getRolloutPlan.ts +++ b/src/storages/getRolloutPlan.ts @@ -12,16 +12,16 @@ import { IMembershipsResponse, IMySegmentsResponse } from '../dtos/types'; export function getRolloutPlan(log: ILogger, storage: IStorageSync, options: SplitIO.RolloutPlanOptions = {}): RolloutPlan { const { keys, exposeSegments } = options; - const { splits, segments, rbSegments } = storage; + const { definitions, segments, rbSegments } = storage; log.debug(`storage: 
get feature flags${keys ? `, and memberships for keys: ${keys}` : ''}${exposeSegments ? ', and segments' : ''}`); return { splitChanges: { ff: { - t: splits.getChangeNumber(), + t: definitions.getChangeNumber(), s: -1, - d: splits.getAll(), + d: definitions.getAll(), }, rbs: { t: rbSegments.getChangeNumber(), diff --git a/src/storages/inLocalStorage/DefinitionsCacheInLocal.ts b/src/storages/inLocalStorage/DefinitionsCacheInLocal.ts new file mode 100644 index 00000000..4afc9055 --- /dev/null +++ b/src/storages/inLocalStorage/DefinitionsCacheInLocal.ts @@ -0,0 +1,234 @@ +import { IDefinition } from '../../dtos/types'; +import { AbstractDefinitionsCacheSync, usesSegments } from '../AbstractDefinitionsCacheSync'; +import { isFiniteNumber, toNumber, isNaNNumber } from '../../utils/lang'; +import { KeyBuilderCS } from '../KeyBuilderCS'; +import { ILogger } from '../../logger/types'; +import { LOG_PREFIX } from './constants'; +import { ISettings } from '../../types'; +import { setToArray } from '../../utils/lang/sets'; +import { StorageAdapter } from '../types'; + +export class DefinitionsCacheInLocal extends AbstractDefinitionsCacheSync { + + private readonly keys: KeyBuilderCS; + private readonly log: ILogger; + private readonly setsFilter: string[]; + private hasSync?: boolean; + private readonly storage: StorageAdapter; + + constructor(settings: ISettings, keys: KeyBuilderCS, storage: StorageAdapter) { + super(); + this.keys = keys; + this.log = settings.log; + this.setsFilter = settings.sync.__splitFiltersValidation.groupedFilters.bySet; + this.storage = storage; + } + + private _decrementCount(key: string) { + const count = toNumber(this.storage.getItem(key)) - 1; + if (count > 0) this.storage.setItem(key, count + ''); + else this.storage.removeItem(key); + } + + private _decrementCounts(definition: IDefinition) { + try { + const ttKey = this.keys.buildTrafficTypeKey(definition.trafficTypeName); + this._decrementCount(ttKey); + + if (usesSegments(definition)) { + 
const segmentsCountKey = this.keys.buildDefinitionsWithSegmentCountKey(); + this._decrementCount(segmentsCountKey); + } + } catch (e) { + this.log.error(LOG_PREFIX + e); + } + } + + private _incrementCounts(definition: IDefinition) { + try { + const ttKey = this.keys.buildTrafficTypeKey(definition.trafficTypeName); + this.storage.setItem(ttKey, (toNumber(this.storage.getItem(ttKey)) + 1) + ''); + + if (usesSegments(definition)) { + const segmentsCountKey = this.keys.buildDefinitionsWithSegmentCountKey(); + this.storage.setItem(segmentsCountKey, (toNumber(this.storage.getItem(segmentsCountKey)) + 1) + ''); + } + } catch (e) { + this.log.error(LOG_PREFIX + e); + } + } + + + /** + * Removes all definitions-related data from localStorage (definitions, counters, changeNumber and lastUpdated). + * We cannot simply call `localStorage.clear()` since that implies removing user items from the storage. + */ + clear() { + // collect item keys + const len = this.storage.length; + const accum = []; + for (let cur = 0; cur < len; cur++) { + const key = this.storage.key(cur); + if (key != null && this.keys.isDefinitionsCacheKey(key)) accum.push(key); + } + // remove items + accum.forEach(key => { + this.storage.removeItem(key); + }); + + this.hasSync = false; + } + + add(definition: IDefinition) { + const name = definition.name; + const definitionKey = this.keys.buildDefinitionKey(name); + const definitionFromStorage = this.storage.getItem(definitionKey); + const previousDefinition = definitionFromStorage ?
JSON.parse(definitionFromStorage) : null; + + if (previousDefinition) { + this._decrementCounts(previousDefinition); + this.removeFromSets(previousDefinition.name, previousDefinition.sets); + } + + this.storage.setItem(definitionKey, JSON.stringify(definition)); + + this._incrementCounts(definition); + this.addToSets(definition); + + return true; + } + + remove(name: string): boolean { + const definition = this.get(name); + if (!definition) return false; + + this.storage.removeItem(this.keys.buildDefinitionKey(name)); + + this._decrementCounts(definition); + this.removeFromSets(definition.name, definition.sets); + + return true; + } + + get(name: string): IDefinition | null { + const item = this.storage.getItem(this.keys.buildDefinitionKey(name)); + return item && JSON.parse(item); + } + + setChangeNumber(changeNumber: number): boolean { + try { + this.storage.setItem(this.keys.buildDefinitionsTillKey(), changeNumber + ''); + // update "last updated" timestamp with current time + this.storage.setItem(this.keys.buildLastUpdatedKey(), Date.now() + ''); + this.hasSync = true; + return true; + } catch (e) { + this.log.error(LOG_PREFIX + e); + return false; + } + } + + getChangeNumber(): number { + const n = -1; + let value: string | number | null = this.storage.getItem(this.keys.buildDefinitionsTillKey()); + + if (value !== null) { + value = parseInt(value, 10); + + return isNaNNumber(value) ? 
n : value; + } + + return n; + } + + getNames(): string[] { + const len = this.storage.length; + const accum = []; + + let cur = 0; + + while (cur < len) { + const key = this.storage.key(cur); + + if (key != null && this.keys.isDefinitionKey(key)) accum.push(this.keys.extractKey(key)); + + cur++; + } + + return accum; + } + + trafficTypeExists(trafficType: string): boolean { + const ttCount = toNumber(this.storage.getItem(this.keys.buildTrafficTypeKey(trafficType))); + return isFiniteNumber(ttCount) && ttCount > 0; + } + + usesSegments() { + // If cache hasn't been synchronized with the cloud, assume we need them. + if (!this.hasSync) return true; + + const storedCount = this.storage.getItem(this.keys.buildDefinitionsWithSegmentCountKey()); + const definitionsWithSegmentsCount = storedCount === null ? 0 : toNumber(storedCount); + + return isFiniteNumber(definitionsWithSegmentsCount) ? + definitionsWithSegmentsCount > 0 : + true; + } + + getNamesBySets(sets: string[]): Set<string>[] { + return sets.map(set => { + const setKey = this.keys.buildSetKey(set); + const setFromStorage = this.storage.getItem(setKey); + + return new Set(setFromStorage ? JSON.parse(setFromStorage) : []); + }); + } + + private addToSets(definition: IDefinition) { + if (!definition.sets) return; + + definition.sets.forEach(set => { + + if (this.setsFilter.length > 0 && !this.setsFilter.some(filterSet => filterSet === set)) return; + + const setKey = this.keys.buildSetKey(set); + + const setFromStorage = this.storage.getItem(setKey); + + const setCache = new Set(setFromStorage ?
JSON.parse(setFromStorage) : []); + + if (setCache.has(definition.name)) return; + + setCache.add(definition.name); + + this.storage.setItem(setKey, JSON.stringify(setToArray(setCache))); + }); + } + + private removeFromSets(definitionName: string, sets?: string[] | null) { + if (!sets) return; + + sets.forEach(set => { + this.removeNames(set, definitionName); + }); + } + + private removeNames(setName: string, definitionName: string) { + const setKey = this.keys.buildSetKey(setName); + + const setFromStorage = this.storage.getItem(setKey); + + if (!setFromStorage) return; + + const setCache = new Set(JSON.parse(setFromStorage)); + setCache.delete(definitionName); + + if (setCache.size === 0) { + this.storage.removeItem(setKey); + return; + } + + this.storage.setItem(setKey, JSON.stringify(setToArray(setCache))); + } + +} diff --git a/src/storages/inLocalStorage/RBSegmentsCacheInLocal.ts b/src/storages/inLocalStorage/RBSegmentsCacheInLocal.ts index c5601987..6651cefc 100644 --- a/src/storages/inLocalStorage/RBSegmentsCacheInLocal.ts +++ b/src/storages/inLocalStorage/RBSegmentsCacheInLocal.ts @@ -3,7 +3,7 @@ import { ILogger } from '../../logger/types'; import { ISettings } from '../../types'; import { isFiniteNumber, isNaNNumber, toNumber } from '../../utils/lang'; import { setToArray } from '../../utils/lang/sets'; -import { usesSegments } from '../AbstractSplitsCacheSync'; +import { usesSegments } from '../AbstractDefinitionsCacheSync'; import { KeyBuilderCS } from '../KeyBuilderCS'; import { IRBSegmentsCacheSync, StorageAdapter } from '../types'; import { LOG_PREFIX } from './constants'; @@ -42,7 +42,7 @@ export class RBSegmentsCacheInLocal implements IRBSegmentsCacheSync { } private updateSegmentCount(diff: number) { - const segmentsCountKey = this.keys.buildSplitsWithSegmentCountKey(); + const segmentsCountKey = this.keys.buildDefinitionsWithSegmentCountKey(); const count = toNumber(this.storage.getItem(segmentsCountKey)) + diff; if (count > 0) 
this.storage.setItem(segmentsCountKey, count + ''); else this.storage.removeItem(segmentsCountKey); @@ -121,11 +121,11 @@ export class RBSegmentsCacheInLocal implements IRBSegmentsCacheSync { } usesSegments(): boolean { - const storedCount = this.storage.getItem(this.keys.buildSplitsWithSegmentCountKey()); - const splitsWithSegmentsCount = storedCount === null ? 0 : toNumber(storedCount); + const storedCount = this.storage.getItem(this.keys.buildDefinitionsWithSegmentCountKey()); + const definitionsWithSegmentsCount = storedCount === null ? 0 : toNumber(storedCount); - return isFiniteNumber(splitsWithSegmentsCount) ? - splitsWithSegmentsCount > 0 : + return isFiniteNumber(definitionsWithSegmentsCount) ? + definitionsWithSegmentsCount > 0 : true; } diff --git a/src/storages/inLocalStorage/SplitsCacheInLocal.ts b/src/storages/inLocalStorage/SplitsCacheInLocal.ts deleted file mode 100644 index fba9d26d..00000000 --- a/src/storages/inLocalStorage/SplitsCacheInLocal.ts +++ /dev/null @@ -1,234 +0,0 @@ -import { IDefinition } from '../../dtos/types'; -import { AbstractSplitsCacheSync, usesSegments } from '../AbstractSplitsCacheSync'; -import { isFiniteNumber, toNumber, isNaNNumber } from '../../utils/lang'; -import { KeyBuilderCS } from '../KeyBuilderCS'; -import { ILogger } from '../../logger/types'; -import { LOG_PREFIX } from './constants'; -import { ISettings } from '../../types'; -import { setToArray } from '../../utils/lang/sets'; -import { StorageAdapter } from '../types'; - -export class SplitsCacheInLocal extends AbstractSplitsCacheSync { - - private readonly keys: KeyBuilderCS; - private readonly log: ILogger; - private readonly flagSetsFilter: string[]; - private hasSync?: boolean; - private readonly storage: StorageAdapter; - - constructor(settings: ISettings, keys: KeyBuilderCS, storage: StorageAdapter) { - super(); - this.keys = keys; - this.log = settings.log; - this.flagSetsFilter = settings.sync.__splitFiltersValidation.groupedFilters.bySet; - 
this.storage = storage; - } - - private _decrementCount(key: string) { - const count = toNumber(this.storage.getItem(key)) - 1; - if (count > 0) this.storage.setItem(key, count + ''); - else this.storage.removeItem(key); - } - - private _decrementCounts(split: IDefinition) { - try { - const ttKey = this.keys.buildTrafficTypeKey(split.trafficTypeName); - this._decrementCount(ttKey); - - if (usesSegments(split)) { - const segmentsCountKey = this.keys.buildSplitsWithSegmentCountKey(); - this._decrementCount(segmentsCountKey); - } - } catch (e) { - this.log.error(LOG_PREFIX + e); - } - } - - private _incrementCounts(split: IDefinition) { - try { - const ttKey = this.keys.buildTrafficTypeKey(split.trafficTypeName); - this.storage.setItem(ttKey, (toNumber(this.storage.getItem(ttKey)) + 1) + ''); - - if (usesSegments(split)) { - const segmentsCountKey = this.keys.buildSplitsWithSegmentCountKey(); - this.storage.setItem(segmentsCountKey, (toNumber(this.storage.getItem(segmentsCountKey)) + 1) + ''); - } - } catch (e) { - this.log.error(LOG_PREFIX + e); - } - } - - - /** - * Removes all splits cache related data from localStorage (splits, counters, changeNumber and lastUpdated). - * We cannot simply call `localStorage.clear()` since that implies removing user items from the storage. - */ - clear() { - // collect item keys - const len = this.storage.length; - const accum = []; - for (let cur = 0; cur < len; cur++) { - const key = this.storage.key(cur); - if (key != null && this.keys.isSplitsCacheKey(key)) accum.push(key); - } - // remove items - accum.forEach(key => { - this.storage.removeItem(key); - }); - - this.hasSync = false; - } - - addSplit(split: IDefinition) { - const name = split.name; - const splitKey = this.keys.buildSplitKey(name); - const splitFromStorage = this.storage.getItem(splitKey); - const previousSplit = splitFromStorage ? 
JSON.parse(splitFromStorage) : null; - - if (previousSplit) { - this._decrementCounts(previousSplit); - this.removeFromFlagSets(previousSplit.name, previousSplit.sets); - } - - this.storage.setItem(splitKey, JSON.stringify(split)); - - this._incrementCounts(split); - this.addToFlagSets(split); - - return true; - } - - removeSplit(name: string): boolean { - const split = this.getSplit(name); - if (!split) return false; - - this.storage.removeItem(this.keys.buildSplitKey(name)); - - this._decrementCounts(split); - this.removeFromFlagSets(split.name, split.sets); - - return true; - } - - getSplit(name: string): IDefinition | null { - const item = this.storage.getItem(this.keys.buildSplitKey(name)); - return item && JSON.parse(item); - } - - setChangeNumber(changeNumber: number): boolean { - try { - this.storage.setItem(this.keys.buildSplitsTillKey(), changeNumber + ''); - // update "last updated" timestamp with current time - this.storage.setItem(this.keys.buildLastUpdatedKey(), Date.now() + ''); - this.hasSync = true; - return true; - } catch (e) { - this.log.error(LOG_PREFIX + e); - return false; - } - } - - getChangeNumber(): number { - const n = -1; - let value: string | number | null = this.storage.getItem(this.keys.buildSplitsTillKey()); - - if (value !== null) { - value = parseInt(value, 10); - - return isNaNNumber(value) ? n : value; - } - - return n; - } - - getSplitNames(): string[] { - const len = this.storage.length; - const accum = []; - - let cur = 0; - - while (cur < len) { - const key = this.storage.key(cur); - - if (key != null && this.keys.isSplitKey(key)) accum.push(this.keys.extractKey(key)); - - cur++; - } - - return accum; - } - - trafficTypeExists(trafficType: string): boolean { - const ttCount = toNumber(this.storage.getItem(this.keys.buildTrafficTypeKey(trafficType))); - return isFiniteNumber(ttCount) && ttCount > 0; - } - - usesSegments() { - // If cache hasn't been synchronized with the cloud, assume we need them. 
- if (!this.hasSync) return true; - - const storedCount = this.storage.getItem(this.keys.buildSplitsWithSegmentCountKey()); - const splitsWithSegmentsCount = storedCount === null ? 0 : toNumber(storedCount); - - return isFiniteNumber(splitsWithSegmentsCount) ? - splitsWithSegmentsCount > 0 : - true; - } - - getNamesByFlagSets(flagSets: string[]): Set[] { - return flagSets.map(flagSet => { - const flagSetKey = this.keys.buildFlagSetKey(flagSet); - const flagSetFromStorage = this.storage.getItem(flagSetKey); - - return new Set(flagSetFromStorage ? JSON.parse(flagSetFromStorage) : []); - }); - } - - private addToFlagSets(featureFlag: IDefinition) { - if (!featureFlag.sets) return; - - featureFlag.sets.forEach(featureFlagSet => { - - if (this.flagSetsFilter.length > 0 && !this.flagSetsFilter.some(filterFlagSet => filterFlagSet === featureFlagSet)) return; - - const flagSetKey = this.keys.buildFlagSetKey(featureFlagSet); - - const flagSetFromStorage = this.storage.getItem(flagSetKey); - - const flagSetCache = new Set(flagSetFromStorage ? 
JSON.parse(flagSetFromStorage) : []); - - if (flagSetCache.has(featureFlag.name)) return; - - flagSetCache.add(featureFlag.name); - - this.storage.setItem(flagSetKey, JSON.stringify(setToArray(flagSetCache))); - }); - } - - private removeFromFlagSets(featureFlagName: string, flagSets?: string[] | null) { - if (!flagSets) return; - - flagSets.forEach(flagSet => { - this.removeNames(flagSet, featureFlagName); - }); - } - - private removeNames(flagSetName: string, featureFlagName: string) { - const flagSetKey = this.keys.buildFlagSetKey(flagSetName); - - const flagSetFromStorage = this.storage.getItem(flagSetKey); - - if (!flagSetFromStorage) return; - - const flagSetCache = new Set(JSON.parse(flagSetFromStorage)); - flagSetCache.delete(featureFlagName); - - if (flagSetCache.size === 0) { - this.storage.removeItem(flagSetKey); - return; - } - - this.storage.setItem(flagSetKey, JSON.stringify(setToArray(flagSetCache))); - } - -} diff --git a/src/storages/inLocalStorage/__tests__/SplitsCacheInLocal.spec.ts b/src/storages/inLocalStorage/__tests__/DefinitionsCacheInLocal.spec.ts similarity index 55% rename from src/storages/inLocalStorage/__tests__/SplitsCacheInLocal.spec.ts rename to src/storages/inLocalStorage/__tests__/DefinitionsCacheInLocal.spec.ts index 9a88265f..bec098b3 100644 --- a/src/storages/inLocalStorage/__tests__/SplitsCacheInLocal.spec.ts +++ b/src/storages/inLocalStorage/__tests__/DefinitionsCacheInLocal.spec.ts @@ -1,4 +1,4 @@ -import { SplitsCacheInLocal } from '../SplitsCacheInLocal'; +import { DefinitionsCacheInLocal } from '../DefinitionsCacheInLocal'; import { KeyBuilderCS } from '../../KeyBuilderCS'; import { splitWithUserTT, splitWithAccountTT, splitWithAccountTTAndUsesSegments, something, somethingElse, featureFlagOne, featureFlagTwo, featureFlagThree, featureFlagWithEmptyFS, featureFlagWithoutFS } from '../../__tests__/testUtils'; import { IDefinition } from '../../../dtos/types'; @@ -6,9 +6,9 @@ import { fullSettings } from 
'../../../utils/settingsValidation/__tests__/settin import { storages, PREFIX } from './wrapper.mock'; -describe.each(storages)('SPLITS CACHE', (storage) => { +describe.each(storages)('DEFINITIONS CACHE', (storage) => { test('LocalStorage', () => { - const cache = new SplitsCacheInLocal(fullSettings, new KeyBuilderCS(PREFIX, 'user'), storage); + const cache = new DefinitionsCacheInLocal(fullSettings, new KeyBuilderCS(PREFIX, 'user'), storage); cache.clear(); @@ -18,18 +18,18 @@ describe.each(storages)('SPLITS CACHE', (storage) => { expect(values).toEqual([something, somethingElse]); - cache.removeSplit(something.name); + cache.remove(something.name); - const splits = cache.getSplits([something.name, somethingElse.name]); - expect(splits[something.name]).toEqual(null); - expect(splits[somethingElse.name]).toEqual(somethingElse); + const definitions = cache.getMany([something.name, somethingElse.name]); + expect(definitions[something.name]).toEqual(null); + expect(definitions[somethingElse.name]).toEqual(somethingElse); values = cache.getAll(); expect(values).toEqual([somethingElse]); - expect(cache.getSplit(something.name)).toEqual(null); - expect(cache.getSplit(somethingElse.name)).toEqual(somethingElse); + expect(cache.get(something.name)).toEqual(null); + expect(cache.get(somethingElse.name)).toEqual(somethingElse); expect(cache.getChangeNumber()).toBe(-1); @@ -39,83 +39,83 @@ describe.each(storages)('SPLITS CACHE', (storage) => { }); test('LocalStorage / Get Keys', () => { - const cache = new SplitsCacheInLocal(fullSettings, new KeyBuilderCS(PREFIX, 'user'), storage); + const cache = new DefinitionsCacheInLocal(fullSettings, new KeyBuilderCS(PREFIX, 'user'), storage); cache.update([something, somethingElse], [], 1); - const keys = cache.getSplitNames(); + const keys = cache.getNames(); expect(keys.indexOf(something.name) !== -1).toBe(true); expect(keys.indexOf(somethingElse.name) !== -1).toBe(true); }); - test('LocalStorage / Update Splits', () => { - const 
cache = new SplitsCacheInLocal(fullSettings, new KeyBuilderCS(PREFIX, 'user'), storage); + test('LocalStorage / Update definitions', () => { + const cache = new DefinitionsCacheInLocal(fullSettings, new KeyBuilderCS(PREFIX, 'user'), storage); cache.update([something, somethingElse], [], 1); cache.update([], [something.name, somethingElse.name], 1); - expect(cache.getSplit(something.name)).toBe(null); - expect(cache.getSplit(somethingElse.name)).toBe(null); + expect(cache.get(something.name)).toBe(null); + expect(cache.get(somethingElse.name)).toBe(null); }); test('LocalStorage / trafficTypeExists and ttcache tests', () => { - const cache = new SplitsCacheInLocal(fullSettings, new KeyBuilderCS(PREFIX, 'user'), storage); + const cache = new DefinitionsCacheInLocal(fullSettings, new KeyBuilderCS(PREFIX, 'user'), storage); cache.update([ { ...splitWithUserTT, name: 'split1' }, { ...splitWithAccountTT, name: 'split2' }, { ...splitWithUserTT, name: 'split3' }, ], [], 1); - cache.addSplit({ ...splitWithUserTT, name: 'split4' }); + cache.add({ ...splitWithUserTT, name: 'split4' }); expect(cache.trafficTypeExists('user_tt')).toBe(true); expect(cache.trafficTypeExists('account_tt')).toBe(true); expect(cache.trafficTypeExists('not_existent_tt')).toBe(false); - cache.removeSplit('split4'); + cache.remove('split4'); expect(cache.trafficTypeExists('user_tt')).toBe(true); expect(cache.trafficTypeExists('account_tt')).toBe(true); - cache.removeSplit('split3'); - cache.removeSplit('split2'); + cache.remove('split3'); + cache.remove('split2'); expect(cache.trafficTypeExists('user_tt')).toBe(true); expect(cache.trafficTypeExists('account_tt')).toBe(false); - cache.removeSplit('split1'); + cache.remove('split1'); expect(cache.trafficTypeExists('user_tt')).toBe(false); expect(cache.trafficTypeExists('account_tt')).toBe(false); - cache.addSplit({ ...splitWithUserTT, name: 'split1' }); + cache.add({ ...splitWithUserTT, name: 'split1' }); 
expect(cache.trafficTypeExists('user_tt')).toBe(true); - cache.addSplit({ ...splitWithAccountTT, name: 'split1' }); + cache.add({ ...splitWithAccountTT, name: 'split1' }); expect(cache.trafficTypeExists('account_tt')).toBe(true); expect(cache.trafficTypeExists('user_tt')).toBe(false); }); test('LocalStorage / killLocally', () => { - const cache = new SplitsCacheInLocal(fullSettings, new KeyBuilderCS(PREFIX, 'user'), storage); + const cache = new DefinitionsCacheInLocal(fullSettings, new KeyBuilderCS(PREFIX, 'user'), storage); - cache.addSplit(something); - cache.addSplit(somethingElse); + cache.add(something); + cache.add(somethingElse); const initialChangeNumber = cache.getChangeNumber(); // kill an non-existent split let updated = cache.killLocally('nonexistent_split', 'other_treatment', 101); - const nonexistentSplit = cache.getSplit('nonexistent_split'); + const nonexistentSplit = cache.get('nonexistent_split'); expect(updated).toBe(false); // killLocally resolves without update if split doesn't exist expect(nonexistentSplit).toBe(null); // non-existent split keeps being non-existent // kill an existent split updated = cache.killLocally(something.name, 'some_treatment', 100); - let lol1Split = cache.getSplit(something.name) as IDefinition; + let lol1Split = cache.get(something.name) as IDefinition; expect(updated).toBe(true); // killLocally resolves with update if split is changed expect(lol1Split.killed).toBe(true); // existing split must be killed @@ -125,7 +125,7 @@ describe.each(storages)('SPLITS CACHE', (storage) => { // not update if changeNumber is old updated = cache.killLocally(something.name, 'some_treatment_2', 90); - lol1Split = cache.getSplit(something.name) as IDefinition; + lol1Split = cache.get(something.name) as IDefinition; expect(updated).toBe(false); // killLocally resolves without update if changeNumber is old expect(lol1Split.defaultTreatment).not.toBe('some_treatment_2'); // existing split is not updated if given changeNumber is older @@ 
-133,7 +133,7 @@ describe.each(storages)('SPLITS CACHE', (storage) => { }); test('LocalStorage / usesSegments', () => { - const cache = new SplitsCacheInLocal(fullSettings, new KeyBuilderCS(PREFIX, 'user'), storage); + const cache = new DefinitionsCacheInLocal(fullSettings, new KeyBuilderCS(PREFIX, 'user'), storage); expect(cache.usesSegments()).toBe(true); // true initially, until data is synchronized cache.setChangeNumber(1); // to indicate that data has been synced. @@ -141,22 +141,22 @@ describe.each(storages)('SPLITS CACHE', (storage) => { cache.update([splitWithUserTT, splitWithAccountTT], [], 1); expect(cache.usesSegments()).toBe(false); // 0 splits using segments - cache.addSplit({ ...splitWithAccountTTAndUsesSegments, name: 'split3' }); + cache.add({ ...splitWithAccountTTAndUsesSegments, name: 'split3' }); expect(cache.usesSegments()).toBe(true); // 1 split using segments - cache.addSplit({ ...splitWithAccountTTAndUsesSegments, name: 'split4' }); + cache.add({ ...splitWithAccountTTAndUsesSegments, name: 'split4' }); expect(cache.usesSegments()).toBe(true); // 2 splits using segments - cache.removeSplit('split3'); + cache.remove('split3'); expect(cache.usesSegments()).toBe(true); // 1 split using segments - cache.removeSplit('split4'); + cache.remove('split4'); expect(cache.usesSegments()).toBe(false); // 0 splits using segments }); test('LocalStorage / flag set cache tests', () => { // @ts-ignore - const cache = new SplitsCacheInLocal({ + const cache = new DefinitionsCacheInLocal({ ...fullSettings, sync: { // @ts-expect-error __splitFiltersValidation: { @@ -173,42 +173,42 @@ describe.each(storages)('SPLITS CACHE', (storage) => { featureFlagTwo, featureFlagThree, ], [], -1); - cache.addSplit(featureFlagWithEmptyFS); + cache.add(featureFlagWithEmptyFS); // Adding an existing FF should not affect the cache cache.update([featureFlagTwo], [], -1); - expect(cache.getNamesByFlagSets(['o'])).toEqual([new Set(['ff_one', 'ff_two'])]); - 
expect(cache.getNamesByFlagSets(['n'])).toEqual([new Set(['ff_one'])]); - expect(cache.getNamesByFlagSets(['e'])).toEqual([new Set(['ff_one', 'ff_three'])]); - expect(cache.getNamesByFlagSets(['t'])).toEqual([emptySet]); // 't' not in filter - expect(cache.getNamesByFlagSets(['o', 'n', 'e'])).toEqual([new Set(['ff_one', 'ff_two']), new Set(['ff_one']), new Set(['ff_one', 'ff_three'])]); + expect(cache.getNamesBySets(['o'])).toEqual([new Set(['ff_one', 'ff_two'])]); + expect(cache.getNamesBySets(['n'])).toEqual([new Set(['ff_one'])]); + expect(cache.getNamesBySets(['e'])).toEqual([new Set(['ff_one', 'ff_three'])]); + expect(cache.getNamesBySets(['t'])).toEqual([emptySet]); // 't' not in filter + expect(cache.getNamesBySets(['o', 'n', 'e'])).toEqual([new Set(['ff_one', 'ff_two']), new Set(['ff_one']), new Set(['ff_one', 'ff_three'])]); - cache.addSplit({ ...featureFlagOne, sets: ['1'] }); + cache.add({ ...featureFlagOne, sets: ['1'] }); - expect(cache.getNamesByFlagSets(['1'])).toEqual([emptySet]); // '1' not in filter - expect(cache.getNamesByFlagSets(['o'])).toEqual([new Set(['ff_two'])]); - expect(cache.getNamesByFlagSets(['n'])).toEqual([emptySet]); + expect(cache.getNamesBySets(['1'])).toEqual([emptySet]); // '1' not in filter + expect(cache.getNamesBySets(['o'])).toEqual([new Set(['ff_two'])]); + expect(cache.getNamesBySets(['n'])).toEqual([emptySet]); - cache.addSplit({ ...featureFlagOne, sets: ['x'] }); - expect(cache.getNamesByFlagSets(['x'])).toEqual([new Set(['ff_one'])]); - expect(cache.getNamesByFlagSets(['o', 'e', 'x'])).toEqual([new Set(['ff_two']), new Set(['ff_three']), new Set(['ff_one'])]); + cache.add({ ...featureFlagOne, sets: ['x'] }); + expect(cache.getNamesBySets(['x'])).toEqual([new Set(['ff_one'])]); + expect(cache.getNamesBySets(['o', 'e', 'x'])).toEqual([new Set(['ff_two']), new Set(['ff_three']), new Set(['ff_one'])]); - cache.removeSplit(featureFlagOne.name); - expect(cache.getNamesByFlagSets(['x'])).toEqual([emptySet]); + 
cache.remove(featureFlagOne.name); + expect(cache.getNamesBySets(['x'])).toEqual([emptySet]); - cache.removeSplit(featureFlagOne.name); - expect(cache.getNamesByFlagSets(['y'])).toEqual([emptySet]); // 'y' not in filter - expect(cache.getNamesByFlagSets([])).toEqual([]); + cache.remove(featureFlagOne.name); + expect(cache.getNamesBySets(['y'])).toEqual([emptySet]); // 'y' not in filter + expect(cache.getNamesBySets([])).toEqual([]); - cache.addSplit(featureFlagWithoutFS); - expect(cache.getNamesByFlagSets([])).toEqual([]); + cache.add(featureFlagWithoutFS); + expect(cache.getNamesBySets([])).toEqual([]); }); // if FlagSets are not defined, it should store all FlagSets in memory. test('LocalStorage / flag set cache tests without filters', () => { - const cache = new SplitsCacheInLocal(fullSettings, new KeyBuilderCS(PREFIX, 'user'), storage); + const cache = new DefinitionsCacheInLocal(fullSettings, new KeyBuilderCS(PREFIX, 'user'), storage); const emptySet = new Set([]); @@ -217,14 +217,14 @@ describe.each(storages)('SPLITS CACHE', (storage) => { featureFlagTwo, featureFlagThree, ], [], -1); - cache.addSplit(featureFlagWithEmptyFS); - - expect(cache.getNamesByFlagSets(['o'])).toEqual([new Set(['ff_one', 'ff_two'])]); - expect(cache.getNamesByFlagSets(['n'])).toEqual([new Set(['ff_one'])]); - expect(cache.getNamesByFlagSets(['e'])).toEqual([new Set(['ff_one', 'ff_three'])]); - expect(cache.getNamesByFlagSets(['t'])).toEqual([new Set(['ff_two', 'ff_three'])]); - expect(cache.getNamesByFlagSets(['y'])).toEqual([emptySet]); - expect(cache.getNamesByFlagSets(['o', 'n', 'e'])).toEqual([new Set(['ff_one', 'ff_two']), new Set(['ff_one']), new Set(['ff_one', 'ff_three'])]); + cache.add(featureFlagWithEmptyFS); + + expect(cache.getNamesBySets(['o'])).toEqual([new Set(['ff_one', 'ff_two'])]); + expect(cache.getNamesBySets(['n'])).toEqual([new Set(['ff_one'])]); + expect(cache.getNamesBySets(['e'])).toEqual([new Set(['ff_one', 'ff_three'])]); + 
expect(cache.getNamesBySets(['t'])).toEqual([new Set(['ff_two', 'ff_three'])]); + expect(cache.getNamesBySets(['y'])).toEqual([emptySet]); + expect(cache.getNamesBySets(['o', 'n', 'e'])).toEqual([new Set(['ff_one', 'ff_two']), new Set(['ff_one']), new Set(['ff_one', 'ff_three'])]); // Validate that the feature flag cache is cleared when calling `clear` method cache.clear(); diff --git a/src/storages/inLocalStorage/__tests__/validateCache.spec.ts b/src/storages/inLocalStorage/__tests__/validateCache.spec.ts index 6ddfa134..3b2f8cf5 100644 --- a/src/storages/inLocalStorage/__tests__/validateCache.spec.ts +++ b/src/storages/inLocalStorage/__tests__/validateCache.spec.ts @@ -2,7 +2,7 @@ import { validateCache } from '../validateCache'; import { KeyBuilderCS } from '../../KeyBuilderCS'; import { fullSettings } from '../../../utils/settingsValidation/__tests__/settings.mocks'; -import { SplitsCacheInLocal } from '../SplitsCacheInLocal'; +import { DefinitionsCacheInLocal } from '../DefinitionsCacheInLocal'; import { nearlyEqual } from '../../../__tests__/testUtils'; import { MySegmentsCacheInLocal } from '../MySegmentsCacheInLocal'; import { RBSegmentsCacheInLocal } from '../RBSegmentsCacheInLocal'; @@ -15,11 +15,11 @@ describe.each(storages)('validateCache', (storage) => { const logSpy = jest.spyOn(fullSettings.log, 'info'); const segments = new MySegmentsCacheInLocal(fullSettings.log, keys, storage); const largeSegments = new MySegmentsCacheInLocal(fullSettings.log, keys, storage); - const splits = new SplitsCacheInLocal(fullSettings, keys, storage); + const definitions = new DefinitionsCacheInLocal(fullSettings, keys, storage); const rbSegments = new RBSegmentsCacheInLocal(fullSettings, keys, storage); - jest.spyOn(splits, 'getChangeNumber'); - jest.spyOn(splits, 'clear'); + jest.spyOn(definitions, 'getChangeNumber'); + jest.spyOn(definitions, 'clear'); jest.spyOn(rbSegments, 'clear'); jest.spyOn(segments, 'clear'); jest.spyOn(largeSegments, 'clear'); @@ -30,17 +30,17 
@@ describe.each(storages)('validateCache', (storage) => { }); test('if there is no cache, it should return initialCacheLoad: true', async () => { - const result = await validateCache({}, storage, fullSettings, keys, splits, rbSegments, segments, largeSegments); + const result = await validateCache({}, storage, fullSettings, keys, definitions, rbSegments, segments, largeSegments); expect(result.initialCacheLoad).toBe(true); expect(result.lastUpdateTimestamp).toBeUndefined(); expect(logSpy).not.toHaveBeenCalled(); - expect(splits.clear).not.toHaveBeenCalled(); + expect(definitions.clear).not.toHaveBeenCalled(); expect(rbSegments.clear).not.toHaveBeenCalled(); expect(segments.clear).not.toHaveBeenCalled(); expect(largeSegments.clear).not.toHaveBeenCalled(); - expect(splits.getChangeNumber).toHaveBeenCalledTimes(1); + expect(definitions.getChangeNumber).toHaveBeenCalledTimes(1); expect(storage.getItem(keys.buildHashKey())).toBe(FULL_SETTINGS_HASH); expect(storage.getItem(keys.buildLastClear())).toBeNull(); @@ -48,40 +48,40 @@ describe.each(storages)('validateCache', (storage) => { test('if there is cache and it must not be cleared, it should return initialCacheLoad: false', async () => { const lastUpdateTimestamp = Date.now() - 1000 * 60 * 60; // 1 hour ago - storage.setItem(keys.buildSplitsTillKey(), '1'); + storage.setItem(keys.buildDefinitionsTillKey(), '1'); storage.setItem(keys.buildHashKey(), FULL_SETTINGS_HASH); storage.setItem(keys.buildLastUpdatedKey(), lastUpdateTimestamp + ''); await storage.save && storage.save(); - const result = await validateCache({}, storage, fullSettings, keys, splits, rbSegments, segments, largeSegments); + const result = await validateCache({}, storage, fullSettings, keys, definitions, rbSegments, segments, largeSegments); expect(result.initialCacheLoad).toBe(false); expect(result.lastUpdateTimestamp).toBe(lastUpdateTimestamp); expect(logSpy).not.toHaveBeenCalled(); - expect(splits.clear).not.toHaveBeenCalled(); + 
expect(definitions.clear).not.toHaveBeenCalled(); expect(rbSegments.clear).not.toHaveBeenCalled(); expect(segments.clear).not.toHaveBeenCalled(); expect(largeSegments.clear).not.toHaveBeenCalled(); - expect(splits.getChangeNumber).toHaveBeenCalledTimes(1); + expect(definitions.getChangeNumber).toHaveBeenCalledTimes(1); expect(storage.getItem(keys.buildHashKey())).toBe(FULL_SETTINGS_HASH); expect(storage.getItem(keys.buildLastClear())).toBeNull(); }); test('if there is cache and it has expired, it should clear cache and return initialCacheLoad: true', async () => { - storage.setItem(keys.buildSplitsTillKey(), '1'); + storage.setItem(keys.buildDefinitionsTillKey(), '1'); storage.setItem(keys.buildHashKey(), FULL_SETTINGS_HASH); storage.setItem(keys.buildLastUpdatedKey(), Date.now() - 1000 * 60 * 60 * 24 * 2 + ''); // 2 days ago await storage.save && storage.save(); - const result = await validateCache({ expirationDays: 1 }, storage, fullSettings, keys, splits, rbSegments, segments, largeSegments); + const result = await validateCache({ expirationDays: 1 }, storage, fullSettings, keys, definitions, rbSegments, segments, largeSegments); expect(result.initialCacheLoad).toBe(true); expect(result.lastUpdateTimestamp).toBeUndefined(); expect(logSpy).toHaveBeenCalledWith('storage:localstorage: Cache expired more than 1 days ago. 
Cleaning up cache'); - expect(splits.clear).toHaveBeenCalledTimes(1); + expect(definitions.clear).toHaveBeenCalledTimes(1); expect(rbSegments.clear).toHaveBeenCalledTimes(1); expect(segments.clear).toHaveBeenCalledTimes(1); expect(largeSegments.clear).toHaveBeenCalledTimes(1); @@ -91,17 +91,17 @@ describe.each(storages)('validateCache', (storage) => { }); test('if there is cache and its hash has changed, it should clear cache and return initialCacheLoad: true', async () => { - storage.setItem(keys.buildSplitsTillKey(), '1'); + storage.setItem(keys.buildDefinitionsTillKey(), '1'); storage.setItem(keys.buildHashKey(), FULL_SETTINGS_HASH); await storage.save && storage.save(); - const result = await validateCache({}, storage, { ...fullSettings, core: { ...fullSettings.core, authorizationKey: 'another-sdk-key' } }, keys, splits, rbSegments, segments, largeSegments); + const result = await validateCache({}, storage, { ...fullSettings, core: { ...fullSettings.core, authorizationKey: 'another-sdk-key' } }, keys, definitions, rbSegments, segments, largeSegments); expect(result.initialCacheLoad).toBe(true); expect(result.lastUpdateTimestamp).toBeUndefined(); expect(logSpy).toHaveBeenCalledWith('storage:localstorage: SDK key, flags filter criteria, or flags spec version has changed. 
Cleaning up cache'); - expect(splits.clear).toHaveBeenCalledTimes(1); + expect(definitions.clear).toHaveBeenCalledTimes(1); expect(rbSegments.clear).toHaveBeenCalledTimes(1); expect(segments.clear).toHaveBeenCalledTimes(1); expect(largeSegments.clear).toHaveBeenCalledTimes(1); @@ -113,17 +113,17 @@ describe.each(storages)('validateCache', (storage) => { test('if there is cache and clearOnInit is true, it should clear cache and return initialCacheLoad: true', async () => { // Older cache version (without last clear) storage.removeItem(keys.buildLastClear()); - storage.setItem(keys.buildSplitsTillKey(), '1'); + storage.setItem(keys.buildDefinitionsTillKey(), '1'); storage.setItem(keys.buildHashKey(), FULL_SETTINGS_HASH); await storage.save && storage.save(); - const result = await validateCache({ clearOnInit: true }, storage, fullSettings, keys, splits, rbSegments, segments, largeSegments); + const result = await validateCache({ clearOnInit: true }, storage, fullSettings, keys, definitions, rbSegments, segments, largeSegments); expect(result.initialCacheLoad).toBe(true); expect(result.lastUpdateTimestamp).toBeUndefined(); expect(logSpy).toHaveBeenCalledWith('storage:localstorage: clearOnInit was set and cache was not cleared in the last 24 hours. 
Cleaning up cache'); - expect(splits.clear).toHaveBeenCalledTimes(1); + expect(definitions.clear).toHaveBeenCalledTimes(1); expect(rbSegments.clear).toHaveBeenCalledTimes(1); expect(segments.clear).toHaveBeenCalledTimes(1); expect(largeSegments.clear).toHaveBeenCalledTimes(1); @@ -135,9 +135,9 @@ describe.each(storages)('validateCache', (storage) => { // If cache is cleared, it should not clear again until a day has passed logSpy.mockClear(); const lastUpdateTimestamp = Date.now() - 1000 * 60 * 60; // 1 hour ago - storage.setItem(keys.buildSplitsTillKey(), '1'); + storage.setItem(keys.buildDefinitionsTillKey(), '1'); storage.setItem(keys.buildLastUpdatedKey(), lastUpdateTimestamp + ''); - const result2 = await validateCache({ clearOnInit: true }, storage, fullSettings, keys, splits, rbSegments, segments, largeSegments); + const result2 = await validateCache({ clearOnInit: true }, storage, fullSettings, keys, definitions, rbSegments, segments, largeSegments); expect(result2.initialCacheLoad).toBe(false); expect(result2.lastUpdateTimestamp).toBe(lastUpdateTimestamp); expect(logSpy).not.toHaveBeenCalled(); @@ -145,11 +145,11 @@ describe.each(storages)('validateCache', (storage) => { // If a day has passed, it should clear again storage.setItem(keys.buildLastClear(), (Date.now() - 1000 * 60 * 60 * 24 - 1) + ''); - const result3 = await validateCache({ clearOnInit: true }, storage, fullSettings, keys, splits, rbSegments, segments, largeSegments); + const result3 = await validateCache({ clearOnInit: true }, storage, fullSettings, keys, definitions, rbSegments, segments, largeSegments); expect(result3.initialCacheLoad).toBe(true); expect(result3.lastUpdateTimestamp).toBeUndefined(); expect(logSpy).toHaveBeenCalledWith('storage:localstorage: clearOnInit was set and cache was not cleared in the last 24 hours. 
Cleaning up cache'); - expect(splits.clear).toHaveBeenCalledTimes(2); + expect(definitions.clear).toHaveBeenCalledTimes(2); expect(rbSegments.clear).toHaveBeenCalledTimes(2); expect(segments.clear).toHaveBeenCalledTimes(2); expect(largeSegments.clear).toHaveBeenCalledTimes(2); diff --git a/src/storages/inLocalStorage/index.ts b/src/storages/inLocalStorage/index.ts index cc5d38f4..5b063ed3 100644 --- a/src/storages/inLocalStorage/index.ts +++ b/src/storages/inLocalStorage/index.ts @@ -5,7 +5,7 @@ import { IStorageFactoryParams, IStorageSync, IStorageSyncFactory, StorageAdapte import { validatePrefix } from '../KeyBuilder'; import { KeyBuilderCS, myLargeSegmentsKeyBuilder } from '../KeyBuilderCS'; import { isLocalStorageAvailable, isValidStorageWrapper, isWebStorage } from '../../utils/env/isLocalStorageAvailable'; -import { SplitsCacheInLocal } from './SplitsCacheInLocal'; +import { DefinitionsCacheInLocal } from './DefinitionsCacheInLocal'; import { RBSegmentsCacheInLocal } from './RBSegmentsCacheInLocal'; import { MySegmentsCacheInLocal } from './MySegmentsCacheInLocal'; import { InMemoryStorageCSFactory } from '../inMemory/InMemoryStorageCS'; @@ -50,26 +50,26 @@ export function InLocalStorage(options: SplitIO.InLocalStorageOptions = {}): ISt const matchingKey = getMatching(settings.core.key); const keys = new KeyBuilderCS(prefix, matchingKey); - const splits = new SplitsCacheInLocal(settings, keys, storage); + const definitions = new DefinitionsCacheInLocal(settings, keys, storage); const rbSegments = new RBSegmentsCacheInLocal(settings, keys, storage); const segments = new MySegmentsCacheInLocal(log, keys, storage); const largeSegments = new MySegmentsCacheInLocal(log, myLargeSegmentsKeyBuilder(prefix, matchingKey), storage); let validateCachePromise: Promise | undefined; return { - splits, + definitions, rbSegments, segments, largeSegments, impressions: new ImpressionsCacheInMemory(impressionsQueueSize), impressionCounts: new ImpressionCountsCacheInMemory(), 
events: new EventsCacheInMemory(eventsQueueSize), - telemetry: shouldRecordTelemetry(params) ? new TelemetryCacheInMemory(splits, segments) : undefined, + telemetry: shouldRecordTelemetry(params) ? new TelemetryCacheInMemory(definitions, segments) : undefined, uniqueKeys: new UniqueKeysCacheInMemoryCS(), validateCache() { if (!validateCachePromise) { - validateCachePromise = validateCache(options, storage, settings, keys, splits, rbSegments, segments, largeSegments); + validateCachePromise = validateCache(options, storage, settings, keys, definitions, rbSegments, segments, largeSegments); } return validateCachePromise; }, @@ -86,7 +86,7 @@ export function InLocalStorage(options: SplitIO.InLocalStorageOptions = {}): ISt shared(matchingKey: string) { return { - splits: this.splits, + definitions: this.definitions, rbSegments: this.rbSegments, segments: new MySegmentsCacheInLocal(log, new KeyBuilderCS(prefix, matchingKey), storage), largeSegments: new MySegmentsCacheInLocal(log, myLargeSegmentsKeyBuilder(prefix, matchingKey), storage), diff --git a/src/storages/inLocalStorage/validateCache.ts b/src/storages/inLocalStorage/validateCache.ts index d9fa8de0..dccb09c9 100644 --- a/src/storages/inLocalStorage/validateCache.ts +++ b/src/storages/inLocalStorage/validateCache.ts @@ -2,7 +2,7 @@ import { ISettings } from '../../types'; import { isFiniteNumber, isNaNNumber } from '../../utils/lang'; import { getStorageHash } from '../KeyBuilder'; import { LOG_PREFIX } from './constants'; -import type { SplitsCacheInLocal } from './SplitsCacheInLocal'; +import type { DefinitionsCacheInLocal } from './DefinitionsCacheInLocal'; import type { RBSegmentsCacheInLocal } from './RBSegmentsCacheInLocal'; import type { MySegmentsCacheInLocal } from './MySegmentsCacheInLocal'; import { KeyBuilderCS } from '../KeyBuilderCS'; @@ -68,11 +68,11 @@ function validateExpiration(options: SplitIO.InLocalStorageOptions, storage: Sto * * @returns Metadata object with `initialCacheLoad` (true if is 
fresh install, false if is ready from cache) and `lastUpdateTimestamp` (timestamp of last cache update or undefined) */ -export function validateCache(options: SplitIO.InLocalStorageOptions, storage: StorageAdapter, settings: ISettings, keys: KeyBuilderCS, splits: SplitsCacheInLocal, rbSegments: RBSegmentsCacheInLocal, segments: MySegmentsCacheInLocal, largeSegments: MySegmentsCacheInLocal): Promise { +export function validateCache(options: SplitIO.InLocalStorageOptions, storage: StorageAdapter, settings: ISettings, keys: KeyBuilderCS, definitions: DefinitionsCacheInLocal, rbSegments: RBSegmentsCacheInLocal, segments: MySegmentsCacheInLocal, largeSegments: MySegmentsCacheInLocal): Promise { return Promise.resolve(storage.load && storage.load()).then(() => { const currentTimestamp = Date.now(); - const isThereCache = splits.getChangeNumber() > -1; + const isThereCache = definitions.getChangeNumber() > -1; // Get lastUpdateTimestamp from storage const lastUpdatedTimestampStr = storage.getItem(keys.buildLastUpdatedKey()); @@ -80,7 +80,7 @@ export function validateCache(options: SplitIO.InLocalStorageOptions, storage: S const lastUpdateTimestamp = (!isNaNNumber(lastUpdatedTimestamp) && lastUpdatedTimestamp !== undefined) ? 
lastUpdatedTimestamp : undefined; if (validateExpiration(options, storage, settings, keys, currentTimestamp, isThereCache)) { - splits.clear(); + definitions.clear(); rbSegments.clear(); segments.clear(); largeSegments.clear(); diff --git a/src/storages/inMemory/DefinitionsCacheInMemory.ts b/src/storages/inMemory/DefinitionsCacheInMemory.ts new file mode 100644 index 00000000..f0d5addd --- /dev/null +++ b/src/storages/inMemory/DefinitionsCacheInMemory.ts @@ -0,0 +1,130 @@ +import { IDefinition, ISplitFiltersValidation } from '../../dtos/types'; +import { AbstractDefinitionsCacheSync, usesSegments } from '../AbstractDefinitionsCacheSync'; +import { isFiniteNumber } from '../../utils/lang'; + +/** + * Default IDefinitionsCacheSync implementation that stores split definitions in memory. + */ +export class DefinitionsCacheInMemory extends AbstractDefinitionsCacheSync { + + private setsFilter: string[]; + private definitionsCache: Record = {}; + private ttCache: Record = {}; + private changeNumber: number = -1; + private segmentsCount: number = 0; + private setsCache: Record> = {}; + + constructor(splitFiltersValidation?: ISplitFiltersValidation) { + super(); + this.setsFilter = splitFiltersValidation ? 
splitFiltersValidation.groupedFilters.bySet : []; + } + + clear() { + this.definitionsCache = {}; + this.ttCache = {}; + this.changeNumber = -1; + this.segmentsCount = 0; + this.setsCache = {}; + } + + add(definition: IDefinition): boolean { + const name = definition.name; + const previousDefinition = this.get(name); + if (previousDefinition) { // We had this Split already + + const previousTtName = previousDefinition.trafficTypeName; + this.ttCache[previousTtName]--; + if (!this.ttCache[previousTtName]) delete this.ttCache[previousTtName]; + + this.removeFromFlagSets(previousDefinition.name, previousDefinition.sets); + + // Subtract from segments count for the previous version of this Split + if (usesSegments(previousDefinition)) this.segmentsCount--; + } + + // Store the Split. + this.definitionsCache[name] = definition; + // Update TT cache + const ttName = definition.trafficTypeName; + this.ttCache[ttName] = (this.ttCache[ttName] || 0) + 1; + this.addToFlagSets(definition); + + // Add to segments count for the new version of the Split + if (usesSegments(definition)) this.segmentsCount++; + + return true; + } + + remove(name: string): boolean { + const definition = this.get(name); + if (!definition) return false; + + // Delete the Split + delete this.definitionsCache[name]; + + const ttName = definition.trafficTypeName; + this.ttCache[ttName]--; // Update tt cache + if (!this.ttCache[ttName]) delete this.ttCache[ttName]; + this.removeFromFlagSets(definition.name, definition.sets); + + // Update the segments count. 
+ if (usesSegments(definition)) this.segmentsCount--; + + return true; + } + + get(name: string): IDefinition | null { + return this.definitionsCache[name] || null; + } + + setChangeNumber(changeNumber: number): boolean { + this.changeNumber = changeNumber; + return true; + } + + getChangeNumber(): number { + return this.changeNumber; + } + + getNames(): string[] { + return Object.keys(this.definitionsCache); + } + + trafficTypeExists(trafficType: string): boolean { + return isFiniteNumber(this.ttCache[trafficType]) && this.ttCache[trafficType] > 0; + } + + usesSegments(): boolean { + return this.getChangeNumber() === -1 || this.segmentsCount > 0; + } + + getNamesBySets(sets: string[]): Set[] { + return sets.map(set => this.setsCache[set] || new Set()); + } + + private addToFlagSets(featureFlag: IDefinition) { + if (!featureFlag.sets) return; + featureFlag.sets.forEach(featureFlagSet => { + + if (this.setsFilter.length > 0 && !this.setsFilter.some(filterFlagSet => filterFlagSet === featureFlagSet)) return; + + if (!this.setsCache[featureFlagSet]) this.setsCache[featureFlagSet] = new Set([]); + + this.setsCache[featureFlagSet].add(featureFlag.name); + }); + } + + private removeFromFlagSets(featureFlagName: string, sets?: string[] | null) { + if (!sets) return; + sets.forEach(set => { + this.removeNames(set, featureFlagName); + }); + } + + private removeNames(setName: string, featureFlagName: string) { + if (!this.setsCache[setName]) return; + this.setsCache[setName].delete(featureFlagName); + if (this.setsCache[setName].size === 0) delete this.setsCache[setName]; + } + +} diff --git a/src/storages/inMemory/InMemoryStorage.ts b/src/storages/inMemory/InMemoryStorage.ts index e89a875d..47eb9777 100644 --- a/src/storages/inMemory/InMemoryStorage.ts +++ b/src/storages/inMemory/InMemoryStorage.ts @@ -1,4 +1,4 @@ -import { SplitsCacheInMemory } from './SplitsCacheInMemory'; +import { DefinitionsCacheInMemory } from './DefinitionsCacheInMemory'; import { 
SegmentsCacheInMemory } from './SegmentsCacheInMemory'; import { ImpressionsCacheInMemory } from './ImpressionsCacheInMemory'; import { EventsCacheInMemory } from './EventsCacheInMemory'; @@ -17,18 +17,18 @@ import { RBSegmentsCacheInMemory } from './RBSegmentsCacheInMemory'; export function InMemoryStorageFactory(params: IStorageFactoryParams): IStorageSync { const { settings: { scheduler: { impressionsQueueSize, eventsQueueSize, }, sync: { __splitFiltersValidation } } } = params; - const splits = new SplitsCacheInMemory(__splitFiltersValidation); + const definitions = new DefinitionsCacheInMemory(__splitFiltersValidation); const rbSegments = new RBSegmentsCacheInMemory(); const segments = new SegmentsCacheInMemory(); const storage = { - splits, + definitions, rbSegments, segments, impressions: new ImpressionsCacheInMemory(impressionsQueueSize), impressionCounts: new ImpressionCountsCacheInMemory(), events: new EventsCacheInMemory(eventsQueueSize), - telemetry: shouldRecordTelemetry(params) ? new TelemetryCacheInMemory(splits, segments) : undefined, + telemetry: shouldRecordTelemetry(params) ? 
new TelemetryCacheInMemory(definitions, segments) : undefined, uniqueKeys: new UniqueKeysCacheInMemory(), destroy() { } diff --git a/src/storages/inMemory/InMemoryStorageCS.ts b/src/storages/inMemory/InMemoryStorageCS.ts index 5ae8351c..70254c1d 100644 --- a/src/storages/inMemory/InMemoryStorageCS.ts +++ b/src/storages/inMemory/InMemoryStorageCS.ts @@ -1,4 +1,4 @@ -import { SplitsCacheInMemory } from './SplitsCacheInMemory'; +import { DefinitionsCacheInMemory } from './DefinitionsCacheInMemory'; import { MySegmentsCacheInMemory } from './MySegmentsCacheInMemory'; import { ImpressionsCacheInMemory } from './ImpressionsCacheInMemory'; import { EventsCacheInMemory } from './EventsCacheInMemory'; @@ -17,20 +17,20 @@ import { RBSegmentsCacheInMemory } from './RBSegmentsCacheInMemory'; export function InMemoryStorageCSFactory(params: IStorageFactoryParams): IStorageSync { const { settings: { scheduler: { impressionsQueueSize, eventsQueueSize }, sync: { __splitFiltersValidation } } } = params; - const splits = new SplitsCacheInMemory(__splitFiltersValidation); + const definitions = new DefinitionsCacheInMemory(__splitFiltersValidation); const rbSegments = new RBSegmentsCacheInMemory(); const segments = new MySegmentsCacheInMemory(); const largeSegments = new MySegmentsCacheInMemory(); const storage = { - splits, + definitions, rbSegments, segments, largeSegments, impressions: new ImpressionsCacheInMemory(impressionsQueueSize), impressionCounts: new ImpressionCountsCacheInMemory(), events: new EventsCacheInMemory(eventsQueueSize), - telemetry: shouldRecordTelemetry(params) ? new TelemetryCacheInMemory(splits, segments) : undefined, + telemetry: shouldRecordTelemetry(params) ? 
new TelemetryCacheInMemory(definitions, segments) : undefined, uniqueKeys: new UniqueKeysCacheInMemoryCS(), destroy() { }, @@ -38,7 +38,7 @@ export function InMemoryStorageCSFactory(params: IStorageFactoryParams): IStorag // When using shared instantiation with MEMORY we reuse everything but segments (they are unique per key) shared() { return { - splits: this.splits, + definitions: this.definitions, rbSegments: this.rbSegments, segments: new MySegmentsCacheInMemory(), largeSegments: new MySegmentsCacheInMemory(), diff --git a/src/storages/inMemory/RBSegmentsCacheInMemory.ts b/src/storages/inMemory/RBSegmentsCacheInMemory.ts index 995ed46a..059347c5 100644 --- a/src/storages/inMemory/RBSegmentsCacheInMemory.ts +++ b/src/storages/inMemory/RBSegmentsCacheInMemory.ts @@ -1,6 +1,6 @@ import { IRBSegment } from '../../dtos/types'; import { setToArray } from '../../utils/lang/sets'; -import { usesSegments } from '../AbstractSplitsCacheSync'; +import { usesSegments } from '../AbstractDefinitionsCacheSync'; import { IRBSegmentsCacheSync } from '../types'; export class RBSegmentsCacheInMemory implements IRBSegmentsCacheSync { diff --git a/src/storages/inMemory/SegmentsCacheInMemory.ts b/src/storages/inMemory/SegmentsCacheInMemory.ts index d25b89d2..4ed85edf 100644 --- a/src/storages/inMemory/SegmentsCacheInMemory.ts +++ b/src/storages/inMemory/SegmentsCacheInMemory.ts @@ -2,7 +2,7 @@ import { isIntegerNumber } from '../../utils/lang'; import { ISegmentsCacheSync } from '../types'; /** - * Default ISplitsCacheSync implementation for server-side that stores segments definitions in memory. + * Default IDefinitionsCacheSync implementation for server-side that stores segments definitions in memory. 
*/ export class SegmentsCacheInMemory implements ISegmentsCacheSync { diff --git a/src/storages/inMemory/SplitsCacheInMemory.ts b/src/storages/inMemory/SplitsCacheInMemory.ts deleted file mode 100644 index 6f611279..00000000 --- a/src/storages/inMemory/SplitsCacheInMemory.ts +++ /dev/null @@ -1,130 +0,0 @@ -import { IDefinition, ISplitFiltersValidation } from '../../dtos/types'; -import { AbstractSplitsCacheSync, usesSegments } from '../AbstractSplitsCacheSync'; -import { isFiniteNumber } from '../../utils/lang'; - -/** - * Default ISplitsCacheSync implementation that stores split definitions in memory. - */ -export class SplitsCacheInMemory extends AbstractSplitsCacheSync { - - private flagSetsFilter: string[]; - private splitsCache: Record = {}; - private ttCache: Record = {}; - private changeNumber: number = -1; - private segmentsCount: number = 0; - private flagSetsCache: Record> = {}; - - constructor(splitFiltersValidation?: ISplitFiltersValidation) { - super(); - this.flagSetsFilter = splitFiltersValidation ? splitFiltersValidation.groupedFilters.bySet : []; - } - - clear() { - this.splitsCache = {}; - this.ttCache = {}; - this.changeNumber = -1; - this.segmentsCount = 0; - this.flagSetsCache = {}; - } - - addSplit(split: IDefinition): boolean { - const name = split.name; - const previousSplit = this.getSplit(name); - if (previousSplit) { // We had this Split already - - const previousTtName = previousSplit.trafficTypeName; - this.ttCache[previousTtName]--; - if (!this.ttCache[previousTtName]) delete this.ttCache[previousTtName]; - - this.removeFromFlagSets(previousSplit.name, previousSplit.sets); - - // Subtract from segments count for the previous version of this Split - if (usesSegments(previousSplit)) this.segmentsCount--; - } - - // Store the Split. 
- this.splitsCache[name] = split; - // Update TT cache - const ttName = split.trafficTypeName; - this.ttCache[ttName] = (this.ttCache[ttName] || 0) + 1; - this.addToFlagSets(split); - - // Add to segments count for the new version of the Split - if (usesSegments(split)) this.segmentsCount++; - - return true; - } - - removeSplit(name: string): boolean { - const split = this.getSplit(name); - if (!split) return false; - - // Delete the Split - delete this.splitsCache[name]; - - const ttName = split.trafficTypeName; - this.ttCache[ttName]--; // Update tt cache - if (!this.ttCache[ttName]) delete this.ttCache[ttName]; - this.removeFromFlagSets(split.name, split.sets); - - // Update the segments count. - if (usesSegments(split)) this.segmentsCount--; - - return true; - } - - getSplit(name: string): IDefinition | null { - return this.splitsCache[name] || null; - } - - setChangeNumber(changeNumber: number): boolean { - this.changeNumber = changeNumber; - return true; - } - - getChangeNumber(): number { - return this.changeNumber; - } - - getSplitNames(): string[] { - return Object.keys(this.splitsCache); - } - - trafficTypeExists(trafficType: string): boolean { - return isFiniteNumber(this.ttCache[trafficType]) && this.ttCache[trafficType] > 0; - } - - usesSegments(): boolean { - return this.getChangeNumber() === -1 || this.segmentsCount > 0; - } - - getNamesByFlagSets(flagSets: string[]): Set[] { - return flagSets.map(flagSet => this.flagSetsCache[flagSet] || new Set()); - } - - private addToFlagSets(featureFlag: IDefinition) { - if (!featureFlag.sets) return; - featureFlag.sets.forEach(featureFlagSet => { - - if (this.flagSetsFilter.length > 0 && !this.flagSetsFilter.some(filterFlagSet => filterFlagSet === featureFlagSet)) return; - - if (!this.flagSetsCache[featureFlagSet]) this.flagSetsCache[featureFlagSet] = new Set([]); - - this.flagSetsCache[featureFlagSet].add(featureFlag.name); - }); - } - - private removeFromFlagSets(featureFlagName: string, flagSets?: string[] 
| null) { - if (!flagSets) return; - flagSets.forEach(flagSet => { - this.removeNames(flagSet, featureFlagName); - }); - } - - private removeNames(flagSetName: string, featureFlagName: string) { - if (!this.flagSetsCache[flagSetName]) return; - this.flagSetsCache[flagSetName].delete(featureFlagName); - if (this.flagSetsCache[flagSetName].size === 0) delete this.flagSetsCache[flagSetName]; - } - -} diff --git a/src/storages/inMemory/TelemetryCacheInMemory.ts b/src/storages/inMemory/TelemetryCacheInMemory.ts index a0fad927..29efa02d 100644 --- a/src/storages/inMemory/TelemetryCacheInMemory.ts +++ b/src/storages/inMemory/TelemetryCacheInMemory.ts @@ -2,7 +2,7 @@ import { ImpressionDataType, EventDataType, LastSync, HttpErrors, HttpLatencies, import { DEDUPED, DROPPED, LOCALHOST_MODE, QUEUED } from '../../utils/constants'; import { checkIfServerSide } from '../../utils/key'; import { findLatencyIndex } from '../findLatencyIndex'; -import { ISegmentsCacheSync, ISplitsCacheSync, IStorageFactoryParams, ITelemetryCacheSync } from '../types'; +import { ISegmentsCacheSync, IDefinitionsCacheSync, IStorageFactoryParams, ITelemetryCacheSync } from '../types'; const MAX_STREAMING_EVENTS = 20; const MAX_TAGS = 10; @@ -28,7 +28,7 @@ export class TelemetryCacheInMemory implements ITelemetryCacheSync { public name = 'telemetry stats'; - constructor(private splits?: ISplitsCacheSync, private segments?: ISegmentsCacheSync, private largeSegments?: ISegmentsCacheSync) { } + constructor(private definitions?: IDefinitionsCacheSync, private segments?: ISegmentsCacheSync, private largeSegments?: ISegmentsCacheSync) { } // isEmpty flag private e = true; @@ -51,7 +51,7 @@ export class TelemetryCacheInMemory implements ITelemetryCacheSync { iQ: this.getImpressionStats(QUEUED), iDe: this.getImpressionStats(DEDUPED), iDr: this.getImpressionStats(DROPPED), - spC: this.splits && this.splits.getSplitNames().length, + spC: this.definitions && this.definitions.getNames().length, seC: this.segments && 
this.segments.getRegisteredSegments().length, skC: this.segments && this.segments.getKeysCount(), lsC: this.largeSegments && this.largeSegments.getRegisteredSegments().length, diff --git a/src/storages/inMemory/__tests__/DefinitionsCacheInMemory.spec.ts b/src/storages/inMemory/__tests__/DefinitionsCacheInMemory.spec.ts new file mode 100644 index 00000000..77e96414 --- /dev/null +++ b/src/storages/inMemory/__tests__/DefinitionsCacheInMemory.spec.ts @@ -0,0 +1,190 @@ +import { DefinitionsCacheInMemory } from '../DefinitionsCacheInMemory'; +import { IDefinition } from '../../../dtos/types'; +import { splitWithUserTT, splitWithAccountTT, something, somethingElse, featureFlagWithEmptyFS, featureFlagWithoutFS, featureFlagOne, featureFlagTwo, featureFlagThree } from '../../__tests__/testUtils'; + +test('DEFINITIONS CACHE / In Memory', () => { + const cache = new DefinitionsCacheInMemory(); + + cache.update([something, somethingElse], [], -1); + + let values = cache.getAll(); + + expect(values).toEqual([something, somethingElse]); + + cache.remove(something.name); + + const definitions = cache.getMany([something.name, somethingElse.name]); + expect(definitions[something.name]).toEqual(null); + expect(definitions[somethingElse.name]).toEqual(somethingElse); + + values = cache.getAll(); + + expect(values).toEqual([somethingElse]); + + expect(cache.get(something.name)).toEqual(null); + expect(cache.get(somethingElse.name)).toEqual(somethingElse); + + expect(cache.getChangeNumber()).toBe(-1); + cache.setChangeNumber(123); + expect(cache.getChangeNumber()).toBe(123); + +}); + +test('DEFINITIONS CACHE / In Memory / Get Keys', () => { + const cache = new DefinitionsCacheInMemory(); + + cache.update([something, somethingElse], [], 1); + + const keys = cache.getNames(); + + expect(keys.indexOf(something.name) !== -1).toBe(true); + expect(keys.indexOf(somethingElse.name) !== -1).toBe(true); +}); + +test('DEFINITIONS CACHE / In Memory / Update', () => { + const cache = new 
DefinitionsCacheInMemory(); + + cache.update([something, somethingElse], [], 1); + + cache.update([], [something.name, somethingElse.name], 1); + + expect(cache.get(something.name)).toBe(null); + expect(cache.get(somethingElse.name)).toBe(null); +}); + +test('DEFINITIONS CACHE / In Memory / trafficTypeExists and ttcache tests', () => { + const cache = new DefinitionsCacheInMemory(); + + cache.update([ + { ...splitWithUserTT, name: 'split1' }, + { ...splitWithAccountTT, name: 'split2' }, + { ...splitWithUserTT, name: 'split3' }, + ], [], 1); + cache.add({ ...splitWithUserTT, name: 'split4' }); + + expect(cache.trafficTypeExists('user_tt')).toBe(true); + expect(cache.trafficTypeExists('account_tt')).toBe(true); + expect(cache.trafficTypeExists('not_existent_tt')).toBe(false); + + cache.remove('split4'); + + expect(cache.trafficTypeExists('user_tt')).toBe(true); + expect(cache.trafficTypeExists('account_tt')).toBe(true); + + cache.remove('split3'); + cache.remove('split2'); + + expect(cache.trafficTypeExists('user_tt')).toBe(true); + expect(cache.trafficTypeExists('account_tt')).toBe(false); + + cache.remove('split1'); + + expect(cache.trafficTypeExists('user_tt')).toBe(false); + expect(cache.trafficTypeExists('account_tt')).toBe(false); + + cache.add({ ...splitWithUserTT, name: 'split1' }); + expect(cache.trafficTypeExists('user_tt')).toBe(true); + + cache.add({ ...splitWithAccountTT, name: 'split1' }); + expect(cache.trafficTypeExists('account_tt')).toBe(true); + expect(cache.trafficTypeExists('user_tt')).toBe(false); + +}); + +test('DEFINITIONS CACHE / In Memory / killLocally', () => { + const cache = new DefinitionsCacheInMemory(); + cache.add(something); + cache.add(somethingElse); + const initialChangeNumber = cache.getChangeNumber(); + + // kill an non-existent split + let updated = cache.killLocally('nonexistent_split', 'other_treatment', 101); + const nonexistentSplit = cache.get('nonexistent_split'); + + expect(updated).toBe(false); // killLocally resolves 
without update if split doesn't exist + expect(nonexistentSplit).toBe(null); // non-existent split keeps being non-existent + + // kill an existent split + updated = cache.killLocally(something.name, 'some_treatment', 100); + let lol1Split = cache.get(something.name) as IDefinition; + + expect(updated).toBe(true); // killLocally resolves with update if split is changed + expect(lol1Split.killed).toBe(true); // existing split must be killed + expect(lol1Split.defaultTreatment).toBe('some_treatment'); // existing split must have new default treatment + expect(lol1Split.changeNumber).toBe(100); // existing split must have the given change number + expect(cache.getChangeNumber()).toBe(initialChangeNumber); // cache changeNumber is not changed + + // not update if changeNumber is old + updated = cache.killLocally(something.name, 'some_treatment_2', 90); + lol1Split = cache.get(something.name) as IDefinition; + + expect(updated).toBe(false); // killLocally resolves without update if changeNumber is old + expect(lol1Split.defaultTreatment).not.toBe('some_treatment_2'); // existing split is not updated if given changeNumber is older + +}); + +test('DEFINITIONS CACHE / In Memory / sets cache tests', () => { + // @ts-ignore + const cache = new DefinitionsCacheInMemory({ groupedFilters: { bySet: ['o', 'n', 'e', 'x'] } }); + const emptySet = new Set([]); + + cache.update([ + featureFlagOne, + featureFlagTwo, + featureFlagThree, + ], [], -1); + cache.add(featureFlagWithEmptyFS); + + // Adding an existing FF should not affect the cache + cache.update([featureFlagTwo], [], -1); + + expect(cache.getNamesBySets(['o'])).toEqual([new Set(['ff_one', 'ff_two'])]); + expect(cache.getNamesBySets(['n'])).toEqual([new Set(['ff_one'])]); + expect(cache.getNamesBySets(['e'])).toEqual([new Set(['ff_one', 'ff_three'])]); + expect(cache.getNamesBySets(['t'])).toEqual([emptySet]); // 't' not in filter + expect(cache.getNamesBySets(['o', 'n', 'e'])).toEqual([new Set(['ff_one', 'ff_two']), new 
Set(['ff_one']), new Set(['ff_one', 'ff_three'])]); + + cache.add({ ...featureFlagOne, sets: ['1'] }); + + expect(cache.getNamesBySets(['1'])).toEqual([emptySet]); // '1' not in filter + expect(cache.getNamesBySets(['o'])).toEqual([new Set(['ff_two'])]); + expect(cache.getNamesBySets(['n'])).toEqual([emptySet]); + + cache.add({ ...featureFlagOne, sets: ['x'] }); + expect(cache.getNamesBySets(['x'])).toEqual([new Set(['ff_one'])]); + expect(cache.getNamesBySets(['o', 'e', 'x'])).toEqual([new Set(['ff_two']), new Set(['ff_three']), new Set(['ff_one'])]); + + + cache.remove(featureFlagOne.name); + expect(cache.getNamesBySets(['x'])).toEqual([emptySet]); + + cache.remove(featureFlagOne.name); + expect(cache.getNamesBySets(['y'])).toEqual([emptySet]); // 'y' not in filter + expect(cache.getNamesBySets([])).toEqual([]); + + cache.add(featureFlagWithoutFS); + expect(cache.getNamesBySets([])).toEqual([]); + + cache.clear(); + expect(cache.getNamesBySets(['o', 'e', 'x'])).toEqual([emptySet, emptySet, emptySet]); +}); + +// if sets are not defined, it should store all sets in memory. 
+test('DEFINITIONS CACHE / In Memory / sets cache tests without filters', () => { + const cacheWithoutFilters = new DefinitionsCacheInMemory(); + const emptySet = new Set([]); + + cacheWithoutFilters.update([ + featureFlagOne, + featureFlagTwo, + featureFlagThree, + ], [], -1); + cacheWithoutFilters.add(featureFlagWithEmptyFS); + + expect(cacheWithoutFilters.getNamesBySets(['o'])).toEqual([new Set(['ff_one', 'ff_two'])]); + expect(cacheWithoutFilters.getNamesBySets(['n'])).toEqual([new Set(['ff_one'])]); + expect(cacheWithoutFilters.getNamesBySets(['e'])).toEqual([new Set(['ff_one', 'ff_three'])]); + expect(cacheWithoutFilters.getNamesBySets(['t'])).toEqual([new Set(['ff_two', 'ff_three'])]); + expect(cacheWithoutFilters.getNamesBySets(['y'])).toEqual([emptySet]); + expect(cacheWithoutFilters.getNamesBySets(['o', 'n', 'e'])).toEqual([new Set(['ff_one', 'ff_two']), new Set(['ff_one']), new Set(['ff_one', 'ff_three'])]); +}); diff --git a/src/storages/inMemory/__tests__/SplitsCacheInMemory.spec.ts b/src/storages/inMemory/__tests__/SplitsCacheInMemory.spec.ts deleted file mode 100644 index 56e002d3..00000000 --- a/src/storages/inMemory/__tests__/SplitsCacheInMemory.spec.ts +++ /dev/null @@ -1,190 +0,0 @@ -import { SplitsCacheInMemory } from '../SplitsCacheInMemory'; -import { IDefinition } from '../../../dtos/types'; -import { splitWithUserTT, splitWithAccountTT, something, somethingElse, featureFlagWithEmptyFS, featureFlagWithoutFS, featureFlagOne, featureFlagTwo, featureFlagThree } from '../../__tests__/testUtils'; - -test('SPLITS CACHE / In Memory', () => { - const cache = new SplitsCacheInMemory(); - - cache.update([something, somethingElse], [], -1); - - let values = cache.getAll(); - - expect(values).toEqual([something, somethingElse]); - - cache.removeSplit(something.name); - - const splits = cache.getSplits([something.name, somethingElse.name]); - expect(splits[something.name]).toEqual(null); - expect(splits[somethingElse.name]).toEqual(somethingElse); - - 
values = cache.getAll(); - - expect(values).toEqual([somethingElse]); - - expect(cache.getSplit(something.name)).toEqual(null); - expect(cache.getSplit(somethingElse.name)).toEqual(somethingElse); - - expect(cache.getChangeNumber()).toBe(-1); - cache.setChangeNumber(123); - expect(cache.getChangeNumber()).toBe(123); - -}); - -test('SPLITS CACHE / In Memory / Get Keys', () => { - const cache = new SplitsCacheInMemory(); - - cache.update([something, somethingElse], [], 1); - - const keys = cache.getSplitNames(); - - expect(keys.indexOf(something.name) !== -1).toBe(true); - expect(keys.indexOf(somethingElse.name) !== -1).toBe(true); -}); - -test('SPLITS CACHE / In Memory / Update Splits', () => { - const cache = new SplitsCacheInMemory(); - - cache.update([something, somethingElse], [], 1); - - cache.update([], [something.name, somethingElse.name], 1); - - expect(cache.getSplit(something.name)).toBe(null); - expect(cache.getSplit(somethingElse.name)).toBe(null); -}); - -test('SPLITS CACHE / In Memory / trafficTypeExists and ttcache tests', () => { - const cache = new SplitsCacheInMemory(); - - cache.update([ - { ...splitWithUserTT, name: 'split1' }, - { ...splitWithAccountTT, name: 'split2' }, - { ...splitWithUserTT, name: 'split3' }, - ], [], 1); - cache.addSplit({ ...splitWithUserTT, name: 'split4' }); - - expect(cache.trafficTypeExists('user_tt')).toBe(true); - expect(cache.trafficTypeExists('account_tt')).toBe(true); - expect(cache.trafficTypeExists('not_existent_tt')).toBe(false); - - cache.removeSplit('split4'); - - expect(cache.trafficTypeExists('user_tt')).toBe(true); - expect(cache.trafficTypeExists('account_tt')).toBe(true); - - cache.removeSplit('split3'); - cache.removeSplit('split2'); - - expect(cache.trafficTypeExists('user_tt')).toBe(true); - expect(cache.trafficTypeExists('account_tt')).toBe(false); - - cache.removeSplit('split1'); - - expect(cache.trafficTypeExists('user_tt')).toBe(false); - expect(cache.trafficTypeExists('account_tt')).toBe(false); - 
- cache.addSplit({ ...splitWithUserTT, name: 'split1' }); - expect(cache.trafficTypeExists('user_tt')).toBe(true); - - cache.addSplit({ ...splitWithAccountTT, name: 'split1' }); - expect(cache.trafficTypeExists('account_tt')).toBe(true); - expect(cache.trafficTypeExists('user_tt')).toBe(false); - -}); - -test('SPLITS CACHE / In Memory / killLocally', () => { - const cache = new SplitsCacheInMemory(); - cache.addSplit(something); - cache.addSplit(somethingElse); - const initialChangeNumber = cache.getChangeNumber(); - - // kill an non-existent split - let updated = cache.killLocally('nonexistent_split', 'other_treatment', 101); - const nonexistentSplit = cache.getSplit('nonexistent_split'); - - expect(updated).toBe(false); // killLocally resolves without update if split doesn't exist - expect(nonexistentSplit).toBe(null); // non-existent split keeps being non-existent - - // kill an existent split - updated = cache.killLocally(something.name, 'some_treatment', 100); - let lol1Split = cache.getSplit(something.name) as IDefinition; - - expect(updated).toBe(true); // killLocally resolves with update if split is changed - expect(lol1Split.killed).toBe(true); // existing split must be killed - expect(lol1Split.defaultTreatment).toBe('some_treatment'); // existing split must have new default treatment - expect(lol1Split.changeNumber).toBe(100); // existing split must have the given change number - expect(cache.getChangeNumber()).toBe(initialChangeNumber); // cache changeNumber is not changed - - // not update if changeNumber is old - updated = cache.killLocally(something.name, 'some_treatment_2', 90); - lol1Split = cache.getSplit(something.name) as IDefinition; - - expect(updated).toBe(false); // killLocally resolves without update if changeNumber is old - expect(lol1Split.defaultTreatment).not.toBe('some_treatment_2'); // existing split is not updated if given changeNumber is older - -}); - -test('SPLITS CACHE / In Memory / flag set cache tests', () => { - // @ts-ignore 
- const cache = new SplitsCacheInMemory({ groupedFilters: { bySet: ['o', 'n', 'e', 'x'] } }); - const emptySet = new Set([]); - - cache.update([ - featureFlagOne, - featureFlagTwo, - featureFlagThree, - ], [], -1); - cache.addSplit(featureFlagWithEmptyFS); - - // Adding an existing FF should not affect the cache - cache.update([featureFlagTwo], [], -1); - - expect(cache.getNamesByFlagSets(['o'])).toEqual([new Set(['ff_one', 'ff_two'])]); - expect(cache.getNamesByFlagSets(['n'])).toEqual([new Set(['ff_one'])]); - expect(cache.getNamesByFlagSets(['e'])).toEqual([new Set(['ff_one', 'ff_three'])]); - expect(cache.getNamesByFlagSets(['t'])).toEqual([emptySet]); // 't' not in filter - expect(cache.getNamesByFlagSets(['o', 'n', 'e'])).toEqual([new Set(['ff_one', 'ff_two']), new Set(['ff_one']), new Set(['ff_one', 'ff_three'])]); - - cache.addSplit({ ...featureFlagOne, sets: ['1'] }); - - expect(cache.getNamesByFlagSets(['1'])).toEqual([emptySet]); // '1' not in filter - expect(cache.getNamesByFlagSets(['o'])).toEqual([new Set(['ff_two'])]); - expect(cache.getNamesByFlagSets(['n'])).toEqual([emptySet]); - - cache.addSplit({ ...featureFlagOne, sets: ['x'] }); - expect(cache.getNamesByFlagSets(['x'])).toEqual([new Set(['ff_one'])]); - expect(cache.getNamesByFlagSets(['o', 'e', 'x'])).toEqual([new Set(['ff_two']), new Set(['ff_three']), new Set(['ff_one'])]); - - - cache.removeSplit(featureFlagOne.name); - expect(cache.getNamesByFlagSets(['x'])).toEqual([emptySet]); - - cache.removeSplit(featureFlagOne.name); - expect(cache.getNamesByFlagSets(['y'])).toEqual([emptySet]); // 'y' not in filter - expect(cache.getNamesByFlagSets([])).toEqual([]); - - cache.addSplit(featureFlagWithoutFS); - expect(cache.getNamesByFlagSets([])).toEqual([]); - - cache.clear(); - expect(cache.getNamesByFlagSets(['o', 'e', 'x'])).toEqual([emptySet, emptySet, emptySet]); -}); - -// if FlagSets are not defined, it should store all FlagSets in memory. 
-test('SPLITS CACHE / In Memory / flag set cache tests without filters', () => { - const cacheWithoutFilters = new SplitsCacheInMemory(); - const emptySet = new Set([]); - - cacheWithoutFilters.update([ - featureFlagOne, - featureFlagTwo, - featureFlagThree, - ], [], -1); - cacheWithoutFilters.addSplit(featureFlagWithEmptyFS); - - expect(cacheWithoutFilters.getNamesByFlagSets(['o'])).toEqual([new Set(['ff_one', 'ff_two'])]); - expect(cacheWithoutFilters.getNamesByFlagSets(['n'])).toEqual([new Set(['ff_one'])]); - expect(cacheWithoutFilters.getNamesByFlagSets(['e'])).toEqual([new Set(['ff_one', 'ff_three'])]); - expect(cacheWithoutFilters.getNamesByFlagSets(['t'])).toEqual([new Set(['ff_two', 'ff_three'])]); - expect(cacheWithoutFilters.getNamesByFlagSets(['y'])).toEqual([emptySet]); - expect(cacheWithoutFilters.getNamesByFlagSets(['o', 'n', 'e'])).toEqual([new Set(['ff_one', 'ff_two']), new Set(['ff_one']), new Set(['ff_one', 'ff_three'])]); -}); diff --git a/src/storages/inRedis/SplitsCacheInRedis.ts b/src/storages/inRedis/DefinitionsCacheInRedis.ts similarity index 56% rename from src/storages/inRedis/SplitsCacheInRedis.ts rename to src/storages/inRedis/DefinitionsCacheInRedis.ts index 29978eaf..ff745c23 100644 --- a/src/storages/inRedis/SplitsCacheInRedis.ts +++ b/src/storages/inRedis/DefinitionsCacheInRedis.ts @@ -3,7 +3,7 @@ import { KeyBuilderSS } from '../KeyBuilderSS'; import { ILogger } from '../../logger/types'; import { LOG_PREFIX } from './constants'; import { IDefinition, ISplitFiltersValidation } from '../../dtos/types'; -import { AbstractSplitsCacheAsync } from '../AbstractSplitsCacheAsync'; +import { AbstractDefinitionsCacheAsync } from '../AbstractDefinitionsCacheAsync'; import { returnDifference } from '../../utils/lang/sets'; import type { RedisAdapter } from './RedisAdapter'; @@ -18,23 +18,23 @@ function processPipelineAnswer(results: Array<[Error | null, unknown]> | null): } /** - * ISplitsCacheAsync implementation that stores split definitions 
in Redis. + * IDefinitionsCacheAsync implementation that stores definitions in Redis. * Supported by Node.js */ -export class SplitsCacheInRedis extends AbstractSplitsCacheAsync { +export class DefinitionsCacheInRedis extends AbstractDefinitionsCacheAsync { private readonly log: ILogger; private readonly redis: RedisAdapter; private readonly keys: KeyBuilderSS; private redisError?: Error; - private readonly flagSetsFilter: string[]; + private readonly setsFilter: string[]; constructor(log: ILogger, keys: KeyBuilderSS, redis: RedisAdapter, splitFiltersValidation?: ISplitFiltersValidation) { super(); this.log = log; this.redis = redis; this.keys = keys; - this.flagSetsFilter = splitFiltersValidation ? splitFiltersValidation.groupedFilters.bySet : []; + this.setsFilter = splitFiltersValidation ? splitFiltersValidation.groupedFilters.bySet : []; // There is no need to listen for redis 'error' event, because in that case ioredis calls will be rejected and handled by redis storage adapters. // But it is done just to avoid getting the ioredis message `Unhandled error event`. 
@@ -47,95 +47,95 @@ export class SplitsCacheInRedis extends AbstractSplitsCacheAsync { }); } - private _decrementCounts(split: IDefinition) { - const ttKey = this.keys.buildTrafficTypeKey(split.trafficTypeName); + private _decrementCounts(definition: IDefinition) { + const ttKey = this.keys.buildTrafficTypeKey(definition.trafficTypeName); return this.redis.decr(ttKey).then((count: number) => { if (count === 0) return this.redis.del(ttKey); }); } - private _incrementCounts(split: IDefinition) { - const ttKey = this.keys.buildTrafficTypeKey(split.trafficTypeName); + private _incrementCounts(definition: IDefinition) { + const ttKey = this.keys.buildTrafficTypeKey(definition.trafficTypeName); return this.redis.incr(ttKey); } - private _updateFlagSets(featureFlagName: string, flagSetsOfRemovedFlag?: string[] | null, flagSetsOfAddedFlag?: string[] | null) { - const removeFromFlagSets = returnDifference(flagSetsOfRemovedFlag, flagSetsOfAddedFlag); + private _updateSets(definitionName: string, setsOfRemovedDefinition?: string[] | null, setsOfAddedDefinition?: string[] | null) { + const removeFromSets = returnDifference(setsOfRemovedDefinition, setsOfAddedDefinition); - let addToFlagSets = returnDifference(flagSetsOfAddedFlag, flagSetsOfRemovedFlag); - if (this.flagSetsFilter.length > 0) { - addToFlagSets = addToFlagSets.filter(flagSet => { - return this.flagSetsFilter.some(filterFlagSet => filterFlagSet === flagSet); + let addToSets = returnDifference(setsOfAddedDefinition, setsOfRemovedDefinition); + if (this.setsFilter.length > 0) { + addToSets = addToSets.filter(set => { + return this.setsFilter.some(filterSet => filterSet === set); }); } - const items = [featureFlagName]; + const items = [definitionName]; return Promise.all([ - ...removeFromFlagSets.map(flagSetName => this.redis.srem(this.keys.buildFlagSetKey(flagSetName), items)), - ...addToFlagSets.map(flagSetName => this.redis.sadd(this.keys.buildFlagSetKey(flagSetName), items)) + ...removeFromSets.map(setName => 
this.redis.srem(this.keys.buildSetKey(setName), items)), + ...addToSets.map(setName => this.redis.sadd(this.keys.buildSetKey(setName), items)) ]); } /** - * Add a given split. + * Add a given definition. * The returned promise is resolved when the operation success * or rejected if it fails (e.g., redis operation fails) */ - addSplit(split: IDefinition): Promise { - const name = split.name; - const splitKey = this.keys.buildSplitKey(name); - return this.redis.get(splitKey).then((splitFromStorage: string | null) => { + add(definition: IDefinition): Promise { + const name = definition.name; + const definitionKey = this.keys.buildDefinitionKey(name); + return this.redis.get(definitionKey).then((definitionFromStorage: string | null) => { // handling parsing error - let parsedPreviousSplit: IDefinition, stringifiedNewSplit; + let parsedPreviousDefinition: IDefinition, stringifiedNewDefinition; try { - parsedPreviousSplit = splitFromStorage ? JSON.parse(splitFromStorage) : undefined; - stringifiedNewSplit = JSON.stringify(split); + parsedPreviousDefinition = definitionFromStorage ? 
JSON.parse(definitionFromStorage) : undefined; + stringifiedNewDefinition = JSON.stringify(definition); } catch (e) { throw new Error('Error parsing feature flag definition: ' + e); } - return this.redis.set(splitKey, stringifiedNewSplit).then(() => { + return this.redis.set(definitionKey, stringifiedNewDefinition).then(() => { // avoid unnecessary increment/decrement operations - if (parsedPreviousSplit && parsedPreviousSplit.trafficTypeName === split.trafficTypeName) return; + if (parsedPreviousDefinition && parsedPreviousDefinition.trafficTypeName === definition.trafficTypeName) return; // update traffic type counts - return this._incrementCounts(split).then(() => { - if (parsedPreviousSplit) return this._decrementCounts(parsedPreviousSplit); + return this._incrementCounts(definition).then(() => { + if (parsedPreviousDefinition) return this._decrementCounts(parsedPreviousDefinition); }); - }).then(() => this._updateFlagSets(name, parsedPreviousSplit && parsedPreviousSplit.sets, split.sets)); + }).then(() => this._updateSets(name, parsedPreviousDefinition && parsedPreviousDefinition.sets, definition.sets)); }).then(() => true); } /** - * Remove a given split. - * The returned promise is resolved when the operation success, with true or false indicating if the split existed (and was removed) or not. + * Remove a given definition. + * The returned promise is resolved when the operation success, with true or false indicating if the definition existed (and was removed) or not. * or rejected if it fails (e.g., redis operation fails). 
*/ - removeSplit(name: string) { - return this.getSplit(name).then((split) => { - if (split) { - return this._decrementCounts(split).then(() => this._updateFlagSets(name, split.sets)); + remove(name: string) { + return this.get(name).then((definition) => { + if (definition) { + return this._decrementCounts(definition).then(() => this._updateSets(name, definition.sets)); } }).then(() => { - return this.redis.del(this.keys.buildSplitKey(name)).then((status: number) => status === 1); + return this.redis.del(this.keys.buildDefinitionKey(name)).then((status: number) => status === 1); }); } /** - * Get split definition or null if it's not defined. + * Get definition or null if it's not defined. * Returned promise is rejected if redis operation fails. */ - getSplit(name: string): Promise { + get(name: string): Promise { if (this.redisError) { this.log.error(LOG_PREFIX + this.redisError); return Promise.reject(this.redisError); } - return this.redis.get(this.keys.buildSplitKey(name)) - .then((maybeSplit: string | null) => maybeSplit && JSON.parse(maybeSplit)); + return this.redis.get(this.keys.buildDefinitionKey(name)) + .then((maybeDefinition: string | null) => maybeDefinition && JSON.parse(maybeDefinition)); } /** @@ -144,7 +144,7 @@ export class SplitsCacheInRedis extends AbstractSplitsCacheAsync { * or rejected if it fails. */ setChangeNumber(changeNumber: number): Promise { - return this.redis.set(this.keys.buildSplitsTillKey(), changeNumber + '').then( + return this.redis.set(this.keys.buildDefinitionsTillKey(), changeNumber + '').then( (status: string | null) => status === 'OK' ); } @@ -155,7 +155,7 @@ export class SplitsCacheInRedis extends AbstractSplitsCacheAsync { * The promise will never be rejected. 
*/ getChangeNumber(): Promise { - return this.redis.get(this.keys.buildSplitsTillKey()).then((value: string | null) => { + return this.redis.get(this.keys.buildDefinitionsTillKey()).then((value: string | null) => { const i = parseInt(value as string, 10); return isNaNNumber(i) ? -1 : i; @@ -166,44 +166,44 @@ export class SplitsCacheInRedis extends AbstractSplitsCacheAsync { } /** - * Get list of all split definitions. - * The returned promise is resolved with the list of split definitions, + * Get list of all definitions. + * The returned promise is resolved with the list of definitions, * or rejected if redis operation fails. */ // @TODO we need to benchmark which is the maximun number of commands we could pipeline without kill redis performance. getAll(): Promise { - return this.redis.keys(this.keys.searchPatternForSplitKeys()) + return this.redis.keys(this.keys.searchPatternForDefinitionKeys()) .then((listOfKeys: string[]) => this.redis.pipeline(listOfKeys.map((k: string) => ['get', k])).exec()) .then(processPipelineAnswer) - .then((splitDefinitions: string[]) => splitDefinitions.map((splitDefinition: string) => { - return JSON.parse(splitDefinition); + .then((definitions: string[]) => definitions.map((definition: string) => { + return JSON.parse(definition); })); } /** - * Get list of split names. - * The returned promise is resolved with the list of split names, + * Get list of definition names. + * The returned promise is resolved with the list of names, * or rejected if redis operation fails. */ - getSplitNames(): Promise { - return this.redis.keys(this.keys.searchPatternForSplitKeys()).then( + getNames(): Promise { + return this.redis.keys(this.keys.searchPatternForDefinitionKeys()).then( (listOfKeys: string[]) => listOfKeys.map(this.keys.extractKey) ); } /** - * Get list of feature flag names related to a given list of flag set names. 
- * The returned promise is resolved with the list of feature flag names per flag set, + * Get list of definition names related to a given list of set names. + * The returned promise is resolved with the list of names per set, * or rejected if the pipelined redis operation fails (e.g., timeout). */ - getNamesByFlagSets(flagSets: string[]): Promise[]> { - return this.redis.pipeline(flagSets.map(flagSet => ['smembers', this.keys.buildFlagSetKey(flagSet)])).exec() + getNamesBySets(sets: string[]): Promise[]> { + return this.redis.pipeline(sets.map(set => ['smembers', this.keys.buildSetKey(set)])).exec() .then((results: [Error | null, unknown][] | null) => results ? results.map(([e, value]: [Error | null, unknown], index: number) => { if (e === null) return value as string; - this.log.error(LOG_PREFIX + `Could not read result from get members of flag set ${flagSets[index]} due to an error: ${e}`); + this.log.error(LOG_PREFIX + `Could not read result from get members of set ${sets[index]} due to an error: ${e}`); }) : []) - .then((namesByFlagSets: (string | undefined)[]) => namesByFlagSets.map((namesByFlagSet: string | undefined) => new Set(namesByFlagSet))); + .then((namesBySets: (string | undefined)[]) => namesBySets.map((namesBySet: string | undefined) => new Set(namesBySet))); } /** @@ -239,25 +239,25 @@ export class SplitsCacheInRedis extends AbstractSplitsCacheAsync { } /** - * Fetches multiple splits definitions. + * Fetches multiple definitions. * Returned promise is rejected if redis operation fails. 
*/ - getSplits(names: string[]): Promise> { + getMany(names: string[]): Promise> { if (this.redisError) { this.log.error(LOG_PREFIX + this.redisError); return Promise.reject(this.redisError); } - const keys = names.map(name => this.keys.buildSplitKey(name)); + const keys = names.map(name => this.keys.buildDefinitionKey(name)); return this.redis.mget(...keys) - .then((splitDefinitions: (string | null)[]) => { - const splits: Record = {}; + .then((stringifiedDefinitions: (string | null)[]) => { + const definitions: Record = {}; names.forEach((name, idx) => { - const split = splitDefinitions[idx]; - splits[name] = split && JSON.parse(split); + const definition = stringifiedDefinitions[idx]; + definitions[name] = definition && JSON.parse(definition); }); - return Promise.resolve(splits); + return Promise.resolve(definitions); }) .catch((e: unknown) => { this.log.error(LOG_PREFIX + `Could not grab feature flags due to an error: ${e}.`); diff --git a/src/storages/inRedis/__tests__/SplitsCacheInRedis.spec.ts b/src/storages/inRedis/__tests__/DefinitionsCacheInRedis.spec.ts similarity index 57% rename from src/storages/inRedis/__tests__/SplitsCacheInRedis.spec.ts rename to src/storages/inRedis/__tests__/DefinitionsCacheInRedis.spec.ts index 06b3edfe..40f05050 100644 --- a/src/storages/inRedis/__tests__/SplitsCacheInRedis.spec.ts +++ b/src/storages/inRedis/__tests__/DefinitionsCacheInRedis.spec.ts @@ -1,4 +1,4 @@ -import { SplitsCacheInRedis } from '../SplitsCacheInRedis'; +import { DefinitionsCacheInRedis } from '../DefinitionsCacheInRedis'; import { KeyBuilderSS } from '../../KeyBuilderSS'; import { loggerMock } from '../../../logger/__tests__/sdkLogger.mock'; import { splitWithUserTT, splitWithAccountTT, featureFlagOne, featureFlagThree, featureFlagTwo, featureFlagWithEmptyFS, featureFlagWithoutFS } from '../../__tests__/testUtils'; @@ -9,11 +9,11 @@ import { RedisAdapter } from '../RedisAdapter'; const prefix = 'splits_cache_ut'; const keysBuilder = new 
KeyBuilderSS(prefix, metadata); -describe('SPLITS CACHE REDIS', () => { +describe('DEFINITIONS CACHE REDIS', () => { test('add/remove/get splits', async () => { const connection = new RedisAdapter(loggerMock); - const cache = new SplitsCacheInRedis(loggerMock, keysBuilder, connection); + const cache = new DefinitionsCacheInRedis(loggerMock, keysBuilder, connection); await cache.update([splitWithUserTT, splitWithAccountTT], [], -1); @@ -22,107 +22,107 @@ describe('SPLITS CACHE REDIS', () => { expect(values).toHaveLength(2); expect(values).toEqual(values[0].trafficTypeName === splitWithUserTT.trafficTypeName ? [splitWithUserTT, splitWithAccountTT] : [splitWithAccountTT, splitWithUserTT]); - let splitNames = await cache.getSplitNames(); + let splitNames = await cache.getNames(); expect(splitNames.length).toBe(2); expect(splitNames.indexOf('user_ff') !== -1).toBe(true); expect(splitNames.indexOf('account_ff') !== -1).toBe(true); - await cache.removeSplit('user_ff'); + await cache.remove('user_ff'); values = await cache.getAll(); expect(values).toEqual([splitWithAccountTT]); - expect(await cache.getSplit('user_ff')).toEqual(null); - expect(await cache.getSplit('account_ff')).toEqual(splitWithAccountTT); + expect(await cache.get('user_ff')).toEqual(null); + expect(await cache.get('account_ff')).toEqual(splitWithAccountTT); await cache.setChangeNumber(123); expect(await cache.getChangeNumber()).toBe(123); - splitNames = await cache.getSplitNames(); + splitNames = await cache.getNames(); expect(splitNames.indexOf('user_ff') === -1).toBe(true); expect(splitNames.indexOf('account_ff') !== -1).toBe(true); - const splits = await cache.getSplits(['user_ff', 'account_ff']); - expect(splits['user_ff']).toEqual(null); - expect(splits['account_ff']).toEqual(splitWithAccountTT); + const definitions = await cache.getMany(['user_ff', 'account_ff']); + expect(definitions['user_ff']).toEqual(null); + expect(definitions['account_ff']).toEqual(splitWithAccountTT); // Teardown. 
@TODO use cache clear method when implemented await connection.del(keysBuilder.buildTrafficTypeKey('account_tt')); - await connection.del(keysBuilder.buildSplitKey('account_ff')); - await connection.del(keysBuilder.buildSplitsTillKey()); + await connection.del(keysBuilder.buildDefinitionKey('account_ff')); + await connection.del(keysBuilder.buildDefinitionsTillKey()); await connection.disconnect(); }); test('trafficTypeExists', async () => { const connection = new RedisAdapter(loggerMock); - const cache = new SplitsCacheInRedis(loggerMock, keysBuilder, connection); + const cache = new DefinitionsCacheInRedis(loggerMock, keysBuilder, connection); await cache.update([ { ...splitWithUserTT, name: 'split1' }, { ...splitWithAccountTT, name: 'split2' }, { ...splitWithUserTT, name: 'split3' }, ], [], -1); - await cache.addSplit({ ...splitWithUserTT, name: 'split4' }); - await cache.addSplit({ ...splitWithUserTT, name: 'split4' }); // trying to add the same definition for an already added split will not have effect + await cache.add({ ...splitWithUserTT, name: 'split4' }); + await cache.add({ ...splitWithUserTT, name: 'split4' }); // trying to add the same definition for an already added split will not have effect expect(await cache.trafficTypeExists('user_tt')).toBe(true); expect(await cache.trafficTypeExists('account_tt')).toBe(true); expect(await cache.trafficTypeExists('not_existent_tt')).toBe(false); - await cache.removeSplit('split4'); + await cache.remove('split4'); expect(await cache.trafficTypeExists('user_tt')).toBe(true); expect(await cache.trafficTypeExists('account_tt')).toBe(true); expect(await connection.get(keysBuilder.buildTrafficTypeKey('account_tt'))).toBe('1'); - await cache.removeSplit('split3'); - await cache.removeSplit('split2'); + await cache.remove('split3'); + await cache.remove('split2'); expect(await cache.trafficTypeExists('user_tt')).toBe(true); expect(await cache.trafficTypeExists('account_tt')).toBe(false); expect(await 
connection.get(keysBuilder.buildTrafficTypeKey('account_tt'))).toBe(null); // TT entry should be removed in the wrapper - await cache.removeSplit('split1'); + await cache.remove('split1'); expect(await cache.trafficTypeExists('user_tt')).toBe(false); expect(await cache.trafficTypeExists('account_tt')).toBe(false); - await cache.addSplit({ ...splitWithUserTT, name: 'split1' }); + await cache.add({ ...splitWithUserTT, name: 'split1' }); expect(await cache.trafficTypeExists('user_tt')).toBe(true); - await cache.addSplit({ ...splitWithAccountTT, name: 'split1' }); + await cache.add({ ...splitWithAccountTT, name: 'split1' }); expect(await cache.trafficTypeExists('account_tt')).toBe(true); expect(await cache.trafficTypeExists('user_tt')).toBe(false); // Teardown. @TODO use cache clear method when implemented await connection.del(keysBuilder.buildTrafficTypeKey('account_tt')); - await connection.del(keysBuilder.buildSplitKey('malformed')); - await connection.del(keysBuilder.buildSplitKey('split1')); + await connection.del(keysBuilder.buildDefinitionKey('malformed')); + await connection.del(keysBuilder.buildDefinitionKey('split1')); await connection.disconnect(); }); test('killLocally', async () => { const connection = new RedisAdapter(loggerMock); - const cache = new SplitsCacheInRedis(loggerMock, keysBuilder, connection); + const cache = new DefinitionsCacheInRedis(loggerMock, keysBuilder, connection); await cache.update([splitWithUserTT, splitWithAccountTT], [], -1); const initialChangeNumber = await cache.getChangeNumber(); // kill an non-existent split let updated = await cache.killLocally('nonexistent_split', 'other_treatment', 101); - const nonexistentSplit = await cache.getSplit('nonexistent_split'); + const nonexistentSplit = await cache.get('nonexistent_split'); expect(updated).toBe(false); // killLocally resolves without update if split doesn't exist expect(nonexistentSplit).toBe(null); // non-existent split keeps being non-existent // kill an existent split 
updated = await cache.killLocally('user_ff', 'some_treatment', 100); - let lol1Split = await cache.getSplit('user_ff') as IDefinition; + let lol1Split = await cache.get('user_ff') as IDefinition; expect(updated).toBe(true); // killLocally resolves with update if split is changed expect(lol1Split.killed).toBe(true); // existing split must be killed @@ -132,21 +132,21 @@ describe('SPLITS CACHE REDIS', () => { // not update if changeNumber is old updated = await cache.killLocally('user_ff', 'some_treatment_2', 90); - lol1Split = await cache.getSplit('user_ff') as IDefinition; + lol1Split = await cache.get('user_ff') as IDefinition; expect(updated).toBe(false); // killLocally resolves without update if changeNumber is old expect(lol1Split.defaultTreatment).not.toBe('some_treatment_2'); // existing split is not updated if given changeNumber is older // Delete splits and TT keys await cache.update([], [splitWithUserTT.name, splitWithAccountTT.name], -1); - await connection.del(keysBuilder.buildSplitsTillKey()); + await connection.del(keysBuilder.buildDefinitionsTillKey()); expect(await connection.keys(`${prefix}*`)).toHaveLength(0); await connection.disconnect(); }); test('flag set cache tests', async () => { const connection = new RedisAdapter(loggerMock); // @ts-ignore - const cache = new SplitsCacheInRedis(loggerMock, keysBuilder, connection, { groupedFilters: { bySet: ['o', 'n', 'e', 'x'] } }); + const cache = new DefinitionsCacheInRedis(loggerMock, keysBuilder, connection, { groupedFilters: { bySet: ['o', 'n', 'e', 'x'] } }); const emptySet = new Set([]); @@ -155,44 +155,44 @@ describe('SPLITS CACHE REDIS', () => { featureFlagTwo, featureFlagThree, ], [], -1); - await cache.addSplit(featureFlagWithEmptyFS); + await cache.add(featureFlagWithEmptyFS); - expect(await cache.getNamesByFlagSets(['o'])).toEqual([new Set(['ff_one', 'ff_two'])]); - expect(await cache.getNamesByFlagSets(['n'])).toEqual([new Set(['ff_one'])]); - expect(await 
cache.getNamesByFlagSets(['e'])).toEqual([new Set(['ff_one', 'ff_three'])]); - expect(await cache.getNamesByFlagSets(['t'])).toEqual([emptySet]); // 't' not in filter - expect(await cache.getNamesByFlagSets(['o', 'n', 'e'])).toEqual([new Set(['ff_one', 'ff_two']), new Set(['ff_one']), new Set(['ff_one', 'ff_three'])]); + expect(await cache.getNamesBySets(['o'])).toEqual([new Set(['ff_one', 'ff_two'])]); + expect(await cache.getNamesBySets(['n'])).toEqual([new Set(['ff_one'])]); + expect(await cache.getNamesBySets(['e'])).toEqual([new Set(['ff_one', 'ff_three'])]); + expect(await cache.getNamesBySets(['t'])).toEqual([emptySet]); // 't' not in filter + expect(await cache.getNamesBySets(['o', 'n', 'e'])).toEqual([new Set(['ff_one', 'ff_two']), new Set(['ff_one']), new Set(['ff_one', 'ff_three'])]); - await cache.addSplit({ ...featureFlagOne, sets: ['1'] }); + await cache.add({ ...featureFlagOne, sets: ['1'] }); - expect(await cache.getNamesByFlagSets(['1'])).toEqual([emptySet]); // '1' not in filter - expect(await cache.getNamesByFlagSets(['o'])).toEqual([new Set(['ff_two'])]); - expect(await cache.getNamesByFlagSets(['n'])).toEqual([emptySet]); + expect(await cache.getNamesBySets(['1'])).toEqual([emptySet]); // '1' not in filter + expect(await cache.getNamesBySets(['o'])).toEqual([new Set(['ff_two'])]); + expect(await cache.getNamesBySets(['n'])).toEqual([emptySet]); - await cache.addSplit({ ...featureFlagOne, sets: ['x'] }); - expect(await cache.getNamesByFlagSets(['x'])).toEqual([new Set(['ff_one'])]); - expect(await cache.getNamesByFlagSets(['o', 'e', 'x'])).toEqual([new Set(['ff_two']), new Set(['ff_three']), new Set(['ff_one'])]); + await cache.add({ ...featureFlagOne, sets: ['x'] }); + expect(await cache.getNamesBySets(['x'])).toEqual([new Set(['ff_one'])]); + expect(await cache.getNamesBySets(['o', 'e', 'x'])).toEqual([new Set(['ff_two']), new Set(['ff_three']), new Set(['ff_one'])]); // @ts-ignore Simulate an error in connection.pipeline().exec() 
jest.spyOn(connection, 'pipeline').mockImplementationOnce(() => { return { exec: () => Promise.resolve([['error', null], [null, ['ff_three']], [null, ['ff_one']]]) }; }); - expect(await cache.getNamesByFlagSets(['o', 'e', 'x'])).toEqual([emptySet, new Set(['ff_three']), new Set(['ff_one'])]); + expect(await cache.getNamesBySets(['o', 'e', 'x'])).toEqual([emptySet, new Set(['ff_three']), new Set(['ff_one'])]); (connection.pipeline as jest.Mock).mockRestore(); - await cache.removeSplit(featureFlagOne.name); - expect(await cache.getNamesByFlagSets(['x'])).toEqual([emptySet]); + await cache.remove(featureFlagOne.name); + expect(await cache.getNamesBySets(['x'])).toEqual([emptySet]); - await cache.removeSplit(featureFlagOne.name); - expect(await cache.getNamesByFlagSets(['y'])).toEqual([emptySet]); // 'y' not in filter - expect(await cache.getNamesByFlagSets([])).toEqual([]); + await cache.remove(featureFlagOne.name); + expect(await cache.getNamesBySets(['y'])).toEqual([emptySet]); // 'y' not in filter + expect(await cache.getNamesBySets([])).toEqual([]); - await cache.addSplit({ ...featureFlagWithoutFS, name: featureFlagWithEmptyFS.name }); - expect(await cache.getNamesByFlagSets([])).toEqual([]); + await cache.add({ ...featureFlagWithoutFS, name: featureFlagWithEmptyFS.name }); + expect(await cache.getNamesBySets([])).toEqual([]); // Delete splits, TT and flag set keys await cache.update([], [featureFlagThree.name, featureFlagTwo.name, featureFlagWithEmptyFS.name], -1); - await connection.del(keysBuilder.buildSplitsTillKey()); + await connection.del(keysBuilder.buildDefinitionsTillKey()); expect(await connection.keys(`${prefix}*`)).toHaveLength(0); await connection.disconnect(); }); @@ -200,7 +200,7 @@ describe('SPLITS CACHE REDIS', () => { // if FlagSets filter is not defined, it should store all FlagSets in memory. 
test('flag set cache tests without filters', async () => { const connection = new RedisAdapter(loggerMock); - const cacheWithoutFilters = new SplitsCacheInRedis(loggerMock, keysBuilder, connection); + const cacheWithoutFilters = new DefinitionsCacheInRedis(loggerMock, keysBuilder, connection); const emptySet = new Set([]); @@ -209,18 +209,18 @@ describe('SPLITS CACHE REDIS', () => { featureFlagTwo, featureFlagThree ], [], -1); - await cacheWithoutFilters.addSplit(featureFlagWithEmptyFS); + await cacheWithoutFilters.add(featureFlagWithEmptyFS); - expect(await cacheWithoutFilters.getNamesByFlagSets(['o'])).toEqual([new Set(['ff_one', 'ff_two'])]); - expect(await cacheWithoutFilters.getNamesByFlagSets(['n'])).toEqual([new Set(['ff_one'])]); - expect(await cacheWithoutFilters.getNamesByFlagSets(['e'])).toEqual([new Set(['ff_one', 'ff_three'])]); - expect(await cacheWithoutFilters.getNamesByFlagSets(['t'])).toEqual([new Set(['ff_two', 'ff_three'])]); - expect(await cacheWithoutFilters.getNamesByFlagSets(['y'])).toEqual([emptySet]); - expect(await cacheWithoutFilters.getNamesByFlagSets(['o', 'n', 'e'])).toEqual([new Set(['ff_one', 'ff_two']), new Set(['ff_one']), new Set(['ff_one', 'ff_three'])]); + expect(await cacheWithoutFilters.getNamesBySets(['o'])).toEqual([new Set(['ff_one', 'ff_two'])]); + expect(await cacheWithoutFilters.getNamesBySets(['n'])).toEqual([new Set(['ff_one'])]); + expect(await cacheWithoutFilters.getNamesBySets(['e'])).toEqual([new Set(['ff_one', 'ff_three'])]); + expect(await cacheWithoutFilters.getNamesBySets(['t'])).toEqual([new Set(['ff_two', 'ff_three'])]); + expect(await cacheWithoutFilters.getNamesBySets(['y'])).toEqual([emptySet]); + expect(await cacheWithoutFilters.getNamesBySets(['o', 'n', 'e'])).toEqual([new Set(['ff_one', 'ff_two']), new Set(['ff_one']), new Set(['ff_one', 'ff_three'])]); // Delete splits, TT and flag set keys await cacheWithoutFilters.update([], [featureFlagThree.name, featureFlagTwo.name, featureFlagOne.name, 
featureFlagWithEmptyFS.name], -1); - await connection.del(keysBuilder.buildSplitsTillKey()); + await connection.del(keysBuilder.buildDefinitionsTillKey()); expect(await connection.keys(`${prefix}*`)).toHaveLength(0); await connection.disconnect(); }); diff --git a/src/storages/inRedis/index.ts b/src/storages/inRedis/index.ts index e5d86d6e..5294ef80 100644 --- a/src/storages/inRedis/index.ts +++ b/src/storages/inRedis/index.ts @@ -2,7 +2,7 @@ import type { RedisAdapter } from './RedisAdapter'; import { IStorageAsync, IStorageAsyncFactory, IStorageFactoryParams } from '../types'; import { validatePrefix } from '../KeyBuilder'; import { KeyBuilderSS } from '../KeyBuilderSS'; -import { SplitsCacheInRedis } from './SplitsCacheInRedis'; +import { DefinitionsCacheInRedis } from './DefinitionsCacheInRedis'; import { SegmentsCacheInRedis } from './SegmentsCacheInRedis'; import { ImpressionsCacheInRedis } from './ImpressionsCacheInRedis'; import { EventsCacheInRedis } from './EventsCacheInRedis'; @@ -59,7 +59,7 @@ export function InRedisStorage(options: InRedisStorageOptions = {}): IStorageAsy }); return { - splits: new SplitsCacheInRedis(log, keys, redisClient, settings.sync.__splitFiltersValidation), + definitions: new DefinitionsCacheInRedis(log, keys, redisClient, settings.sync.__splitFiltersValidation), rbSegments: new RBSegmentsCacheInRedis(log, keys, redisClient), segments: new SegmentsCacheInRedis(log, keys, redisClient), impressions: new ImpressionsCacheInRedis(log, keys.buildImpressionsKey(), redisClient, metadata), diff --git a/src/storages/pluggable/DefinitionsCachePluggable.ts b/src/storages/pluggable/DefinitionsCachePluggable.ts new file mode 100644 index 00000000..92c4b474 --- /dev/null +++ b/src/storages/pluggable/DefinitionsCachePluggable.ts @@ -0,0 +1,230 @@ +import { isFiniteNumber, isNaNNumber } from '../../utils/lang'; +import { KeyBuilder } from '../KeyBuilder'; +import { IPluggableStorageWrapper } from '../types'; +import { ILogger } from 
'../../logger/types'; +import { IDefinition, ISplitFiltersValidation } from '../../dtos/types'; +import { LOG_PREFIX } from './constants'; +import { AbstractDefinitionsCacheAsync } from '../AbstractDefinitionsCacheAsync'; +import { returnDifference } from '../../utils/lang/sets'; + +/** + * IDefinitionsCacheAsync implementation for pluggable storages. + */ +export class DefinitionsCachePluggable extends AbstractDefinitionsCacheAsync { + + private readonly log: ILogger; + private readonly keys: KeyBuilder; + private readonly wrapper: IPluggableStorageWrapper; + private readonly setsFilter: string[]; + + /** + * Create a DefinitionsCache that uses a storage wrapper. + * @param log - Logger instance. + * @param keys - Key builder. + * @param wrapper - Adapted wrapper storage. + */ + constructor(log: ILogger, keys: KeyBuilder, wrapper: IPluggableStorageWrapper, splitFiltersValidation?: ISplitFiltersValidation) { + super(); + this.log = log; + this.keys = keys; + this.wrapper = wrapper; + this.setsFilter = splitFiltersValidation ? 
splitFiltersValidation.groupedFilters.bySet : []; + } + + private _decrementCounts(definition: IDefinition) { + const ttKey = this.keys.buildTrafficTypeKey(definition.trafficTypeName); + return this.wrapper.decr(ttKey).then(count => { + if (count === 0) return this.wrapper.del(ttKey); + }); + } + + private _incrementCounts(definition: IDefinition) { + const ttKey = this.keys.buildTrafficTypeKey(definition.trafficTypeName); + return this.wrapper.incr(ttKey); + } + + private _updateSets(definitionName: string, setsOfRemovedDefinition?: string[] | null, setsOfAddedDefinition?: string[] | null) { + const removeFromSets = returnDifference(setsOfRemovedDefinition, setsOfAddedDefinition); + + let addToSets = returnDifference(setsOfAddedDefinition, setsOfRemovedDefinition); + if (this.setsFilter.length > 0) { + addToSets = addToSets.filter(set => { + return this.setsFilter.some(filterSet => filterSet === set); + }); + } + + const items = [definitionName]; + + return Promise.all([ + ...removeFromSets.map(setName => this.wrapper.removeItems(this.keys.buildSetKey(setName), items)), + ...addToSets.map(setName => this.wrapper.addItems(this.keys.buildSetKey(setName), items)) + ]); + } + + /** + * Add a given definition. + * The returned promise is resolved when the operation success + * or rejected if it fails (e.g., wrapper operation fails) + */ + add(definition: IDefinition): Promise { + const name = definition.name; + const definitionKey = this.keys.buildDefinitionKey(name); + return this.wrapper.get(definitionKey).then(definitionFromStorage => { + + // handling parsing error + let parsedPreviousDefinition: IDefinition, stringifiedNewDefinition; + try { + parsedPreviousDefinition = definitionFromStorage ? 
JSON.parse(definitionFromStorage) : undefined; + stringifiedNewDefinition = JSON.stringify(definition); + } catch (e) { + throw new Error('Error parsing feature flag definition: ' + e); + } + + return this.wrapper.set(definitionKey, stringifiedNewDefinition).then(() => { + // avoid unnecessary increment/decrement operations + if (parsedPreviousDefinition && parsedPreviousDefinition.trafficTypeName === definition.trafficTypeName) return; + + // update traffic type counts + return this._incrementCounts(definition).then(() => { + if (parsedPreviousDefinition) return this._decrementCounts(parsedPreviousDefinition); + }); + }).then(() => this._updateSets(name, parsedPreviousDefinition && parsedPreviousDefinition.sets, definition.sets)); + }).then(() => true); + } + + /** + * Remove a given definition. + * The returned promise is resolved when the operation success, with a boolean indicating if the definition existed or not. + * or rejected if it fails (e.g., wrapper operation fails). + */ + remove(name: string) { + return this.get(name).then((definition) => { + if (definition) { + return this._decrementCounts(definition).then(() => this._updateSets(name, definition.sets)); + } + }).then(() => { + return this.wrapper.del(this.keys.buildDefinitionKey(name)); + }); + } + + /** + * Get definition. + * The returned promise is resolved with the definition or null if it's not defined, + * or rejected if wrapper operation fails. + */ + get(name: string): Promise { + return this.wrapper.get(this.keys.buildDefinitionKey(name)) + .then(maybeDefinition => maybeDefinition && JSON.parse(maybeDefinition)); + } + + /** + * Get list of definitions. + * The returned promise is resolved with a map of names to their definition or null if it's not defined, + * or rejected if wrapper operation fails. 
+ */ + getMany(names: string[]): Promise> { + const keys = names.map(name => this.keys.buildDefinitionKey(name)); + + return this.wrapper.getMany(keys).then(stringifiedDefinitions => { + const definitions: Record = {}; + names.forEach((name, idx) => { + const definition = stringifiedDefinitions[idx]; + definitions[name] = definition && JSON.parse(definition); + }); + return Promise.resolve(definitions); + }); + } + + /** + * Get list of all definitions. + * The returned promise is resolved with the list of definitions, + * or rejected if wrapper operation fails. + */ + getAll(): Promise { + return this.wrapper.getKeysByPrefix(this.keys.buildDefinitionKeyPrefix()) + .then((listOfKeys) => this.wrapper.getMany(listOfKeys)) + .then((definitions) => definitions.map((definition) => { + return JSON.parse(definition as string); + })); + } + + /** + * Get list of definition names. + * The returned promise is resolved with the list of names, + * or rejected if wrapper operation fails. + */ + getNames(): Promise { + return this.wrapper.getKeysByPrefix(this.keys.buildDefinitionKeyPrefix()).then( + (listOfKeys) => listOfKeys.map(this.keys.extractKey) + ); + } + + /** + * Get list of definition names related to a given list of set names. + * The returned promise is resolved with the list of names per set. + * It never rejects (If there is a wrapper error for some set, an empty set is returned for it). + */ + getNamesBySets(sets: string[]): Promise[]> { + return Promise.all(sets.map(set => { + const setKey = this.keys.buildSetKey(set); + return this.wrapper.getItems(setKey).catch(() => []); + })).then(namesBySets => namesBySets.map(namesBySet => new Set(namesBySet))); + } + + /** + * Check traffic type existence. + * The returned promise is resolved with a boolean indicating whether the TT exist or not. + * In case of wrapper operation failures, the promise resolves with a true value, assuming that the TT might exist. + * It will never be rejected. 
+ */ + trafficTypeExists(trafficType: string): Promise { + // If there is a number there should be > 0, otherwise the TT is considered as not existent. + return this.wrapper.get(this.keys.buildTrafficTypeKey(trafficType)) + .then((ttCount: string | null | number) => { + if (ttCount === null) return false; // if entry doesn't exist, means that TT doesn't exist + + ttCount = parseInt(ttCount as string, 10); + if (!isFiniteNumber(ttCount) || ttCount < 0) { + this.log.info(LOG_PREFIX + `Could not validate traffic type existence of ${trafficType} due to data corruption of some sorts.`); + return false; + } + + return ttCount > 0; + }).catch(e => { + this.log.error(LOG_PREFIX + `Could not validate traffic type existence of ${trafficType} due to an error: ${e}.`); + // If there is an error, bypass the validation so the event can get tracked. + return true; + }); + } + + /** + * Set till number. + * The returned promise is resolved when the operation success, + * or rejected if it fails (e.g., wrapper operation fails). + */ + setChangeNumber(changeNumber: number) { + return this.wrapper.set(this.keys.buildDefinitionsTillKey(), changeNumber + ''); + } + + /** + * Get till number or -1 if it's not defined. + * The returned promise is resolved with the changeNumber or -1 if it doesn't exist or a wrapper operation fails. + * The promise will never be rejected. + */ + getChangeNumber(): Promise { + return this.wrapper.get(this.keys.buildDefinitionsTillKey()).then((value) => { + const i = parseInt(value as string, 10); + + return isNaNNumber(i) ? -1 : i; + }).catch((e) => { + this.log.error(LOG_PREFIX + 'Could not retrieve changeNumber from storage. 
Error: ' + e); + return -1; + }); + } + + // @TODO implement if required by DataLoader or producer mode + clear() { + return Promise.resolve(true); + } + +} diff --git a/src/storages/pluggable/SplitsCachePluggable.ts b/src/storages/pluggable/SplitsCachePluggable.ts deleted file mode 100644 index 3285b592..00000000 --- a/src/storages/pluggable/SplitsCachePluggable.ts +++ /dev/null @@ -1,230 +0,0 @@ -import { isFiniteNumber, isNaNNumber } from '../../utils/lang'; -import { KeyBuilder } from '../KeyBuilder'; -import { IPluggableStorageWrapper } from '../types'; -import { ILogger } from '../../logger/types'; -import { IDefinition, ISplitFiltersValidation } from '../../dtos/types'; -import { LOG_PREFIX } from './constants'; -import { AbstractSplitsCacheAsync } from '../AbstractSplitsCacheAsync'; -import { returnDifference } from '../../utils/lang/sets'; - -/** - * ISplitsCacheAsync implementation for pluggable storages. - */ -export class SplitsCachePluggable extends AbstractSplitsCacheAsync { - - private readonly log: ILogger; - private readonly keys: KeyBuilder; - private readonly wrapper: IPluggableStorageWrapper; - private readonly flagSetsFilter: string[]; - - /** - * Create a SplitsCache that uses a storage wrapper. - * @param log - Logger instance. - * @param keys - Key builder. - * @param wrapper - Adapted wrapper storage. - */ - constructor(log: ILogger, keys: KeyBuilder, wrapper: IPluggableStorageWrapper, splitFiltersValidation?: ISplitFiltersValidation) { - super(); - this.log = log; - this.keys = keys; - this.wrapper = wrapper; - this.flagSetsFilter = splitFiltersValidation ? 
splitFiltersValidation.groupedFilters.bySet : []; - } - - private _decrementCounts(split: IDefinition) { - const ttKey = this.keys.buildTrafficTypeKey(split.trafficTypeName); - return this.wrapper.decr(ttKey).then(count => { - if (count === 0) return this.wrapper.del(ttKey); - }); - } - - private _incrementCounts(split: IDefinition) { - const ttKey = this.keys.buildTrafficTypeKey(split.trafficTypeName); - return this.wrapper.incr(ttKey); - } - - private _updateFlagSets(featureFlagName: string, flagSetsOfRemovedFlag?: string[] | null, flagSetsOfAddedFlag?: string[] | null) { - const removeFromFlagSets = returnDifference(flagSetsOfRemovedFlag, flagSetsOfAddedFlag); - - let addToFlagSets = returnDifference(flagSetsOfAddedFlag, flagSetsOfRemovedFlag); - if (this.flagSetsFilter.length > 0) { - addToFlagSets = addToFlagSets.filter(flagSet => { - return this.flagSetsFilter.some(filterFlagSet => filterFlagSet === flagSet); - }); - } - - const items = [featureFlagName]; - - return Promise.all([ - ...removeFromFlagSets.map(flagSetName => this.wrapper.removeItems(this.keys.buildFlagSetKey(flagSetName), items)), - ...addToFlagSets.map(flagSetName => this.wrapper.addItems(this.keys.buildFlagSetKey(flagSetName), items)) - ]); - } - - /** - * Add a given split. - * The returned promise is resolved when the operation success - * or rejected if it fails (e.g., wrapper operation fails) - */ - addSplit(split: IDefinition): Promise { - const name = split.name; - const splitKey = this.keys.buildSplitKey(name); - return this.wrapper.get(splitKey).then(splitFromStorage => { - - // handling parsing error - let parsedPreviousSplit: IDefinition, stringifiedNewSplit; - try { - parsedPreviousSplit = splitFromStorage ? 
JSON.parse(splitFromStorage) : undefined; - stringifiedNewSplit = JSON.stringify(split); - } catch (e) { - throw new Error('Error parsing feature flag definition: ' + e); - } - - return this.wrapper.set(splitKey, stringifiedNewSplit).then(() => { - // avoid unnecessary increment/decrement operations - if (parsedPreviousSplit && parsedPreviousSplit.trafficTypeName === split.trafficTypeName) return; - - // update traffic type counts - return this._incrementCounts(split).then(() => { - if (parsedPreviousSplit) return this._decrementCounts(parsedPreviousSplit); - }); - }).then(() => this._updateFlagSets(name, parsedPreviousSplit && parsedPreviousSplit.sets, split.sets)); - }).then(() => true); - } - - /** - * Remove a given split. - * The returned promise is resolved when the operation success, with a boolean indicating if the split existed or not. - * or rejected if it fails (e.g., wrapper operation fails). - */ - removeSplit(name: string) { - return this.getSplit(name).then((split) => { - if (split) { - return this._decrementCounts(split).then(() => this._updateFlagSets(name, split.sets)); - } - }).then(() => { - return this.wrapper.del(this.keys.buildSplitKey(name)); - }); - } - - /** - * Get split. - * The returned promise is resolved with the split definition or null if it's not defined, - * or rejected if wrapper operation fails. - */ - getSplit(name: string): Promise { - return this.wrapper.get(this.keys.buildSplitKey(name)) - .then(maybeSplit => maybeSplit && JSON.parse(maybeSplit)); - } - - /** - * Get list of splits. - * The returned promise is resolved with a map of split names to their split definition or null if it's not defined, - * or rejected if wrapper operation fails. 
- */ - getSplits(names: string[]): Promise> { - const keys = names.map(name => this.keys.buildSplitKey(name)); - - return this.wrapper.getMany(keys).then(splitDefinitions => { - const splits: Record = {}; - names.forEach((name, idx) => { - const split = splitDefinitions[idx]; - splits[name] = split && JSON.parse(split); - }); - return Promise.resolve(splits); - }); - } - - /** - * Get list of all split definitions. - * The returned promise is resolved with the list of split definitions, - * or rejected if wrapper operation fails. - */ - getAll(): Promise { - return this.wrapper.getKeysByPrefix(this.keys.buildSplitKeyPrefix()) - .then((listOfKeys) => this.wrapper.getMany(listOfKeys)) - .then((splitDefinitions) => splitDefinitions.map((splitDefinition) => { - return JSON.parse(splitDefinition as string); - })); - } - - /** - * Get list of split names. - * The returned promise is resolved with the list of split names, - * or rejected if wrapper operation fails. - */ - getSplitNames(): Promise { - return this.wrapper.getKeysByPrefix(this.keys.buildSplitKeyPrefix()).then( - (listOfKeys) => listOfKeys.map(this.keys.extractKey) - ); - } - - /** - * Get list of feature flag names related to a given list of flag set names. - * The returned promise is resolved with the list of feature flag names per flag set. - * It never rejects (If there is a wrapper error for some flag set, an empty set is returned for it). - */ - getNamesByFlagSets(flagSets: string[]): Promise[]> { - return Promise.all(flagSets.map(flagSet => { - const flagSetKey = this.keys.buildFlagSetKey(flagSet); - return this.wrapper.getItems(flagSetKey).catch(() => []); - })).then(namesByFlagSets => namesByFlagSets.map(namesByFlagSet => new Set(namesByFlagSet))); - } - - /** - * Check traffic type existence. - * The returned promise is resolved with a boolean indicating whether the TT exist or not. - * In case of wrapper operation failures, the promise resolves with a true value, assuming that the TT might exist. 
- * It will never be rejected. - */ - trafficTypeExists(trafficType: string): Promise { - // If there is a number there should be > 0, otherwise the TT is considered as not existent. - return this.wrapper.get(this.keys.buildTrafficTypeKey(trafficType)) - .then((ttCount: string | null | number) => { - if (ttCount === null) return false; // if entry doesn't exist, means that TT doesn't exist - - ttCount = parseInt(ttCount as string, 10); - if (!isFiniteNumber(ttCount) || ttCount < 0) { - this.log.info(LOG_PREFIX + `Could not validate traffic type existence of ${trafficType} due to data corruption of some sorts.`); - return false; - } - - return ttCount > 0; - }).catch(e => { - this.log.error(LOG_PREFIX + `Could not validate traffic type existence of ${trafficType} due to an error: ${e}.`); - // If there is an error, bypass the validation so the event can get tracked. - return true; - }); - } - - /** - * Set till number. - * The returned promise is resolved when the operation success, - * or rejected if it fails (e.g., wrapper operation fails). - */ - setChangeNumber(changeNumber: number) { - return this.wrapper.set(this.keys.buildSplitsTillKey(), changeNumber + ''); - } - - /** - * Get till number or -1 if it's not defined. - * The returned promise is resolved with the changeNumber or -1 if it doesn't exist or a wrapper operation fails. - * The promise will never be rejected. - */ - getChangeNumber(): Promise { - return this.wrapper.get(this.keys.buildSplitsTillKey()).then((value) => { - const i = parseInt(value as string, 10); - - return isNaNNumber(i) ? -1 : i; - }).catch((e) => { - this.log.error(LOG_PREFIX + 'Could not retrieve changeNumber from storage. 
Error: ' + e); - return -1; - }); - } - - // @TODO implement if required by DataLoader or producer mode - clear() { - return Promise.resolve(true); - } - -} diff --git a/src/storages/pluggable/__tests__/SplitsCachePluggable.spec.ts b/src/storages/pluggable/__tests__/DefinitionsCachePluggable.spec.ts similarity index 52% rename from src/storages/pluggable/__tests__/SplitsCachePluggable.spec.ts rename to src/storages/pluggable/__tests__/DefinitionsCachePluggable.spec.ts index 55b063b5..ec38b7fc 100644 --- a/src/storages/pluggable/__tests__/SplitsCachePluggable.spec.ts +++ b/src/storages/pluggable/__tests__/DefinitionsCachePluggable.spec.ts @@ -1,4 +1,4 @@ -import { SplitsCachePluggable } from '../SplitsCachePluggable'; +import { DefinitionsCachePluggable } from '../DefinitionsCachePluggable'; import { KeyBuilder } from '../../KeyBuilder'; import { loggerMock } from '../../../logger/__tests__/sdkLogger.mock'; import { wrapperMockFactory } from './wrapper.mock'; @@ -7,10 +7,10 @@ import { IDefinition } from '../../../dtos/types'; const keysBuilder = new KeyBuilder(); -describe('SPLITS CACHE PLUGGABLE', () => { +describe('DEFINITIONS CACHE PLUGGABLE', () => { test('add/remove/get splits', async () => { - const cache = new SplitsCachePluggable(loggerMock, keysBuilder, wrapperMockFactory()); + const cache = new DefinitionsCachePluggable(loggerMock, keysBuilder, wrapperMockFactory()); await cache.update([splitWithUserTT, splitWithAccountTT], [], -1); @@ -18,103 +18,103 @@ describe('SPLITS CACHE PLUGGABLE', () => { expect(values).toEqual([splitWithUserTT, splitWithAccountTT]); - // Assert getSplits - let valuesObj = await cache.getSplits([splitWithUserTT.name, splitWithAccountTT.name]); + // Assert getDefinitions + let valuesObj = await cache.getMany([splitWithUserTT.name, splitWithAccountTT.name]); expect(valuesObj).toEqual(values.reduce>((acc, split) => { acc[split.name] = split; return acc; }, {})); - // Assert getSplitNames - let splitNames = await 
cache.getSplitNames(); + // Assert getDefinitionNames + let splitNames = await cache.getNames(); expect(splitNames.length).toBe(2); expect(splitNames.indexOf('user_ff') !== -1).toBe(true); expect(splitNames.indexOf('account_ff') !== -1).toBe(true); - await cache.removeSplit('user_ff'); + await cache.remove('user_ff'); values = await cache.getAll(); expect(values).toEqual([splitWithAccountTT]); - expect(await cache.getSplit('user_ff')).toEqual(null); - expect(await cache.getSplit('account_ff')).toEqual(splitWithAccountTT); + expect(await cache.get('user_ff')).toEqual(null); + expect(await cache.get('account_ff')).toEqual(splitWithAccountTT); await cache.setChangeNumber(123); expect(await cache.getChangeNumber()).toBe(123); - splitNames = await cache.getSplitNames(); + splitNames = await cache.getNames(); expect(splitNames.indexOf('user_ff') === -1).toBe(true); expect(splitNames.indexOf('account_ff') !== -1).toBe(true); - const splits = await cache.getSplits(['user_ff', 'account_ff']); - expect(splits['user_ff']).toEqual(null); - expect(splits['account_ff']).toEqual(splitWithAccountTT); + const definitions = await cache.getMany(['user_ff', 'account_ff']); + expect(definitions['user_ff']).toEqual(null); + expect(definitions['account_ff']).toEqual(splitWithAccountTT); }); test('trafficTypeExists', async () => { const wrapper = wrapperMockFactory(); - const cache = new SplitsCachePluggable(loggerMock, keysBuilder, wrapper); + const cache = new DefinitionsCachePluggable(loggerMock, keysBuilder, wrapper); await cache.update([ { ...splitWithUserTT, name: 'split1' }, { ...splitWithAccountTT, name: 'split2' }, { ...splitWithUserTT, name: 'split3' }, ], [], -1); - await cache.addSplit({ ...splitWithUserTT, name: 'split4' }); - await cache.addSplit({ ...splitWithUserTT, name: 'split4' }); // trying to add the same definition for an already added split will not have effect + await cache.add({ ...splitWithUserTT, name: 'split4' }); + await cache.add({ ...splitWithUserTT, name: 
'split4' }); // trying to add the same definition for an already added split will have no effect expect(await cache.trafficTypeExists('user_tt')).toBe(true); expect(await cache.trafficTypeExists('account_tt')).toBe(true); expect(await cache.trafficTypeExists('not_existent_tt')).toBe(false); - await cache.removeSplit('split4'); + await cache.remove('split4'); expect(await cache.trafficTypeExists('user_tt')).toBe(true); expect(await cache.trafficTypeExists('account_tt')).toBe(true); expect(await wrapper.get(keysBuilder.buildTrafficTypeKey('account_tt'))).toBe('1'); - await cache.removeSplit('split3'); - await cache.removeSplit('split2'); expect(await cache.trafficTypeExists('user_tt')).toBe(true); expect(await cache.trafficTypeExists('account_tt')).toBe(false); expect(await wrapper.get(keysBuilder.buildTrafficTypeKey('account_tt'))).toBe(null); // TT entry should be removed in the wrapper - await cache.removeSplit('split1'); + await cache.remove('split1'); expect(await cache.trafficTypeExists('user_tt')).toBe(false); expect(await cache.trafficTypeExists('account_tt')).toBe(false); - await cache.addSplit({ ...splitWithUserTT, name: 'split1' }); + await cache.add({ ...splitWithUserTT, name: 'split1' }); expect(await cache.trafficTypeExists('user_tt')).toBe(true); - await cache.addSplit({ ...splitWithAccountTT, name: 'split1' }); + await cache.add({ ...splitWithAccountTT, name: 'split1' }); expect(await cache.trafficTypeExists('account_tt')).toBe(true); expect(await cache.trafficTypeExists('user_tt')).toBe(false); }); test('killLocally', async () => { const wrapper = wrapperMockFactory(); - const cache = new SplitsCachePluggable(loggerMock, keysBuilder, wrapper); + const cache = new DefinitionsCachePluggable(loggerMock, keysBuilder, wrapper); await cache.update([splitWithUserTT, splitWithAccountTT], [], -1); const initialChangeNumber = await cache.getChangeNumber(); // kill an non-existent split let
updated = await cache.killLocally('nonexistent_split', 'other_treatment', 101); - const nonexistentSplit = await cache.getSplit('nonexistent_split'); + const nonexistentSplit = await cache.get('nonexistent_split'); expect(updated).toBe(false); // killLocally resolves without update if split doesn't exist expect(nonexistentSplit).toBe(null); // non-existent split keeps being non-existent // kill an existent split updated = await cache.killLocally('user_ff', 'some_treatment', 100); - let lol1Split = await cache.getSplit('user_ff') as IDefinition; + let lol1Split = await cache.get('user_ff') as IDefinition; expect(updated).toBe(true); // killLocally resolves with update if split is changed expect(lol1Split.killed).toBe(true); // existing split must be killed @@ -124,20 +124,20 @@ describe('SPLITS CACHE PLUGGABLE', () => { // not update if changeNumber is old updated = await cache.killLocally('user_ff', 'some_treatment_2', 90); - lol1Split = await cache.getSplit('user_ff') as IDefinition; + lol1Split = await cache.get('user_ff') as IDefinition; expect(updated).toBe(false); // killLocally resolves without update if changeNumber is old expect(lol1Split.defaultTreatment).not.toBe('some_treatment_2'); // existing split is not updated if given changeNumber is older // Delete splits and TT keys await cache.update([], [splitWithUserTT.name, splitWithAccountTT.name], -1); - await wrapper.del(keysBuilder.buildSplitsTillKey()); + await wrapper.del(keysBuilder.buildDefinitionsTillKey()); expect(await wrapper.getKeysByPrefix('SPLITIO')).toHaveLength(0); }); test('flag set cache tests', async () => { const wrapper = wrapperMockFactory(); // @ts-ignore - const cache = new SplitsCachePluggable(loggerMock, keysBuilder, wrapper, { groupedFilters: { bySet: ['o', 'n', 'e', 'x'] } }); + const cache = new DefinitionsCachePluggable(loggerMock, keysBuilder, wrapper, { groupedFilters: { bySet: ['o', 'n', 'e', 'x'] } }); const emptySet = new Set([]); await cache.update([ @@ -145,42 +145,42 @@ 
describe('SPLITS CACHE PLUGGABLE', () => { featureFlagTwo, featureFlagThree, ], [], -1); - await cache.addSplit(featureFlagWithEmptyFS); + await cache.add(featureFlagWithEmptyFS); - expect(await cache.getNamesByFlagSets(['o'])).toEqual([new Set(['ff_one', 'ff_two'])]); - expect(await cache.getNamesByFlagSets(['n'])).toEqual([new Set(['ff_one'])]); - expect(await cache.getNamesByFlagSets(['e'])).toEqual([new Set(['ff_one', 'ff_three'])]); - expect(await cache.getNamesByFlagSets(['t'])).toEqual([emptySet]); // 't' not in filter - expect(await cache.getNamesByFlagSets(['o', 'n', 'e'])).toEqual([new Set(['ff_one', 'ff_two']), new Set(['ff_one']), new Set(['ff_one', 'ff_three'])]); + expect(await cache.getNamesBySets(['o'])).toEqual([new Set(['ff_one', 'ff_two'])]); + expect(await cache.getNamesBySets(['n'])).toEqual([new Set(['ff_one'])]); + expect(await cache.getNamesBySets(['e'])).toEqual([new Set(['ff_one', 'ff_three'])]); + expect(await cache.getNamesBySets(['t'])).toEqual([emptySet]); // 't' not in filter + expect(await cache.getNamesBySets(['o', 'n', 'e'])).toEqual([new Set(['ff_one', 'ff_two']), new Set(['ff_one']), new Set(['ff_one', 'ff_three'])]); - await cache.addSplit({ ...featureFlagOne, sets: ['1'] }); + await cache.add({ ...featureFlagOne, sets: ['1'] }); - expect(await cache.getNamesByFlagSets(['1'])).toEqual([emptySet]); // '1' not in filter - expect(await cache.getNamesByFlagSets(['o'])).toEqual([new Set(['ff_two'])]); - expect(await cache.getNamesByFlagSets(['n'])).toEqual([emptySet]); + expect(await cache.getNamesBySets(['1'])).toEqual([emptySet]); // '1' not in filter + expect(await cache.getNamesBySets(['o'])).toEqual([new Set(['ff_two'])]); + expect(await cache.getNamesBySets(['n'])).toEqual([emptySet]); - await cache.addSplit({ ...featureFlagOne, sets: ['x'] }); - expect(await cache.getNamesByFlagSets(['x'])).toEqual([new Set(['ff_one'])]); - expect(await cache.getNamesByFlagSets(['o', 'e', 'x'])).toEqual([new Set(['ff_two']), new 
Set(['ff_three']), new Set(['ff_one'])]); + await cache.add({ ...featureFlagOne, sets: ['x'] }); + expect(await cache.getNamesBySets(['x'])).toEqual([new Set(['ff_one'])]); + expect(await cache.getNamesBySets(['o', 'e', 'x'])).toEqual([new Set(['ff_two']), new Set(['ff_three']), new Set(['ff_one'])]); // Simulate one error in getItems wrapper.getItems.mockImplementationOnce(() => Promise.reject('error')); - expect(await cache.getNamesByFlagSets(['o', 'e', 'x'])).toEqual([emptySet, new Set(['ff_three']), new Set(['ff_one'])]); + expect(await cache.getNamesBySets(['o', 'e', 'x'])).toEqual([emptySet, new Set(['ff_three']), new Set(['ff_one'])]); - await cache.removeSplit(featureFlagOne.name); - expect(await cache.getNamesByFlagSets(['x'])).toEqual([emptySet]); + await cache.remove(featureFlagOne.name); + expect(await cache.getNamesBySets(['x'])).toEqual([emptySet]); - await cache.removeSplit(featureFlagOne.name); - expect(await cache.getNamesByFlagSets(['y'])).toEqual([emptySet]); // 'y' not in filter - expect(await cache.getNamesByFlagSets([])).toEqual([]); + await cache.remove(featureFlagOne.name); + expect(await cache.getNamesBySets(['y'])).toEqual([emptySet]); // 'y' not in filter + expect(await cache.getNamesBySets([])).toEqual([]); - await cache.addSplit({ ...featureFlagWithoutFS, name: featureFlagWithEmptyFS.name }); - expect(await cache.getNamesByFlagSets([])).toEqual([]); + await cache.add({ ...featureFlagWithoutFS, name: featureFlagWithEmptyFS.name }); + expect(await cache.getNamesBySets([])).toEqual([]); }); // if FlagSets filter is not defined, it should store all FlagSets in memory. 
test('flag set cache tests without filters', async () => { - const cacheWithoutFilters = new SplitsCachePluggable(loggerMock, keysBuilder, wrapperMockFactory()); + const cacheWithoutFilters = new DefinitionsCachePluggable(loggerMock, keysBuilder, wrapperMockFactory()); const emptySet = new Set([]); await cacheWithoutFilters.update([ @@ -188,14 +188,14 @@ describe('SPLITS CACHE PLUGGABLE', () => { featureFlagTwo, featureFlagThree ], [], -1); - await cacheWithoutFilters.addSplit(featureFlagWithEmptyFS); - - expect(await cacheWithoutFilters.getNamesByFlagSets(['o'])).toEqual([new Set(['ff_one', 'ff_two'])]); - expect(await cacheWithoutFilters.getNamesByFlagSets(['n'])).toEqual([new Set(['ff_one'])]); - expect(await cacheWithoutFilters.getNamesByFlagSets(['e'])).toEqual([new Set(['ff_one', 'ff_three'])]); - expect(await cacheWithoutFilters.getNamesByFlagSets(['t'])).toEqual([new Set(['ff_two', 'ff_three'])]); - expect(await cacheWithoutFilters.getNamesByFlagSets(['y'])).toEqual([emptySet]); - expect(await cacheWithoutFilters.getNamesByFlagSets(['o', 'n', 'e'])).toEqual([new Set(['ff_one', 'ff_two']), new Set(['ff_one']), new Set(['ff_one', 'ff_three'])]); + await cacheWithoutFilters.add(featureFlagWithEmptyFS); + + expect(await cacheWithoutFilters.getNamesBySets(['o'])).toEqual([new Set(['ff_one', 'ff_two'])]); + expect(await cacheWithoutFilters.getNamesBySets(['n'])).toEqual([new Set(['ff_one'])]); + expect(await cacheWithoutFilters.getNamesBySets(['e'])).toEqual([new Set(['ff_one', 'ff_three'])]); + expect(await cacheWithoutFilters.getNamesBySets(['t'])).toEqual([new Set(['ff_two', 'ff_three'])]); + expect(await cacheWithoutFilters.getNamesBySets(['y'])).toEqual([emptySet]); + expect(await cacheWithoutFilters.getNamesBySets(['o', 'n', 'e'])).toEqual([new Set(['ff_one', 'ff_two']), new Set(['ff_one']), new Set(['ff_one', 'ff_three'])]); }); }); diff --git a/src/storages/pluggable/__tests__/index.spec.ts b/src/storages/pluggable/__tests__/index.spec.ts index 
a0f32b1d..94061677 100644 --- a/src/storages/pluggable/__tests__/index.spec.ts +++ b/src/storages/pluggable/__tests__/index.spec.ts @@ -36,11 +36,11 @@ describe('PLUGGABLE STORAGE', () => { const sharedOnReadyCb = jest.fn(); const sharedStorage = storage.shared('some_key', sharedOnReadyCb); assertStorageInterface(sharedStorage); - expect(sharedStorage.splits).toBe(storage.splits); + expect(sharedStorage.definitions).toBe(storage.definitions); expect(wrapperMock.connect).toBeCalledTimes(1); // wrapper connect method should be called once - expect(await storage.splits.getSplit('some_split')).toBe(null); - expect(await sharedStorage.splits.getSplit('some_split')).toBe(null); + expect(await storage.definitions.get('some_split')).toBe(null); + expect(await sharedStorage.definitions.get('some_split')).toBe(null); expect(wrapperMock.get).toBeCalledTimes(2); expect(wrapperMock.get).toBeCalledWith(`${prefix}.SPLITIO.split.some_split`); // keys prefix should be the provided one diff --git a/src/storages/pluggable/index.ts b/src/storages/pluggable/index.ts index 7f020fa6..84a16619 100644 --- a/src/storages/pluggable/index.ts +++ b/src/storages/pluggable/index.ts @@ -1,7 +1,7 @@ import { IPluggableStorageWrapper, IStorageAsync, IStorageAsyncFactory, IStorageFactoryParams, ITelemetryCacheAsync } from '../types'; import { KeyBuilderSS } from '../KeyBuilderSS'; -import { SplitsCachePluggable } from './SplitsCachePluggable'; +import { DefinitionsCachePluggable } from './DefinitionsCachePluggable'; import { SegmentsCachePluggable } from './SegmentsCachePluggable'; import { ImpressionsCachePluggable } from './ImpressionsCachePluggable'; import { EventsCachePluggable } from './EventsCachePluggable'; @@ -118,7 +118,7 @@ export function PluggableStorage(options: PluggableStorageOptions): IStorageAsyn }); return { - splits: new SplitsCachePluggable(log, keys, wrapper, settings.sync.__splitFiltersValidation), + definitions: new DefinitionsCachePluggable(log, keys, wrapper, 
settings.sync.__splitFiltersValidation), rbSegments: new RBSegmentsCachePluggable(log, keys, wrapper), segments: new SegmentsCachePluggable(log, keys, wrapper), impressions: isPartialConsumer ? new ImpressionsCacheInMemory(impressionsQueueSize) : new ImpressionsCachePluggable(log, keys.buildImpressionsKey(), wrapper, metadata), diff --git a/src/storages/setRolloutPlan.ts b/src/storages/setRolloutPlan.ts index b92a1d8a..699d1e01 100644 --- a/src/storages/setRolloutPlan.ts +++ b/src/storages/setRolloutPlan.ts @@ -1,5 +1,5 @@ import SplitIO from '../../types/splitio'; -import { IRBSegmentsCacheSync, ISegmentsCacheSync, ISplitsCacheSync } from './types'; +import { IRBSegmentsCacheSync, ISegmentsCacheSync, IDefinitionsCacheSync } from './types'; import { ILogger } from '../logger/types'; import { isObject } from '../utils/lang'; import { isConsumerMode } from '../utils/settingsValidation/mode'; @@ -27,15 +27,15 @@ export function validateRolloutPlan(log: ILogger, settings: SplitIO.ISettings): * If `matchingKey` is provided, the storage is handled as a client-side storage (segments and largeSegments are instances of MySegmentsCache). * Otherwise, the storage is handled as a server-side storage (segments is an instance of SegmentsCache). */ -export function setRolloutPlan(log: ILogger, rolloutPlan: RolloutPlan, storage: { splits?: ISplitsCacheSync, rbSegments?: IRBSegmentsCacheSync, segments: ISegmentsCacheSync, largeSegments?: ISegmentsCacheSync }, matchingKey?: string) { - const { splits, rbSegments, segments, largeSegments } = storage; +export function setRolloutPlan(log: ILogger, rolloutPlan: RolloutPlan, storage: { definitions?: IDefinitionsCacheSync, rbSegments?: IRBSegmentsCacheSync, segments: ISegmentsCacheSync, largeSegments?: ISegmentsCacheSync }, matchingKey?: string) { + const { definitions, rbSegments, segments, largeSegments } = storage; const { splitChanges: { ff, rbs } } = rolloutPlan; log.debug(`storage: set feature flags and segments${matchingKey ? 
` for key ${matchingKey}` : ''}`); - if (splits && ff) { - splits.clear(); - splits.update(ff.d || [], [], ff.t); + if (definitions && ff) { + definitions.clear(); + definitions.update(ff.d || [], [], ff.t); } if (rbSegments && rbs) { diff --git a/src/storages/types.ts b/src/storages/types.ts index 9d51bd1e..1aa9f76a 100644 --- a/src/storages/types.ts +++ b/src/storages/types.ts @@ -194,49 +194,49 @@ export interface IPluggableStorageWrapper { /** Splits cache */ -export interface ISplitsCacheBase { +export interface IDefinitionsCacheBase { update(toAdd: IDefinition[], toRemove: string[], changeNumber: number): MaybeThenable, - getSplit(name: string): MaybeThenable, - getSplits(names: string[]): MaybeThenable>, // `fetchMany` in spec + get(name: string): MaybeThenable, + getMany(names: string[]): MaybeThenable>, // `fetchMany` in spec // should never reject or throw an exception. Instead return -1 by default, assuming no splits are present in the storage. getChangeNumber(): MaybeThenable, getAll(): MaybeThenable, - getSplitNames(): MaybeThenable, + getNames(): MaybeThenable, // should never reject or throw an exception. Instead return true by default, asssuming the TT might exist. trafficTypeExists(trafficType: string): MaybeThenable, // only for Client-Side. 
Returns true if the storage is not synchronized yet (getChangeNumber() === -1) or contains a FF using segments or large segments usesSegments(): MaybeThenable, clear(): MaybeThenable, killLocally(name: string, defaultTreatment: string, changeNumber: number): MaybeThenable, - getNamesByFlagSets(flagSets: string[]): MaybeThenable[]> + getNamesBySets(sets: string[]): MaybeThenable[]> } -export interface ISplitsCacheSync extends ISplitsCacheBase { +export interface IDefinitionsCacheSync extends IDefinitionsCacheBase { update(toAdd: IDefinition[], toRemove: string[], changeNumber: number): boolean, - getSplit(name: string): IDefinition | null, - getSplits(names: string[]): Record, + get(name: string): IDefinition | null, + getMany(names: string[]): Record, getChangeNumber(): number, getAll(): IDefinition[], - getSplitNames(): string[], + getNames(): string[], trafficTypeExists(trafficType: string): boolean, usesSegments(): boolean, clear(): void, killLocally(name: string, defaultTreatment: string, changeNumber: number): boolean, - getNamesByFlagSets(flagSets: string[]): Set[] + getNamesBySets(sets: string[]): Set[] } -export interface ISplitsCacheAsync extends ISplitsCacheBase { +export interface IDefinitionsCacheAsync extends IDefinitionsCacheBase { update(toAdd: IDefinition[], toRemove: string[], changeNumber: number): Promise, - getSplit(name: string): Promise, - getSplits(names: string[]): Promise>, + get(name: string): Promise, + getMany(names: string[]): Promise>, getChangeNumber(): Promise, getAll(): Promise, - getSplitNames(): Promise, + getNames(): Promise, trafficTypeExists(trafficType: string): Promise, usesSegments(): Promise, clear(): Promise, killLocally(name: string, defaultTreatment: string, changeNumber: number): Promise, - getNamesByFlagSets(flagSets: string[]): Promise[]> + getNamesBySets(sets: string[]): Promise[]> } /** Rule-Based Segments cache */ @@ -466,7 +466,7 @@ export interface ITelemetryCacheAsync extends ITelemetryEvaluationProducerAsync, 
*/ export interface IStorageBase< - TSplitsCache extends ISplitsCacheBase = ISplitsCacheBase, + TDefinitionsCache extends IDefinitionsCacheBase = IDefinitionsCacheBase, TRBSegmentsCache extends IRBSegmentsCacheBase = IRBSegmentsCacheBase, TSegmentsCache extends ISegmentsCacheBase = ISegmentsCacheBase, TImpressionsCache extends IImpressionsCacheBase = IImpressionsCacheBase, @@ -475,7 +475,7 @@ export interface IStorageBase< TTelemetryCache extends ITelemetryCacheSync | ITelemetryCacheAsync = ITelemetryCacheSync | ITelemetryCacheAsync, TUniqueKeysCache extends IUniqueKeysCacheBase = IUniqueKeysCacheBase > { - splits: TSplitsCache, + definitions: TDefinitionsCache, rbSegments: TRBSegmentsCache, segments: TSegmentsCache, largeSegments?: TSegmentsCache, @@ -490,7 +490,7 @@ export interface IStorageBase< } export interface IStorageSync extends IStorageBase< - ISplitsCacheSync, + IDefinitionsCacheSync, IRBSegmentsCacheSync, ISegmentsCacheSync, IImpressionsCacheSync, @@ -505,7 +505,7 @@ export interface IStorageSync extends IStorageBase< } export interface IStorageAsync extends IStorageBase< - ISplitsCacheAsync, + IDefinitionsCacheAsync, IRBSegmentsCacheAsync, ISegmentsCacheAsync, IImpressionsCacheAsync | IImpressionsCacheSync, diff --git a/src/sync/__tests__/syncManagerOnline.spec.ts b/src/sync/__tests__/syncManagerOnline.spec.ts index 902a0d93..be604aa3 100644 --- a/src/sync/__tests__/syncManagerOnline.spec.ts +++ b/src/sync/__tests__/syncManagerOnline.spec.ts @@ -2,7 +2,7 @@ import { fullSettings } from '../../utils/settingsValidation/__tests__/settings. 
import { syncTaskFactory } from './syncTask.mock'; import { syncManagerOnlineFactory } from '../syncManagerOnline'; import { IReadinessManager } from '../../readiness/types'; -import { SDK_SPLITS_CACHE_LOADED } from '../../readiness/constants'; +import { SDK_DEFINITIONS_CACHE_LOADED } from '../../readiness/constants'; jest.mock('../submitters/submitterManager', () => { return { @@ -17,7 +17,7 @@ const splitApiMock = { // Mocked storageManager const storageManagerMock = { - splits: { + definitions: { usesSegments: () => false } }; @@ -191,19 +191,19 @@ test('syncManagerOnline should syncAll a single time when sync is disabled', asy }); -test('syncManagerOnline should emit SDK_SPLITS_CACHE_LOADED if validateCache returns false', async () => { +test('syncManagerOnline should emit SDK_DEFINITIONS_CACHE_LOADED if validateCache returns false', async () => { const lastUpdateTimestamp = Date.now() - 1000 * 60 * 60; // 1 hour ago const params = { settings: fullSettings, storage: { validateCache: () => Promise.resolve({ initialCacheLoad: false, lastUpdateTimestamp }) }, - readiness: { splits: { emit: jest.fn() } }, + readiness: { definitions: { emit: jest.fn() } }, splitApi: splitApiMock, }; // @ts-ignore const syncManager = syncManagerOnlineFactory()(params); await syncManager.start(); - expect(params.readiness.splits.emit).toBeCalledWith(SDK_SPLITS_CACHE_LOADED, { initialCacheLoad: false, lastUpdateTimestamp }); + expect(params.readiness.definitions.emit).toBeCalledWith(SDK_DEFINITIONS_CACHE_LOADED, { initialCacheLoad: false, lastUpdateTimestamp }); syncManager.stop(); }); diff --git a/src/sync/offline/syncTasks/fromObjectSyncTask.ts b/src/sync/offline/syncTasks/fromObjectSyncTask.ts index 14f431ea..81f172d7 100644 --- a/src/sync/offline/syncTasks/fromObjectSyncTask.ts +++ b/src/sync/offline/syncTasks/fromObjectSyncTask.ts @@ -7,7 +7,7 @@ import { syncTaskFactory } from '../../syncTask'; import { ISyncTask } from '../../types'; import { ISettings } from '../../../types'; 
import { CONTROL } from '../../../utils/constants'; -import { SDK_SPLITS_ARRIVED, SDK_SEGMENTS_ARRIVED, SDK_SPLITS_CACHE_LOADED, FLAGS_UPDATE, SEGMENTS_UPDATE } from '../../../readiness/constants'; +import { SDK_DEFINITIONS_ARRIVED, SDK_SEGMENTS_ARRIVED, SDK_DEFINITIONS_CACHE_LOADED, FLAGS_UPDATE, SEGMENTS_UPDATE } from '../../../readiness/constants'; import { SYNC_OFFLINE_DATA, ERROR_SYNC_OFFLINE_LOADING } from '../../../logger/constants'; /** @@ -15,12 +15,12 @@ import { SYNC_OFFLINE_DATA, ERROR_SYNC_OFFLINE_LOADING } from '../../../logger/c */ export function fromObjectUpdaterFactory( splitsParser: IDefinitionsParser, - storage: Pick, + storage: Pick, readiness: IReadinessManager, settings: ISettings, ): () => Promise { - const log = settings.log, splitsCache = storage.splits; + const log = settings.log, definitions = storage.definitions; let startingUp = true; return function objectUpdater() { @@ -52,17 +52,17 @@ export function fromObjectUpdaterFactory( }); return Promise.all([ - splitsCache.clear(), // required to sync removed splits from mock - splitsCache.update(splits, [], Date.now()) + definitions.clear(), // required to sync removed splits from mock + definitions.update(splits, [], Date.now()) ]).then(() => { - readiness.splits.emit(SDK_SPLITS_ARRIVED, { type: FLAGS_UPDATE, names: [] }); + readiness.definitions.emit(SDK_DEFINITIONS_ARRIVED, { type: FLAGS_UPDATE, names: [] }); if (startingUp) { startingUp = false; Promise.resolve(storage.validateCache ? 
storage.validateCache() : { initialCacheLoad: true /* Fallback: assume initial load when validateCache doesn't exist */ }).then((cacheMetadata) => { // Emits SDK_READY_FROM_CACHE if (!cacheMetadata.initialCacheLoad) { - readiness.splits.emit(SDK_SPLITS_CACHE_LOADED, cacheMetadata); + readiness.definitions.emit(SDK_DEFINITIONS_CACHE_LOADED, cacheMetadata); } // Emits SDK_READY readiness.segments.emit(SDK_SEGMENTS_ARRIVED, { type: SEGMENTS_UPDATE, names: [] }); @@ -81,7 +81,7 @@ export function fromObjectUpdaterFactory( */ export function fromObjectSyncTaskFactory( splitsParser: IDefinitionsParser, - storage: Pick, + storage: Pick, readiness: IReadinessManager, settings: ISettings ): ISyncTask<[], boolean> { diff --git a/src/sync/polling/fetchers/splitChangesFetcher.ts b/src/sync/polling/fetchers/splitChangesFetcher.ts index 82e3763d..c26fdfc6 100644 --- a/src/sync/polling/fetchers/splitChangesFetcher.ts +++ b/src/sync/polling/fetchers/splitChangesFetcher.ts @@ -102,7 +102,7 @@ export function splitChangesFetcherFactory(params: ISdkFactoryContextSync): IDef lastProxyCheckTimestamp = undefined; return splitChangesFetcher(-1, undefined, undefined, -1) .then((splitChangesResponse: IDefinitionChangesResponse) => - Promise.all([storage.splits.clear(), storage.rbSegments.clear()]) + Promise.all([storage.definitions.clear(), storage.rbSegments.clear()]) .then(() => splitChangesResponse) ); } diff --git a/src/sync/polling/pollingManagerCS.ts b/src/sync/polling/pollingManagerCS.ts index 05895aac..d7679b6b 100644 --- a/src/sync/polling/pollingManagerCS.ts +++ b/src/sync/polling/pollingManagerCS.ts @@ -5,10 +5,10 @@ import { IStorageSync } from '../../storages/types'; import { mySegmentsSyncTaskFactory } from './syncTasks/mySegmentsSyncTask'; import { definitionsSyncTaskFactory } from './syncTasks/definitionsSyncTask'; import { getMatching } from '../../utils/key'; -import { SDK_SPLITS_ARRIVED, SDK_SEGMENTS_ARRIVED } from '../../readiness/constants'; +import { 
SDK_DEFINITIONS_ARRIVED, SDK_SEGMENTS_ARRIVED } from '../../readiness/constants'; import { POLLING_SMART_PAUSING, POLLING_START, POLLING_STOP } from '../../logger/constants'; import { ISdkFactoryContextSync } from '../../sdkFactory/types'; -import { usesSegmentsSync } from '../../storages/AbstractSplitsCacheSync'; +import { usesSegmentsSync } from '../../storages/AbstractDefinitionsCacheSync'; import { SdkUpdateMetadata } from '../../../types/splitio'; import { IDefinitionChangesFetcher } from './fetchers/types'; @@ -45,7 +45,7 @@ export function pollingManagerCSFactory( } // smart pausing - readiness.splits.on(SDK_SPLITS_ARRIVED, () => { + readiness.definitions.on(SDK_DEFINITIONS_ARRIVED, () => { if (!definitionsSyncTask.isRunning()) return; // noop if not doing polling const usingSegments = usesSegmentsSync(storage); if (usingSegments !== mySegmentsSyncTask.isRunning()) { @@ -66,7 +66,7 @@ export function pollingManagerCSFactory( if (!readiness.isReady() && !usesSegmentsSync(storage)) readiness.segments.emit(SDK_SEGMENTS_ARRIVED, metadata); } if (!usesSegmentsSync(storage)) setTimeout(smartReady, 0); - else readiness.splits.once(SDK_SPLITS_ARRIVED, smartReady); + else readiness.definitions.once(SDK_DEFINITIONS_ARRIVED, smartReady); mySegmentsSyncTasks[matchingKey] = mySegmentsSyncTask; return mySegmentsSyncTask; diff --git a/src/sync/polling/syncTasks/definitionsSyncTask.ts b/src/sync/polling/syncTasks/definitionsSyncTask.ts index 6e107211..aac2a112 100644 --- a/src/sync/polling/syncTasks/definitionsSyncTask.ts +++ b/src/sync/polling/syncTasks/definitionsSyncTask.ts @@ -23,7 +23,7 @@ export function definitionsSyncTaskFactory( definitionChangesFetcher, storage, settings.sync.__splitFiltersValidation, - readiness.splits, + readiness.definitions, settings.startup.requestTimeoutBeforeReady, settings.startup.retriesOnFailureBeforeReady, isClientSide diff --git a/src/sync/polling/types.ts b/src/sync/polling/types.ts index 2f8e0f0a..f438eb2b 100644 --- 
a/src/sync/polling/types.ts +++ b/src/sync/polling/types.ts @@ -4,7 +4,7 @@ import { IStorageSync } from '../../storages/types'; import { MEMBERSHIPS_LS_UPDATE, MEMBERSHIPS_MS_UPDATE } from '../streaming/types'; import { ITask, ISyncTask } from '../types'; -export interface IDefinitionsSyncTask extends ISyncTask<[noCache?: boolean, till?: number, splitUpdateNotification?: { payload: IDefinition | IRBSegment, changeNumber: number }], boolean> { } +export interface IDefinitionsSyncTask extends ISyncTask<[noCache?: boolean, till?: number, definitionUpdateNotification?: { payload: IDefinition | IRBSegment, changeNumber: number }], boolean> { } export interface ISegmentsSyncTask extends ISyncTask<[fetchOnlyNew?: boolean, segmentName?: string, noCache?: boolean, till?: number], boolean> { } diff --git a/src/sync/polling/updaters/__tests__/definitionChangesUpdater.spec.ts b/src/sync/polling/updaters/__tests__/definitionChangesUpdater.spec.ts index 2b119679..28de61b8 100644 --- a/src/sync/polling/updaters/__tests__/definitionChangesUpdater.spec.ts +++ b/src/sync/polling/updaters/__tests__/definitionChangesUpdater.spec.ts @@ -2,7 +2,7 @@ import { IRBSegment, IDefinition, IDefinitionCondition } from '../../../../dtos/ import { readinessManagerFactory } from '../../../../readiness/readinessManager'; import { splitApiFactory } from '../../../../services/splitApi'; import { SegmentsCacheInMemory } from '../../../../storages/inMemory/SegmentsCacheInMemory'; -import { SplitsCacheInMemory } from '../../../../storages/inMemory/SplitsCacheInMemory'; +import { DefinitionsCacheInMemory } from '../../../../storages/inMemory/DefinitionsCacheInMemory'; import { splitChangesFetcherFactory } from '../../fetchers/splitChangesFetcher'; import { definitionChangesUpdaterFactory, parseSegments, computeMutation } from '../definitionChangesUpdater'; import splitChangesMock1 from '../../../../__tests__/mocks/splitchanges.since.-1.json'; @@ -15,7 +15,7 @@ import { splitNotifications } from 
'../../../streaming/__tests__/dataMocks'; import { RBSegmentsCacheInMemory } from '../../../../storages/inMemory/RBSegmentsCacheInMemory'; import { RB_SEGMENT_UPDATE, SPLIT_UPDATE } from '../../../streaming/constants'; import { IN_RULE_BASED_SEGMENT } from '../../../../utils/constants'; -import { SDK_SPLITS_ARRIVED, FLAGS_UPDATE, SEGMENTS_UPDATE } from '../../../../readiness/constants'; +import { SDK_DEFINITIONS_ARRIVED, FLAGS_UPDATE, SEGMENTS_UPDATE } from '../../../../readiness/constants'; const ARCHIVED_FF = 'ARCHIVED'; @@ -179,8 +179,8 @@ test('definitionChangesUpdater / compute splits mutation with filters', () => { }); describe('definitionChangesUpdater', () => { - const splits = new SplitsCacheInMemory(); - const updateSplits = jest.spyOn(splits, 'update'); + const definitions = new DefinitionsCacheInMemory(); + const updateSplits = jest.spyOn(definitions, 'update'); const rbSegments = new RBSegmentsCacheInMemory(); const updateRbSegments = jest.spyOn(rbSegments, 'update'); @@ -188,7 +188,7 @@ describe('definitionChangesUpdater', () => { const segments = new SegmentsCacheInMemory(); const registerSegments = jest.spyOn(segments, 'registerSegments'); - const storage = { splits, rbSegments, segments }; + const storage = { definitions, rbSegments, segments }; fetchMock.once('*', { status: 200, body: splitChangesMock1 }); // @ts-ignore const splitApi = splitApiFactory(settingsSplitApi, { getFetch: () => fetchMock }, telemetryTrackerFactory()); @@ -196,11 +196,11 @@ describe('definitionChangesUpdater', () => { const splitChangesFetcher = splitChangesFetcherFactory({ splitApi, settings: fullSettings, storage }); const readinessManager = readinessManagerFactory(EventEmitter, fullSettings); - const splitsEmitSpy = jest.spyOn(readinessManager.splits, 'emit'); + const splitsEmitSpy = jest.spyOn(readinessManager.definitions, 'emit'); let splitFiltersValidation = { queryString: null, groupedFilters: { bySet: [], byName: [], byPrefix: [] }, validFilters: [] }; - let 
definitionChangesUpdater = definitionChangesUpdaterFactory(loggerMock, splitChangesFetcher, storage, splitFiltersValidation, readinessManager.splits, 1000, 1); + let definitionChangesUpdater = definitionChangesUpdaterFactory(loggerMock, splitChangesFetcher, storage, splitFiltersValidation, readinessManager.definitions, 1000, 1); afterEach(() => { jest.clearAllMocks(); @@ -216,7 +216,7 @@ describe('definitionChangesUpdater', () => { expect(updateSplits).lastCalledWith(splitChangesMock1.ff.d, [], splitChangesMock1.ff.t); expect(updateRbSegments).toBeCalledTimes(0); // no rbSegments to update expect(registerSegments).toBeCalledTimes(1); - expect(splitsEmitSpy).toBeCalledWith(SDK_SPLITS_ARRIVED, { type: FLAGS_UPDATE, names: updatedFlags }); + expect(splitsEmitSpy).toBeCalledWith(SDK_DEFINITIONS_ARRIVED, { type: FLAGS_UPDATE, names: updatedFlags }); expect(result).toBe(true); }); @@ -290,22 +290,22 @@ describe('definitionChangesUpdater', () => { { sets: ['set_a'], shouldEmit: true }, /* should emit if flag is back in configured sets */ ]; - definitionChangesUpdater = definitionChangesUpdaterFactory(loggerMock, splitChangesFetcher, storage, splitFiltersValidation, readinessManager.splits, 1000, 1, true); + definitionChangesUpdater = definitionChangesUpdaterFactory(loggerMock, splitChangesFetcher, storage, splitFiltersValidation, readinessManager.definitions, 1000, 1, true); let index = 0; let calls = 0; // emit always if not configured sets for (const setMock of setMocks) { await expect(definitionChangesUpdater(undefined, undefined, { payload: { ...payload, sets: setMock.sets, status: 'ACTIVE' }, changeNumber: index, type: SPLIT_UPDATE })).resolves.toBe(true); - expect(splitsEmitSpy.mock.calls[index][0]).toBe(SDK_SPLITS_ARRIVED); + expect(splitsEmitSpy.mock.calls[index][0]).toBe(SDK_DEFINITIONS_ARRIVED); expect(splitsEmitSpy.mock.calls[index][1]).toEqual({ type: FLAGS_UPDATE, names: [payload.name] }); index++; } // @ts-ignore splitFiltersValidation = { queryString: null, 
groupedFilters: { bySet: ['set_a'], byName: [], byPrefix: [] }, validFilters: [] }; - storage.splits.clear(); - definitionChangesUpdater = definitionChangesUpdaterFactory(loggerMock, splitChangesFetcher, storage, splitFiltersValidation, readinessManager.splits, 1000, 1, true); + storage.definitions.clear(); + definitionChangesUpdater = definitionChangesUpdaterFactory(loggerMock, splitChangesFetcher, storage, splitFiltersValidation, readinessManager.definitions, 1000, 1, true); splitsEmitSpy.mockReset(); index = 0; for (const setMock of setMocks) { @@ -320,23 +320,23 @@ describe('definitionChangesUpdater', () => { test('test with ff payload - should emit metadata with flag name', async () => { splitsEmitSpy.mockClear(); - readinessManager.splits.splitsArrived = false; - storage.splits.clear(); + readinessManager.definitions.definitionsArrived = false; + storage.definitions.clear(); const payload = splitNotifications[0].decoded as Pick; const changeNumber = payload.changeNumber; await expect(definitionChangesUpdater(undefined, undefined, { payload, changeNumber: changeNumber, type: SPLIT_UPDATE })).resolves.toBe(true); - expect(splitsEmitSpy).toBeCalledWith(SDK_SPLITS_ARRIVED, { type: FLAGS_UPDATE, names: [payload.name] }); + expect(splitsEmitSpy).toBeCalledWith(SDK_DEFINITIONS_ARRIVED, { type: FLAGS_UPDATE, names: [payload.name] }); }); test('test with multiple flags updated - should emit metadata with all flag names', async () => { splitsEmitSpy.mockClear(); - storage.splits.clear(); + storage.definitions.clear(); storage.segments.clear(); - // Start with splitsArrived = false so it emits on first update - readinessManager.splits.splitsArrived = false; + // Start with definitionsArrived = false so it emits on first update + readinessManager.definitions.definitionsArrived = false; readinessManager.segments.segmentsArrived = true; // Segments ready const flag1 = { name: 'flag1', status: 'ACTIVE', changeNumber: 100, conditions: [] } as unknown as IDefinition; @@ 
-346,16 +346,16 @@ describe('definitionChangesUpdater', () => { fetchMock.once('*', { status: 200, body: { ff: { d: [flag1, flag2, flag3], t: 102 } } }); await definitionChangesUpdater(); - // Should emit with metadata when splitsArrived is false (first update) - expect(splitsEmitSpy).toBeCalledWith(SDK_SPLITS_ARRIVED, { type: FLAGS_UPDATE, names: ['flag1', 'flag2', 'flag3'] }); + // Should emit with metadata when definitionsArrived is false (first update) + expect(splitsEmitSpy).toBeCalledWith(SDK_DEFINITIONS_ARRIVED, { type: FLAGS_UPDATE, names: ['flag1', 'flag2', 'flag3'] }); }); test('test with ARCHIVED flag - should emit metadata with flag name', async () => { splitsEmitSpy.mockClear(); - storage.splits.clear(); + storage.definitions.clear(); storage.segments.clear(); - // Start with splitsArrived = false so it emits on first update - readinessManager.splits.splitsArrived = false; + // Start with definitionsArrived = false so it emits on first update + readinessManager.definitions.definitionsArrived = false; readinessManager.segments.segmentsArrived = true; // Segments ready const archivedFlag = { name: 'archived-flag', status: ARCHIVED_FF, changeNumber: 200, conditions: [] } as unknown as IDefinition; @@ -365,13 +365,13 @@ describe('definitionChangesUpdater', () => { await expect(definitionChangesUpdater(undefined, undefined, { payload, changeNumber: changeNumber, type: SPLIT_UPDATE })).resolves.toBe(true); - // Should emit with metadata when splitsArrived is false (first update) - expect(splitsEmitSpy).toBeCalledWith(SDK_SPLITS_ARRIVED, { type: FLAGS_UPDATE, names: [payload.name] }); + // Should emit with metadata when definitionsArrived is false (first update) + expect(splitsEmitSpy).toBeCalledWith(SDK_DEFINITIONS_ARRIVED, { type: FLAGS_UPDATE, names: [payload.name] }); }); test('test with rbsegment payload - should emit SEGMENTS_UPDATE not FLAGS_UPDATE', async () => { splitsEmitSpy.mockClear(); - readinessManager.splits.splitsArrived = true; + 
readinessManager.definitions.definitionsArrived = true; storage.rbSegments.clear(); const payload = { name: 'rbsegment', status: 'ACTIVE', changeNumber: 1684329854385, conditions: [] } as unknown as IRBSegment; @@ -380,13 +380,13 @@ describe('definitionChangesUpdater', () => { await expect(definitionChangesUpdater(undefined, undefined, { payload, changeNumber: changeNumber, type: RB_SEGMENT_UPDATE })).resolves.toBe(true); // Should emit SEGMENTS_UPDATE (not FLAGS_UPDATE) when only RB segment is updated - expect(splitsEmitSpy).toBeCalledWith(SDK_SPLITS_ARRIVED, { type: SEGMENTS_UPDATE, names: [] }); + expect(splitsEmitSpy).toBeCalledWith(SDK_DEFINITIONS_ARRIVED, { type: SEGMENTS_UPDATE, names: [] }); }); test('test with only RB segment update and no flags - should emit SEGMENTS_UPDATE', async () => { splitsEmitSpy.mockClear(); - readinessManager.splits.splitsArrived = true; - storage.splits.clear(); + readinessManager.definitions.definitionsArrived = true; + storage.definitions.clear(); storage.rbSegments.clear(); // Simulate a scenario where only RB segments are updated (no flags) @@ -395,13 +395,13 @@ describe('definitionChangesUpdater', () => { await definitionChangesUpdater(); // When updatedFlags.length === 0, should emit SEGMENTS_UPDATE - expect(splitsEmitSpy).toBeCalledWith(SDK_SPLITS_ARRIVED, { type: SEGMENTS_UPDATE, names: [] }); + expect(splitsEmitSpy).toBeCalledWith(SDK_DEFINITIONS_ARRIVED, { type: SEGMENTS_UPDATE, names: [] }); }); test('test with both flags and RB segments updated - should emit FLAGS_UPDATE with flag names', async () => { splitsEmitSpy.mockClear(); - readinessManager.splits.splitsArrived = true; - storage.splits.clear(); + readinessManager.definitions.definitionsArrived = true; + storage.definitions.clear(); storage.rbSegments.clear(); storage.segments.clear(); @@ -414,25 +414,25 @@ describe('definitionChangesUpdater', () => { await definitionChangesUpdater(); // When both flags and RB segments are updated, should emit FLAGS_UPDATE with 
flag names - expect(splitsEmitSpy).toBeCalledWith(SDK_SPLITS_ARRIVED, { type: FLAGS_UPDATE, names: ['flag1', 'flag2'] }); + expect(splitsEmitSpy).toBeCalledWith(SDK_DEFINITIONS_ARRIVED, { type: FLAGS_UPDATE, names: ['flag1', 'flag2'] }); }); test('test client-side behavior - should emit even when segments not all fetched', async () => { splitsEmitSpy.mockClear(); - storage.splits.clear(); - // Start with splitsArrived = false so it emits on first update - readinessManager.splits.splitsArrived = false; + storage.definitions.clear(); + // Start with definitionsArrived = false so it emits on first update + readinessManager.definitions.definitionsArrived = false; readinessManager.segments.segmentsArrived = false; // Segments not ready - client-side should still emit // Create client-side updater (isClientSide = true) - const clientSideUpdater = definitionChangesUpdaterFactory(loggerMock, splitChangesFetcher, storage, splitFiltersValidation, readinessManager.splits, 1000, 1, true); + const clientSideUpdater = definitionChangesUpdaterFactory(loggerMock, splitChangesFetcher, storage, splitFiltersValidation, readinessManager.definitions, 1000, 1, true); const flag1 = { name: 'client-flag', status: 'ACTIVE', changeNumber: 300, conditions: [] } as unknown as IDefinition; fetchMock.once('*', { status: 200, body: { ff: { d: [flag1], t: 300 } } }); await clientSideUpdater(); // Client-side should emit even if segments aren't all fetched (isClientSide bypasses checkAllSegmentsExist) - expect(splitsEmitSpy).toBeCalledWith(SDK_SPLITS_ARRIVED, { type: FLAGS_UPDATE, names: ['client-flag'] }); + expect(splitsEmitSpy).toBeCalledWith(SDK_DEFINITIONS_ARRIVED, { type: FLAGS_UPDATE, names: ['client-flag'] }); }); }); diff --git a/src/sync/polling/updaters/__tests__/mySegmentsUpdater.spec.ts b/src/sync/polling/updaters/__tests__/mySegmentsUpdater.spec.ts index 2fabc173..dda61c4b 100644 --- a/src/sync/polling/updaters/__tests__/mySegmentsUpdater.spec.ts +++ 
b/src/sync/polling/updaters/__tests__/mySegmentsUpdater.spec.ts @@ -10,18 +10,18 @@ import { SDK_SEGMENTS_ARRIVED, SEGMENTS_UPDATE } from '../../../../readiness/con import { MySegmentsData } from '../../types'; import { MEMBERSHIPS_MS_UPDATE } from '../../../streaming/constants'; import { IStorageSync } from '../../../../storages/types'; -import { SplitsCacheInMemory } from '../../../../storages/inMemory/SplitsCacheInMemory'; +import { DefinitionsCacheInMemory } from '../../../../storages/inMemory/DefinitionsCacheInMemory'; import { RBSegmentsCacheInMemory } from '../../../../storages/inMemory/RBSegmentsCacheInMemory'; describe('mySegmentsUpdater', () => { const segments = new MySegmentsCacheInMemory(); const largeSegments = new MySegmentsCacheInMemory(); - const splits = new SplitsCacheInMemory(); + const definitions = new DefinitionsCacheInMemory(); const rbSegments = new RBSegmentsCacheInMemory(); const storage: IStorageSync = { segments, largeSegments, - splits, + definitions, rbSegments, impressions: {} as any, events: {} as any, diff --git a/src/sync/polling/updaters/definitionChangesUpdater.ts b/src/sync/polling/updaters/definitionChangesUpdater.ts index 6653d585..62d7d0ce 100644 --- a/src/sync/polling/updaters/definitionChangesUpdater.ts +++ b/src/sync/polling/updaters/definitionChangesUpdater.ts @@ -1,9 +1,9 @@ import { ISegmentsCacheBase, IStorageBase } from '../../../storages/types'; import { IDefinitionChangesFetcher } from '../fetchers/types'; import { IRBSegment, IDefinition, IDefinitionChangesResponse, ISplitFiltersValidation, MaybeThenable } from '../../../dtos/types'; -import { ISplitsEventEmitter } from '../../../readiness/types'; +import { IDefinitionsEventEmitter } from '../../../readiness/types'; import { timeout } from '../../../utils/promise/timeout'; -import { SDK_SPLITS_ARRIVED, FLAGS_UPDATE, SEGMENTS_UPDATE, CONFIGS_UPDATE } from '../../../readiness/constants'; +import { SDK_DEFINITIONS_ARRIVED, FLAGS_UPDATE, SEGMENTS_UPDATE, 
CONFIGS_UPDATE } from '../../../readiness/constants'; import { ILogger } from '../../../logger/types'; import { SYNC_FETCH, SYNC_UPDATE, SYNC_FETCH_FAILS, SYNC_FETCH_RETRY } from '../../../logger/constants'; import { startsWith } from '../../../utils/lang'; @@ -128,14 +128,14 @@ function convertInstantUpdateToDefinitionChanges(instantUpdate: InstantUpdate) { export function definitionChangesUpdaterFactory( log: ILogger, definitionChangesFetcher: IDefinitionChangesFetcher, - storage: Pick, + storage: Pick, splitFiltersValidation: ISplitFiltersValidation, - splitsEventEmitter?: ISplitsEventEmitter, + definitionsEventEmitter?: IDefinitionsEventEmitter, requestTimeoutBeforeReady = 0, retriesOnFailureBeforeReady = 0, isClientSide?: boolean ): DefinitionChangesUpdater { - const { splits, rbSegments, segments } = storage; + const { definitions, rbSegments, segments } = storage; let startingUp = true; @@ -183,7 +183,7 @@ export function definitionChangesUpdaterFactory( const { added, removed, names } = computeMutation(definitionChanges.d, usedSegments, splitFiltersValidation); updatedDefinitions = names; log.debug(SYNC_UPDATE, [definitionChangesFetcher.type, added.length, removed.length]); - ffUpdate = splits.update(added, removed, definitionChanges.d.till); + ffUpdate = definitions.update(added, removed, definitionChanges.d.till); } let rbsUpdate: MaybeThenable = false; @@ -201,9 +201,9 @@ export function definitionChangesUpdaterFactory( startingUp = false; - if (splitsEventEmitter) { - // To emit SDK_SPLITS_ARRIVED for server-side SDK, we must check that all registered segments have been fetched - return Promise.resolve(!splitsEventEmitter.splitsArrived || ((ffChanged || rbsChanged) && (isClientSide || checkAllSegmentsExist(segments)))) + if (definitionsEventEmitter) { + // To emit SDK_DEFINITIONS_ARRIVED for server-side SDK, we must check that all registered segments have been fetched + return Promise.resolve(!definitionsEventEmitter.definitionsArrived || ((ffChanged || 
rbsChanged) && (isClientSide || checkAllSegmentsExist(segments)))) .catch(() => false /** noop. just to handle a possible `checkAllSegmentsExist` rejection, before emitting SDK event */) .then(emitSplitsArrivedEvent => { // emit SDK events @@ -212,7 +212,7 @@ export function definitionChangesUpdaterFactory( type: updatedDefinitions.length > 0 ? definitionChangesFetcher.type === 'configs' ? CONFIGS_UPDATE : FLAGS_UPDATE : SEGMENTS_UPDATE, names: updatedDefinitions.length > 0 ? updatedDefinitions : [] }; - splitsEventEmitter.emit(SDK_SPLITS_ARRIVED, metadata); + definitionsEventEmitter.emit(SDK_DEFINITIONS_ARRIVED, metadata); } return true; }); @@ -234,6 +234,6 @@ export function definitionChangesUpdaterFactory( } // `getChangeNumber` never rejects or throws error - return Promise.all([splits.getChangeNumber(), rbSegments.getChangeNumber()]).then(_definitionChangesUpdater); + return Promise.all([definitions.getChangeNumber(), rbSegments.getChangeNumber()]).then(_definitionChangesUpdater); }; } diff --git a/src/sync/polling/updaters/mySegmentsUpdater.ts b/src/sync/polling/updaters/mySegmentsUpdater.ts index 98679b6c..09253a4a 100644 --- a/src/sync/polling/updaters/mySegmentsUpdater.ts +++ b/src/sync/polling/updaters/mySegmentsUpdater.ts @@ -8,7 +8,7 @@ import { SYNC_FETCH_RETRY } from '../../../logger/constants'; import { MySegmentsData } from '../types'; import { IMembershipsResponse } from '../../../dtos/types'; import { MEMBERSHIPS_LS_UPDATE } from '../../streaming/constants'; -import { usesSegmentsSync } from '../../../storages/AbstractSplitsCacheSync'; +import { usesSegmentsSync } from '../../../storages/AbstractDefinitionsCacheSync'; type IMySegmentsUpdater = (segmentsData?: MySegmentsData, noCache?: boolean, till?: number) => Promise diff --git a/src/sync/streaming/UpdateWorkers/SplitsUpdateWorker.ts b/src/sync/streaming/UpdateWorkers/DefinitionsUpdateWorker.ts similarity index 75% rename from src/sync/streaming/UpdateWorkers/SplitsUpdateWorker.ts rename to 
src/sync/streaming/UpdateWorkers/DefinitionsUpdateWorker.ts index 58db2806..5a573689 100644 --- a/src/sync/streaming/UpdateWorkers/SplitsUpdateWorker.ts +++ b/src/sync/streaming/UpdateWorkers/DefinitionsUpdateWorker.ts @@ -1,9 +1,9 @@ import { IRBSegment, IDefinition } from '../../../dtos/types'; import { STREAMING_PARSING_SPLIT_UPDATE } from '../../../logger/constants'; import { ILogger } from '../../../logger/types'; -import { SDK_SPLITS_ARRIVED } from '../../../readiness/constants'; -import { ISplitsEventEmitter } from '../../../readiness/types'; -import { IRBSegmentsCacheSync, ISplitsCacheSync, IStorageSync } from '../../../storages/types'; +import { SDK_DEFINITIONS_ARRIVED } from '../../../readiness/constants'; +import { IDefinitionsEventEmitter } from '../../../readiness/types'; +import { IRBSegmentsCacheSync, IDefinitionsCacheSync, IStorageSync } from '../../../storages/types'; import { ITelemetryTracker } from '../../../trackers/types'; import { Backoff } from '../../../utils/Backoff'; import { SPLITS } from '../../../utils/constants'; @@ -16,22 +16,22 @@ import { FETCH_BACKOFF_BASE, FETCH_BACKOFF_MAX_WAIT, FETCH_BACKOFF_MAX_RETRIES } import { IUpdateWorker } from './types'; /** - * SplitsUpdateWorker factory + * DefinitionsUpdateWorker factory */ -export function SplitsUpdateWorker(log: ILogger, storage: IStorageSync, definitionsSyncTask: IDefinitionsSyncTask, splitsEventEmitter: ISplitsEventEmitter, telemetryTracker: ITelemetryTracker, segmentsSyncTask?: ISegmentsSyncTask): IUpdateWorker<[updateData: ISplitUpdateData]> & { killSplit(event: ISplitKillData): void } { +export function DefinitionsUpdateWorker(log: ILogger, storage: IStorageSync, definitionsSyncTask: IDefinitionsSyncTask, definitionsEventEmitter: IDefinitionsEventEmitter, telemetryTracker: ITelemetryTracker, segmentsSyncTask?: ISegmentsSyncTask): IUpdateWorker<[updateData: ISplitUpdateData]> & { killDefinition(event: ISplitKillData): void } { - const ff = SplitsUpdateWorker(storage.splits); - 
const rbs = SplitsUpdateWorker(storage.rbSegments); + const ff = DefinitionsUpdateWorker(storage.definitions); + const rbs = DefinitionsUpdateWorker(storage.rbSegments); - function SplitsUpdateWorker(cache: ISplitsCacheSync | IRBSegmentsCacheSync) { + function DefinitionsUpdateWorker(cache: IDefinitionsCacheSync | IRBSegmentsCacheSync) { let maxChangeNumber = -1; let handleNewEvent = false; let isHandlingEvent: boolean; let cdnBypass: boolean; let instantUpdate: InstantUpdate | undefined; - const backoff = new Backoff(__handleSplitUpdateCall, FETCH_BACKOFF_BASE, FETCH_BACKOFF_MAX_WAIT); + const backoff = new Backoff(__handleDefinitionUpdateCall, FETCH_BACKOFF_BASE, FETCH_BACKOFF_MAX_WAIT); - function __handleSplitUpdateCall() { + function __handleDefinitionUpdateCall() { isHandlingEvent = true; if (maxChangeNumber > cache.getChangeNumber()) { handleNewEvent = false; @@ -39,7 +39,7 @@ export function SplitsUpdateWorker(log: ILogger, storage: IStorageSync, definiti definitionsSyncTask.execute(true, cdnBypass ? maxChangeNumber : undefined, instantUpdate).then(() => { if (!isHandlingEvent) return; // halt if `stop` has been called if (handleNewEvent) { - __handleSplitUpdateCall(); + __handleDefinitionUpdateCall(); } else { if (instantUpdate) telemetryTracker.trackUpdatesFromSSE(SPLITS); // fetch new registered segments for server-side API. 
Not retrying on error @@ -64,7 +64,7 @@ export function SplitsUpdateWorker(log: ILogger, storage: IStorageSync, definiti } else { backoff.reset(); cdnBypass = true; - __handleSplitUpdateCall(); + __handleDefinitionUpdateCall(); } } }); @@ -93,7 +93,7 @@ export function SplitsUpdateWorker(log: ILogger, storage: IStorageSync, definiti instantUpdate = { payload, changeNumber, type }; } - if (backoff.timeoutID || !isHandlingEvent) __handleSplitUpdateCall(); + if (backoff.timeoutID || !isHandlingEvent) __handleDefinitionUpdateCall(); backoff.reset(); }, stop() { @@ -128,10 +128,10 @@ export function SplitsUpdateWorker(log: ILogger, storage: IStorageSync, definiti * @param splitName - name of split to kill * @param defaultTreatment - default treatment value */ - killSplit({ changeNumber, splitName, defaultTreatment }: ISplitKillData) { - if (storage.splits.killLocally(splitName, defaultTreatment, changeNumber)) { + killDefinition({ changeNumber, splitName, defaultTreatment }: ISplitKillData) { + if (storage.definitions.killLocally(splitName, defaultTreatment, changeNumber)) { // trigger an SDK_UPDATE if Split was killed locally - splitsEventEmitter.emit(SDK_SPLITS_ARRIVED, true); + definitionsEventEmitter.emit(SDK_DEFINITIONS_ARRIVED, true); } // queues the SplitChanges fetch (only if changeNumber is newer) ff.put({ changeNumber } as ISplitUpdateData); diff --git a/src/sync/streaming/UpdateWorkers/__tests__/SplitsUpdateWorker.spec.ts b/src/sync/streaming/UpdateWorkers/__tests__/DefinitionsUpdateWorker.spec.ts similarity index 69% rename from src/sync/streaming/UpdateWorkers/__tests__/SplitsUpdateWorker.spec.ts rename to src/sync/streaming/UpdateWorkers/__tests__/DefinitionsUpdateWorker.spec.ts index 2bcffbd4..f1103c2b 100644 --- a/src/sync/streaming/UpdateWorkers/__tests__/SplitsUpdateWorker.spec.ts +++ b/src/sync/streaming/UpdateWorkers/__tests__/DefinitionsUpdateWorker.spec.ts @@ -1,8 +1,8 @@ // @ts-nocheck -import { SDK_SPLITS_ARRIVED } from 
'../../../../readiness/constants'; -import { SplitsCacheInMemory } from '../../../../storages/inMemory/SplitsCacheInMemory'; +import { SDK_DEFINITIONS_ARRIVED } from '../../../../readiness/constants'; +import { DefinitionsCacheInMemory } from '../../../../storages/inMemory/DefinitionsCacheInMemory'; import { RBSegmentsCacheInMemory } from '../../../../storages/inMemory/RBSegmentsCacheInMemory'; -import { SplitsUpdateWorker } from '../SplitsUpdateWorker'; +import { DefinitionsUpdateWorker } from '../DefinitionsUpdateWorker'; import { FETCH_BACKOFF_MAX_RETRIES } from '../constants'; import { loggerMock } from '../../../../logger/__tests__/sdkLogger.mock'; import { syncTaskFactory } from '../../../syncTask'; @@ -10,7 +10,7 @@ import { Backoff } from '../../../../utils/Backoff'; import { splitNotifications } from '../../../streaming/__tests__/dataMocks'; import { telemetryTrackerFactory } from '../../../../trackers/telemetryTracker'; -function splitsSyncTaskMock(splitStorage: SplitsCacheInMemory, changeNumbers = []) { +function splitsSyncTaskMock(splitStorage: DefinitionsCacheInMemory, changeNumbers = []) { const __splitsUpdaterCalls = []; @@ -38,12 +38,12 @@ function splitsSyncTaskMock(splitStorage: SplitsCacheInMemory, changeNumbers = [ }; } -const splitsEventEmitterMock = { +const definitionsEventEmitterMock = { emit: jest.fn(), }; function assertKilledSplit(cache, changeNumber, splitName, defaultTreatment) { - const split = cache.getSplit(splitName); + const split = cache.get(splitName); expect(split.killed).toBe(true); // split must be killed expect(split.defaultTreatment).toBe(defaultTreatment); // split must have the given default treatment expect(split.changeNumber).toBe(changeNumber); // split must have the given change number @@ -52,10 +52,10 @@ function assertKilledSplit(cache, changeNumber, splitName, defaultTreatment) { const telemetryTracker = telemetryTrackerFactory(); // no-op telemetry tracker -describe('SplitsUpdateWorker', () => { 
+describe('DefinitionsUpdateWorker', () => { const storage = { - splits: new SplitsCacheInMemory(), + definitions: new DefinitionsCacheInMemory(), rbSegments: new RBSegmentsCacheInMemory() }; @@ -63,17 +63,17 @@ describe('SplitsUpdateWorker', () => { Backoff.__TEST__BASE_MILLIS = undefined; Backoff.__TEST__MAX_MILLIS = undefined; - storage.splits.clear(); + storage.definitions.clear(); storage.rbSegments.clear(); }); test('put', async () => { // setup - const splitsSyncTask = splitsSyncTaskMock(storage.splits); + const splitsSyncTask = splitsSyncTaskMock(storage.definitions); Backoff.__TEST__BASE_MILLIS = 1; // retry immediately - const splitUpdateWorker = SplitsUpdateWorker(loggerMock, storage, splitsSyncTask, telemetryTracker); + const splitUpdateWorker = DefinitionsUpdateWorker(loggerMock, storage, splitsSyncTask, telemetryTracker); // assert calling `splitsSyncTask.execute` if `isExecuting` is false expect(splitsSyncTask.isExecuting()).toBe(false); @@ -110,8 +110,8 @@ describe('SplitsUpdateWorker', () => { test('put, backoff', async () => { // setup Backoff.__TEST__BASE_MILLIS = 50; - const splitsSyncTask = splitsSyncTaskMock(storage.splits, [90, 90, 90]); - const splitUpdateWorker = SplitsUpdateWorker(loggerMock, storage, splitsSyncTask, telemetryTracker); + const splitsSyncTask = splitsSyncTaskMock(storage.definitions, [90, 90, 90]); + const splitUpdateWorker = DefinitionsUpdateWorker(loggerMock, storage, splitsSyncTask, telemetryTracker); // while fetch fails, should retry with backoff splitUpdateWorker.put({ changeNumber: 100 }); @@ -128,8 +128,8 @@ describe('SplitsUpdateWorker', () => { // setup Backoff.__TEST__BASE_MILLIS = 10; // 10 millis instead of 10 sec Backoff.__TEST__MAX_MILLIS = 60; // 60 millis instead of 1 min - const splitsSyncTask = splitsSyncTaskMock(storage.splits, [...Array(FETCH_BACKOFF_MAX_RETRIES).fill(90), 90, 100]); // 12 executions. 
Last one is valid - const splitUpdateWorker = SplitsUpdateWorker(loggerMock, storage, splitsSyncTask, telemetryTracker); + const splitsSyncTask = splitsSyncTaskMock(storage.definitions, [...Array(FETCH_BACKOFF_MAX_RETRIES).fill(90), 90, 100]); // 12 executions. Last one is valid + const splitUpdateWorker = DefinitionsUpdateWorker(loggerMock, storage, splitsSyncTask, telemetryTracker); splitUpdateWorker.put({ changeNumber: 100 }); // queued @@ -152,8 +152,8 @@ describe('SplitsUpdateWorker', () => { // setup Backoff.__TEST__BASE_MILLIS = 10; // 10 millis instead of 10 sec Backoff.__TEST__MAX_MILLIS = 60; // 60 millis instead of 1 min - const splitsSyncTask = splitsSyncTaskMock(storage.splits, Array(FETCH_BACKOFF_MAX_RETRIES * 2).fill(90)); // 20 executions. No one is valid - const splitUpdateWorker = SplitsUpdateWorker(loggerMock, storage, splitsSyncTask, telemetryTracker); + const splitsSyncTask = splitsSyncTaskMock(storage.definitions, Array(FETCH_BACKOFF_MAX_RETRIES * 2).fill(90)); // 20 executions. 
No one is valid + const splitUpdateWorker = DefinitionsUpdateWorker(loggerMock, storage, splitsSyncTask, telemetryTracker); splitUpdateWorker.put({ changeNumber: 100 }); // queued @@ -171,39 +171,39 @@ describe('SplitsUpdateWorker', () => { expect(splitsSyncTask.execute).toBeCalledTimes(1); }); - test('killSplit', async () => { + test('killDefinition', async () => { // setup - storage.splits.addSplit({ name: 'something' }); - storage.splits.addSplit({ name: 'something else' }); + storage.definitions.add({ name: 'something' }); + storage.definitions.add({ name: 'something else' }); - const splitsSyncTask = splitsSyncTaskMock(storage.splits); - const splitUpdateWorker = SplitsUpdateWorker(loggerMock, storage, splitsSyncTask, splitsEventEmitterMock, telemetryTracker); + const splitsSyncTask = splitsSyncTaskMock(storage.definitions); + const splitUpdateWorker = DefinitionsUpdateWorker(loggerMock, storage, splitsSyncTask, definitionsEventEmitterMock, telemetryTracker); - // assert killing split locally, emitting SDK_SPLITS_ARRIVED event, and synchronizing splits if changeNumber is new - splitUpdateWorker.killSplit({ changeNumber: 100, splitName: 'something', defaultTreatment: 'off' }); // splitsCache.killLocally is synchronous + // assert killing split locally, emitting SDK_DEFINITIONS_ARRIVED event, and synchronizing splits if changeNumber is new + splitUpdateWorker.killDefinition({ changeNumber: 100, splitName: 'something', defaultTreatment: 'off' }); // splitsCache.killLocally is synchronous expect(splitsSyncTask.execute).toBeCalledTimes(1); // synchronizes splits if `isExecuting` is false - expect(splitsEventEmitterMock.emit.mock.calls).toEqual([[SDK_SPLITS_ARRIVED, true]]); // emits `SDK_SPLITS_ARRIVED` with `isSplitKill` flag in true, if split kill resolves with update - assertKilledSplit(storage.splits, 100, 'something', 'off'); + expect(definitionsEventEmitterMock.emit.mock.calls).toEqual([[SDK_DEFINITIONS_ARRIVED, true]]); // emits `SDK_DEFINITIONS_ARRIVED` 
with `isSplitKill` flag in true, if split kill resolves with update + assertKilledSplit(storage.definitions, 100, 'something', 'off'); - // assert not killing split locally, not emitting SDK_SPLITS_ARRIVED event, and not synchronizes splits, if changeNumber is old + // assert not killing split locally, not emitting SDK_DEFINITIONS_ARRIVED event, and not synchronizes splits, if changeNumber is old splitsSyncTask.__resolveSplitsUpdaterCall(100); await new Promise(res => setTimeout(res)); splitsSyncTask.execute.mockClear(); - splitsEventEmitterMock.emit.mockClear(); - splitUpdateWorker.killSplit({ changeNumber: 90, splitName: 'something', defaultTreatment: 'on' }); + definitionsEventEmitterMock.emit.mockClear(); + splitUpdateWorker.killDefinition({ changeNumber: 90, splitName: 'something', defaultTreatment: 'on' }); await new Promise(res => setTimeout(res)); expect(splitsSyncTask.execute).toBeCalledTimes(0); // doesn't synchronize splits if killLocally resolved without update - expect(splitsEventEmitterMock.emit).toBeCalledTimes(0); // doesn't emit `SDK_SPLITS_ARRIVED` if killLocally resolved without update + expect(definitionsEventEmitterMock.emit).toBeCalledTimes(0); // doesn't emit `SDK_DEFINITIONS_ARRIVED` if killLocally resolved without update - assertKilledSplit(storage.splits, 100, 'something', 'off'); // calling `killLocally` with an old changeNumber made no effect + assertKilledSplit(storage.definitions, 100, 'something', 'off'); // calling `killLocally` with an old changeNumber made no effect }); test('stop', async () => { // setup - const splitsSyncTask = splitsSyncTaskMock(storage.splits, [95]); + const splitsSyncTask = splitsSyncTaskMock(storage.definitions, [95]); Backoff.__TEST__BASE_MILLIS = 1; - const splitUpdateWorker = SplitsUpdateWorker(loggerMock, storage, splitsSyncTask, telemetryTracker); + const splitUpdateWorker = DefinitionsUpdateWorker(loggerMock, storage, splitsSyncTask, telemetryTracker); splitUpdateWorker.put({ changeNumber: 100 }); @@ 
-216,9 +216,9 @@ describe('SplitsUpdateWorker', () => { test('put, avoid fetching if payload sent', async () => { splitNotifications.forEach(notification => { - const pcn = storage.splits.getChangeNumber(); - const splitsSyncTask = splitsSyncTaskMock(storage.splits); - const splitUpdateWorker = SplitsUpdateWorker(loggerMock, storage, splitsSyncTask, telemetryTracker); + const pcn = storage.definitions.getChangeNumber(); + const splitsSyncTask = splitsSyncTaskMock(storage.definitions); + const splitUpdateWorker = DefinitionsUpdateWorker(loggerMock, storage, splitsSyncTask, telemetryTracker); const payload = notification.decoded; const changeNumber = payload.changeNumber; splitUpdateWorker.put({ changeNumber, pcn, d: notification.data, c: notification.compression }); // queued @@ -232,11 +232,11 @@ describe('SplitsUpdateWorker', () => { let ccn = 103; let pcn = 104; let changeNumber = 105; - storage.splits.setChangeNumber(ccn); + storage.definitions.setChangeNumber(ccn); const notification = splitNotifications[0]; - let splitsSyncTask = splitsSyncTaskMock(storage.splits); - let splitUpdateWorker = SplitsUpdateWorker(loggerMock, storage, splitsSyncTask, telemetryTracker); + let splitsSyncTask = splitsSyncTaskMock(storage.definitions); + let splitUpdateWorker = DefinitionsUpdateWorker(loggerMock, storage, splitsSyncTask, telemetryTracker); splitUpdateWorker.put({ changeNumber, pcn, d: notification.data, c: notification.compression }); expect(splitsSyncTask.execute).toBeCalledTimes(1); expect(splitsSyncTask.execute.mock.calls[0]).toEqual([true, undefined, undefined]); @@ -246,10 +246,10 @@ describe('SplitsUpdateWorker', () => { ccn = 110; pcn = 0; changeNumber = 111; - storage.splits.setChangeNumber(ccn); + storage.definitions.setChangeNumber(ccn); - splitsSyncTask = splitsSyncTaskMock(storage.splits); - splitUpdateWorker = SplitsUpdateWorker(loggerMock, storage, splitsSyncTask, telemetryTracker); + splitsSyncTask = splitsSyncTaskMock(storage.definitions); + 
splitUpdateWorker = DefinitionsUpdateWorker(loggerMock, storage, splitsSyncTask, telemetryTracker); splitUpdateWorker.put({ changeNumber, pcn, d: notification.data, c: notification.compression }); expect(splitsSyncTask.execute).toBeCalledTimes(1); expect(splitsSyncTask.execute.mock.calls[0]).toEqual([true, undefined, undefined]); @@ -259,10 +259,10 @@ describe('SplitsUpdateWorker', () => { ccn = 120; pcn = 120; changeNumber = 121; - storage.splits.setChangeNumber(ccn); + storage.definitions.setChangeNumber(ccn); - splitsSyncTask = splitsSyncTaskMock(storage.splits); - splitUpdateWorker = SplitsUpdateWorker(loggerMock, storage, splitsSyncTask, telemetryTracker); + splitsSyncTask = splitsSyncTaskMock(storage.definitions); + splitUpdateWorker = DefinitionsUpdateWorker(loggerMock, storage, splitsSyncTask, telemetryTracker); splitUpdateWorker.put({ changeNumber, pcn, d: notification.data, c: notification.compression }); expect(splitsSyncTask.execute).toBeCalledTimes(1); expect(splitsSyncTask.execute.mock.calls[0]).toEqual([true, undefined, { payload: notification.decoded, changeNumber }]); diff --git a/src/sync/streaming/pushManager.ts b/src/sync/streaming/pushManager.ts index b1d4b730..535e8f1c 100644 --- a/src/sync/streaming/pushManager.ts +++ b/src/sync/streaming/pushManager.ts @@ -6,7 +6,7 @@ import { Backoff } from '../../utils/Backoff'; import { SSEHandlerFactory } from './SSEHandler'; import { MySegmentsUpdateWorker } from './UpdateWorkers/MySegmentsUpdateWorker'; import { SegmentsUpdateWorker } from './UpdateWorkers/SegmentsUpdateWorker'; -import { SplitsUpdateWorker } from './UpdateWorkers/SplitsUpdateWorker'; +import { DefinitionsUpdateWorker } from './UpdateWorkers/DefinitionsUpdateWorker'; import { authenticateFactory, hashUserKey } from './AuthClient'; import { forOwn } from '../../utils/lang'; import { SSEClient } from './SSEClient'; @@ -55,8 +55,8 @@ export function pushManagerFactory( // init workers // MySegmentsUpdateWorker (client-side) are initiated 
in `add` method const segmentsUpdateWorker = userKey ? undefined : SegmentsUpdateWorker(log, pollingManager.segmentsSyncTask as ISegmentsSyncTask, storage.segments); - // For server-side we pass the segmentsSyncTask, used by SplitsUpdateWorker to fetch new segments - const splitsUpdateWorker = SplitsUpdateWorker(log, storage, pollingManager.definitionsSyncTask, readiness.splits, telemetryTracker, userKey ? undefined : pollingManager.segmentsSyncTask as ISegmentsSyncTask); + // For server-side we pass the segmentsSyncTask, used by DefinitionsUpdateWorker to fetch new segments + const definitionsUpdateWorker = DefinitionsUpdateWorker(log, storage, pollingManager.definitionsSyncTask, readiness.definitions, telemetryTracker, userKey ? undefined : pollingManager.segmentsSyncTask as ISegmentsSyncTask); // [Only for client-side] map of hashes to user keys, to dispatch membership update events to the corresponding MySegmentsUpdateWorker const userKeyHashes: Record<string, string> = {}; @@ -168,7 +168,7 @@ export function pushManagerFactory( // cancel scheduled fetch retries of Splits, Segments, and MySegments Update Workers function stopWorkers() { - splitsUpdateWorker.stop(); + definitionsUpdateWorker.stop(); if (userKey) forOwn(clients, ({ worker }) => worker.stop()); else segmentsUpdateWorker!.stop(); } @@ -218,9 +218,9 @@ export function pushManagerFactory( /** Functions related to synchronization (Queues and Workers in the spec) */ - pushEmitter.on(SPLIT_KILL, splitsUpdateWorker.killSplit); - pushEmitter.on(SPLIT_UPDATE, splitsUpdateWorker.put); - pushEmitter.on(RB_SEGMENT_UPDATE, splitsUpdateWorker.put); + pushEmitter.on(SPLIT_KILL, definitionsUpdateWorker.killDefinition); + pushEmitter.on(SPLIT_UPDATE, definitionsUpdateWorker.put); + pushEmitter.on(RB_SEGMENT_UPDATE, definitionsUpdateWorker.put); function handleMySegmentsUpdate(parsedData: IMembershipMSUpdateData | IMembershipLSUpdateData) { switch (parsedData.u) { diff --git a/src/sync/syncManagerOnline.ts
b/src/sync/syncManagerOnline.ts index f3709fa1..e2f24747 100644 --- a/src/sync/syncManagerOnline.ts +++ b/src/sync/syncManagerOnline.ts @@ -9,8 +9,8 @@ import { SYNC_START_POLLING, SYNC_CONTINUE_POLLING, SYNC_STOP_POLLING } from '.. import { isConsentGranted } from '../consent'; import { POLLING, STREAMING, SYNC_MODE_UPDATE } from '../utils/constants'; import { ISdkFactoryContextSync } from '../sdkFactory/types'; -import { SDK_SPLITS_CACHE_LOADED } from '../readiness/constants'; -import { usesSegmentsSync } from '../storages/AbstractSplitsCacheSync'; +import { SDK_DEFINITIONS_CACHE_LOADED } from '../readiness/constants'; +import { usesSegmentsSync } from '../storages/AbstractDefinitionsCacheSync'; import { splitChangesFetcherFactory } from './polling/fetchers/splitChangesFetcher'; import { IDefinitionChangesFetcher } from './polling/fetchers/types'; @@ -102,7 +102,7 @@ export function syncManagerOnlineFactory( if (startFirstTime) { // Emits SDK_READY_FROM_CACHE if (!cacheMetadata.initialCacheLoad) { - readiness.splits.emit(SDK_SPLITS_CACHE_LOADED, cacheMetadata); + readiness.definitions.emit(SDK_DEFINITIONS_CACHE_LOADED, cacheMetadata); } } diff --git a/src/utils/inputValidation/__tests__/trafficTypeExistence.spec.ts b/src/utils/inputValidation/__tests__/trafficTypeExistence.spec.ts index a71912cf..c6fcb434 100644 --- a/src/utils/inputValidation/__tests__/trafficTypeExistence.spec.ts +++ b/src/utils/inputValidation/__tests__/trafficTypeExistence.spec.ts @@ -1,5 +1,5 @@ import { IReadinessManager } from '../../../readiness/types'; -import { ISplitsCacheBase } from '../../../storages/types'; +import { IDefinitionsCacheBase } from '../../../storages/types'; import { LOCALHOST_MODE, STANDALONE_MODE } from '../../constants'; import { thenable } from '../../promise/thenable'; import { WARN_NOT_EXISTENT_TT } from '../../../logger/constants'; @@ -25,7 +25,7 @@ const splitsCacheMock = { } return false; }) -} as ISplitsCacheBase & { trafficTypeExists: jest.Mock }; +} as 
IDefinitionsCacheBase & { trafficTypeExists: jest.Mock }; /** Test target */ import { validateTrafficTypeExistence } from '../trafficTypeExistence'; diff --git a/src/utils/inputValidation/trafficTypeExistence.ts b/src/utils/inputValidation/trafficTypeExistence.ts index 8040f849..07741e22 100644 --- a/src/utils/inputValidation/trafficTypeExistence.ts +++ b/src/utils/inputValidation/trafficTypeExistence.ts @@ -1,6 +1,6 @@ import { thenable } from '../promise/thenable'; import { LOCALHOST_MODE } from '../constants'; -import { ISplitsCacheBase } from '../../storages/types'; +import { IDefinitionsCacheBase } from '../../storages/types'; import { IReadinessManager } from '../../readiness/types'; import SplitIO from '../../../types/splitio'; import { MaybeThenable } from '../../dtos/types'; @@ -14,7 +14,7 @@ function logTTExistenceWarning(log: ILogger, maybeTT: string, method: string) { /** * Separated from the previous method since on some cases it'll be async. */ -export function validateTrafficTypeExistence(log: ILogger, readinessManager: IReadinessManager, splitsCache: ISplitsCacheBase, mode: SplitIO.SDKMode, maybeTT: string, method: string): MaybeThenable<boolean> { +export function validateTrafficTypeExistence(log: ILogger, readinessManager: IReadinessManager, splitsCache: IDefinitionsCacheBase, mode: SplitIO.SDKMode, maybeTT: string, method: string): MaybeThenable<boolean> { // If not ready or in localhost mode, we won't run the validation if (!readinessManager.isReady() || mode === LOCALHOST_MODE) return true;