diff --git a/src/evaluator/matchers/__tests__/dependency.spec.ts b/src/evaluator/matchers/__tests__/dependency.spec.ts index 74d833d6..e9e9babc 100644 --- a/src/evaluator/matchers/__tests__/dependency.spec.ts +++ b/src/evaluator/matchers/__tests__/dependency.spec.ts @@ -5,9 +5,7 @@ import { IMatcher, IMatcherDto } from '../../types'; import { IStorageSync } from '../../../storages/types'; import { loggerMock } from '../../../logger/__tests__/sdkLogger.mock'; import { ISplit } from '../../../dtos/types'; - -const ALWAYS_ON_SPLIT = { 'trafficTypeName': 'user', 'name': 'always-on', 'trafficAllocation': 100, 'trafficAllocationSeed': 1012950810, 'seed': -725161385, 'status': 'ACTIVE', 'killed': false, 'defaultTreatment': 'off', 'changeNumber': 1494364996459, 'algo': 2, 'conditions': [{ 'conditionType': 'ROLLOUT', 'matcherGroup': { 'combiner': 'AND', 'matchers': [{ 'keySelector': { 'trafficType': 'user', 'attribute': null }, 'matcherType': 'ALL_KEYS', 'negate': false, 'userDefinedSegmentMatcherData': null, 'whitelistMatcherData': null, 'unaryNumericMatcherData': null, 'betweenMatcherData': null }] }, 'partitions': [{ 'treatment': 'on', 'size': 100 }, { 'treatment': 'off', 'size': 0 }], 'label': 'in segment all' }], 'sets':[] } as ISplit; -const ALWAYS_OFF_SPLIT = { 'trafficTypeName': 'user', 'name': 'always-off', 'trafficAllocation': 100, 'trafficAllocationSeed': -331690370, 'seed': 403891040, 'status': 'ACTIVE', 'killed': false, 'defaultTreatment': 'on', 'changeNumber': 1494365020316, 'algo': 2, 'conditions': [{ 'conditionType': 'ROLLOUT', 'matcherGroup': { 'combiner': 'AND', 'matchers': [{ 'keySelector': { 'trafficType': 'user', 'attribute': null }, 'matcherType': 'ALL_KEYS', 'negate': false, 'userDefinedSegmentMatcherData': null, 'whitelistMatcherData': null, 'unaryNumericMatcherData': null, 'betweenMatcherData': null }] }, 'partitions': [{ 'treatment': 'on', 'size': 0 }, { 'treatment': 'off', 'size': 100 }], 'label': 'in segment all' }], 'sets':[] } as ISplit; +import { ALWAYS_ON_SPLIT, ALWAYS_OFF_SPLIT } from '../../../storages/__tests__/testUtils'; const STORED_SPLITS: Record<string, ISplit> = { 'always-on': ALWAYS_ON_SPLIT, diff --git a/src/evaluator/matchers/__tests__/rbsegment.spec.ts b/src/evaluator/matchers/__tests__/rbsegment.spec.ts new file mode 100644 index 00000000..c662776d --- /dev/null +++ b/src/evaluator/matchers/__tests__/rbsegment.spec.ts @@ -0,0 +1,243 @@ +import { matcherTypes } from '../matcherTypes'; +import { matcherFactory } from '..'; +import { evaluateFeature } from '../../index'; +import { IMatcherDto } from '../../types'; +import { loggerMock } from '../../../logger/__tests__/sdkLogger.mock'; +import { IRBSegment, ISplit } from '../../../dtos/types'; +import { IStorageAsync, IStorageSync } from '../../../storages/types'; +import { thenable } from '../../../utils/promise/thenable'; +import { ALWAYS_ON_SPLIT } from '../../../storages/__tests__/testUtils'; + +const STORED_SPLITS: Record<string, ISplit> = { + 'always-on': ALWAYS_ON_SPLIT +}; + +const STORED_SEGMENTS: Record<string, Set<string>> = { + 'segment_test': new Set(['emi@split.io']), + 'regular_segment': new Set(['nadia@split.io']) +}; + +const STORED_RBSEGMENTS: Record<string, IRBSegment> = { + 'mauro_rule_based_segment': { + changeNumber: 5, + name: 'mauro_rule_based_segment', + status: 'ACTIVE', + excluded: { + keys: ['mauro@split.io', 'gaston@split.io'], + segments: ['segment_test'] + }, + conditions: [ + { + matcherGroup: { + combiner: 'AND', + matchers: [ + { + keySelector: { + trafficType: 'user', + attribute: 'location', + }, + matcherType: 'WHITELIST', + negate: false, + 
whitelistMatcherData: { + whitelist: [ + 'mdp', + 'tandil', + 'bsas' + ] + } + }, + { + keySelector: { + trafficType: 'user', + attribute: null + }, + matcherType: 'ENDS_WITH', + negate: false, + whitelistMatcherData: { + whitelist: [ + '@split.io' + ] + } + } + ] + } + }, + { + matcherGroup: { + combiner: 'AND', + matchers: [ + { + keySelector: { + trafficType: 'user', + attribute: null + }, + matcherType: 'IN_SEGMENT', + negate: false, + userDefinedSegmentMatcherData: { + segmentName: 'regular_segment' + } + } + ] + } + } + ] + }, + 'depend_on_always_on': { + name: 'depend_on_always_on', + changeNumber: 123, + status: 'ACTIVE', + excluded: { + keys: [], + segments: [] + }, + conditions: [{ + matcherGroup: { + combiner: 'AND', + matchers: [{ + matcherType: 'IN_SPLIT_TREATMENT', + keySelector: { + trafficType: 'user', + attribute: null + }, + negate: false, + dependencyMatcherData: { + split: 'always-on', + treatments: [ + 'on', + ] + } + }] + } + }] + }, + 'depend_on_mauro_rule_based_segment': { + name: 'depend_on_mauro_rule_based_segment', + changeNumber: 123, + status: 'ACTIVE', + excluded: { + keys: [], + segments: [] + }, + conditions: [{ + matcherGroup: { + combiner: 'AND', + matchers: [{ + matcherType: 'IN_RULE_BASED_SEGMENT', + keySelector: { + trafficType: 'user', + attribute: null + }, + negate: false, + userDefinedSegmentMatcherData: { + segmentName: 'mauro_rule_based_segment' + } + }] + } + }] + }, +}; + +const mockStorageSync = { + isSync: true, + splits: { + getSplit(name: string) { + return STORED_SPLITS[name]; + } + }, + segments: { + isInSegment(segmentName: string, matchingKey: string) { + return STORED_SEGMENTS[segmentName] ? STORED_SEGMENTS[segmentName].has(matchingKey) : false; + } + }, + rbSegments: { + get(rbsegmentName: string) { + return STORED_RBSEGMENTS[rbsegmentName]; + } + } +} as unknown as IStorageSync; + +const mockStorageAsync = { + isSync: false, + splits: { + getSplit(name: string) { + return Promise.resolve(STORED_SPLITS[name]); + } + }, + segments: { + isInSegment(segmentName: string, matchingKey: string) { + return Promise.resolve(STORED_SEGMENTS[segmentName] ? 
STORED_SEGMENTS[segmentName].has(matchingKey) : false); + } + }, + rbSegments: { + get(rbsegmentName: string) { + return Promise.resolve(STORED_RBSEGMENTS[rbsegmentName]); + } + } +} as unknown as IStorageAsync; + +describe.each([ + { mockStorage: mockStorageSync, isAsync: false }, + { mockStorage: mockStorageAsync, isAsync: true } +])('MATCHER IN_RULE_BASED_SEGMENT', ({ mockStorage, isAsync }) => { + test('should support excluded keys, excluded segments, and multiple conditions', async () => { + const matcher = matcherFactory(loggerMock, { + type: matcherTypes.IN_RULE_BASED_SEGMENT, + value: 'mauro_rule_based_segment' + } as IMatcherDto, mockStorage)!; + + const dependentMatcher = matcherFactory(loggerMock, { + type: matcherTypes.IN_RULE_BASED_SEGMENT, + value: 'depend_on_mauro_rule_based_segment' + } as IMatcherDto, mockStorage)!; + + [matcher, dependentMatcher].forEach(async matcher => { + + // should return false if the provided key is excluded (even if some condition is met) + let match = matcher({ key: 'mauro@split.io', attributes: { location: 'mdp' } }, evaluateFeature); + expect(thenable(match)).toBe(isAsync); + expect(await match).toBe(false); + + // should return false if the provided key is in some excluded segment (even if some condition is met) + match = matcher({ key: 'emi@split.io', attributes: { location: 'tandil' } }, evaluateFeature); + expect(thenable(match)).toBe(isAsync); + expect(await match).toBe(false); + + // should return false if doesn't match any condition + match = matcher({ key: 'zeta@split.io' }, evaluateFeature); + expect(thenable(match)).toBe(isAsync); + expect(await match).toBe(false); + match = matcher({ key: { matchingKey: 'zeta@split.io', bucketingKey: '123' }, attributes: { location: 'italy' } }, evaluateFeature); + expect(thenable(match)).toBe(isAsync); + expect(await match).toBe(false); + + // should return true if match the first condition: location attribute in whitelist and key ends with '@split.io' + match = matcher({ key: 'emma@split.io', attributes: { location: 'tandil' } }, evaluateFeature); + expect(thenable(match)).toBe(isAsync); + expect(await match).toBe(true); + + // should return true if match the second condition: key in regular_segment + match = matcher({ key: { matchingKey: 'nadia@split.io', bucketingKey: '123' }, attributes: { location: 'mdp' } }, evaluateFeature); + expect(thenable(match)).toBe(isAsync); + expect(await match).toBe(true); + }); + }); + + test('edge cases', async () => { + const matcherNotExist = matcherFactory(loggerMock, { + type: matcherTypes.IN_RULE_BASED_SEGMENT, + value: 'non_existent_segment' + } as IMatcherDto, mockStorageSync)!; + + // should return false if the provided segment does not exist + expect(await matcherNotExist({ key: 'a-key' }, evaluateFeature)).toBe(false); + + const matcherTrueAlwaysOn = matcherFactory(loggerMock, { + type: matcherTypes.IN_RULE_BASED_SEGMENT, + value: 'depend_on_always_on' + } as IMatcherDto, mockStorageSync)!; + + // should support feature flag dependency matcher + expect(await matcherTrueAlwaysOn({ key: 'a-key' }, evaluateFeature)).toBe(true); // Parent split returns one of the expected treatments, so the matcher returns true + }); + +}); diff --git a/src/evaluator/matchers/index.ts b/src/evaluator/matchers/index.ts index 3fd840f4..f54cc313 100644 --- a/src/evaluator/matchers/index.ts +++ b/src/evaluator/matchers/index.ts @@ -24,6 +24,7 @@ import { inListSemverMatcherContext } from './semver_inlist'; import { IStorageAsync, IStorageSync } from '../../storages/types'; import 
{ IMatcher, IMatcherDto } from '../types'; import { ILogger } from '../../logger/types'; +import { ruleBasedSegmentMatcherContext } from './rbsegment'; const matchers = [ undefined, // UNDEFINED: 0 @@ -50,6 +51,7 @@ const matchers = [ betweenSemverMatcherContext, // BETWEEN_SEMVER: 21 inListSemverMatcherContext, // IN_LIST_SEMVER: 22 largeSegmentMatcherContext, // IN_LARGE_SEGMENT: 23 + ruleBasedSegmentMatcherContext // IN_RULE_BASED_SEGMENT: 24 ]; /** diff --git a/src/evaluator/matchers/matcherTypes.ts b/src/evaluator/matchers/matcherTypes.ts index f09d50bf..0c5faf4b 100644 --- a/src/evaluator/matchers/matcherTypes.ts +++ b/src/evaluator/matchers/matcherTypes.ts @@ -23,6 +23,7 @@ export const matcherTypes: Record = { BETWEEN_SEMVER: 21, IN_LIST_SEMVER: 22, IN_LARGE_SEGMENT: 23, + IN_RULE_BASED_SEGMENT: 24, }; export const matcherDataTypes = { diff --git a/src/evaluator/matchers/rbsegment.ts b/src/evaluator/matchers/rbsegment.ts new file mode 100644 index 00000000..68318320 --- /dev/null +++ b/src/evaluator/matchers/rbsegment.ts @@ -0,0 +1,61 @@ +import { IRBSegment, MaybeThenable } from '../../dtos/types'; +import { IStorageAsync, IStorageSync } from '../../storages/types'; +import { ILogger } from '../../logger/types'; +import { IDependencyMatcherValue, ISplitEvaluator } from '../types'; +import { thenable } from '../../utils/promise/thenable'; +import { getMatching, keyParser } from '../../utils/key'; +import { parser } from '../parser'; + + +export function ruleBasedSegmentMatcherContext(segmentName: string, storage: IStorageSync | IStorageAsync, log: ILogger) { + + return function ruleBasedSegmentMatcher({ key, attributes }: IDependencyMatcherValue, splitEvaluator: ISplitEvaluator): MaybeThenable { + + function matchConditions(rbsegment: IRBSegment) { + const conditions = rbsegment.conditions; + const evaluator = parser(log, conditions, storage); + + const evaluation = evaluator( + keyParser(key), + undefined, + undefined, + undefined, + attributes, + splitEvaluator + ); + + return thenable(evaluation) ? + evaluation.then(evaluation => evaluation ? true : false) : + evaluation ? true : false; + } + + function isExcluded(rbSegment: IRBSegment) { + const matchingKey = getMatching(key); + + if (rbSegment.excluded.keys.indexOf(matchingKey) !== -1) return true; + + const isInSegment = rbSegment.excluded.segments.map(segmentName => { + return storage.segments.isInSegment(segmentName, matchingKey); + }); + + return isInSegment.length && thenable(isInSegment[0]) ? + Promise.all(isInSegment).then(results => results.some(result => result)) : + isInSegment.some(result => result); + } + + function isInSegment(rbSegment: IRBSegment | null) { + if (!rbSegment) return false; + const excluded = isExcluded(rbSegment); + + return thenable(excluded) ? + excluded.then(excluded => excluded ? false : matchConditions(rbSegment)) : + excluded ? false : matchConditions(rbSegment); + } + + const rbSegment = storage.rbSegments.get(segmentName); + + return thenable(rbSegment) ? 
+ rbSegment.then(isInSegment) : + isInSegment(rbSegment); + }; +} diff --git a/src/evaluator/matchersTransform/index.ts b/src/evaluator/matchersTransform/index.ts index a5be15e3..6219c4dc 100644 --- a/src/evaluator/matchersTransform/index.ts +++ b/src/evaluator/matchersTransform/index.ts @@ -95,6 +95,9 @@ export function matchersTransform(matchers: ISplitMatcher[]): IMatcherDto[] { type === matcherTypes.LESS_THAN_OR_EQUAL_TO_SEMVER ) { value = stringMatcherData; + } else if (type === matcherTypes.IN_RULE_BASED_SEGMENT) { + value = segmentTransform(userDefinedSegmentMatcherData as IInSegmentMatcherData); + dataType = matcherDataTypes.NOT_SPECIFIED; } return { diff --git a/src/evaluator/value/sanitize.ts b/src/evaluator/value/sanitize.ts index 5d0d6e28..de92efc7 100644 --- a/src/evaluator/value/sanitize.ts +++ b/src/evaluator/value/sanitize.ts @@ -60,6 +60,7 @@ function getProcessingFunction(matcherTypeID: number, dataType: string) { case matcherTypes.BETWEEN: return dataType === 'DATETIME' ? zeroSinceSS : undefined; case matcherTypes.IN_SPLIT_TREATMENT: + case matcherTypes.IN_RULE_BASED_SEGMENT: return dependencyProcessor; default: return undefined; diff --git a/src/storages/AbstractSplitsCacheSync.ts b/src/storages/AbstractSplitsCacheSync.ts index b03dbc7d..97f1e383 100644 --- a/src/storages/AbstractSplitsCacheSync.ts +++ b/src/storages/AbstractSplitsCacheSync.ts @@ -80,8 +80,8 @@ export abstract class AbstractSplitsCacheSync implements ISplitsCacheSync { * Given a parsed split, it returns a boolean flagging if its conditions use segments matchers (rules & whitelists). * This util is intended to simplify the implementation of `splitsCache::usesSegments` method */ -export function usesSegments(split: ISplit | IRBSegment) { - const conditions = split.conditions || []; +export function usesSegments(ruleEntity: ISplit | IRBSegment) { + const conditions = ruleEntity.conditions || []; for (let i = 0; i < conditions.length; i++) { const matchers = conditions[i].matcherGroup.matchers; @@ -91,5 +91,8 @@ export function usesSegments(split: ISplit | IRBSegment) { } } + const excluded = (ruleEntity as IRBSegment).excluded; + if (excluded && excluded.segments && excluded.segments.length > 0) return true; + return false; } diff --git a/src/storages/KeyBuilder.ts b/src/storages/KeyBuilder.ts index 7e9b7e85..4167d860 100644 --- a/src/storages/KeyBuilder.ts +++ b/src/storages/KeyBuilder.ts @@ -37,8 +37,8 @@ export class KeyBuilder { return `${this.prefix}.split.`; } - buildRBSegmentKey(splitName: string) { - return `${this.prefix}.rbsegment.${splitName}`; + buildRBSegmentKey(rbsegmentName: string) { + return `${this.prefix}.rbsegment.${rbsegmentName}`; } buildRBSegmentsTillKey() { diff --git a/src/storages/__tests__/testUtils.ts b/src/storages/__tests__/testUtils.ts index a4009b1c..b2ae79dc 100644 --- a/src/storages/__tests__/testUtils.ts +++ b/src/storages/__tests__/testUtils.ts @@ -22,15 +22,13 @@ export function assertSyncRecorderCacheInterface(cache: IEventsCacheSync | IImpr // Split mocks -//@ts-ignore -export const splitWithUserTT: ISplit = { name: 'user_ff', trafficTypeName: 'user_tt', conditions: [] }; -//@ts-ignore -export const splitWithAccountTT: ISplit = { name: 'account_ff', trafficTypeName: 'account_tt', conditions: [] }; -//@ts-ignore -export const splitWithAccountTTAndUsesSegments: ISplit = { trafficTypeName: 'account_tt', conditions: [{ matcherGroup: { matchers: [{ matcherType: 'IN_SEGMENT', userDefinedSegmentMatcherData: { segmentName: 'employees' } }] } }] }; -//@ts-ignore -export const 
something: ISplit = { name: 'something' }; -//@ts-ignore + +export const ALWAYS_ON_SPLIT: ISplit = { 'trafficTypeName': 'user', 'name': 'always-on', 'trafficAllocation': 100, 'trafficAllocationSeed': 1012950810, 'seed': -725161385, 'status': 'ACTIVE', 'killed': false, 'defaultTreatment': 'off', 'changeNumber': 1494364996459, 'conditions': [{ 'conditionType': 'ROLLOUT', 'matcherGroup': { 'combiner': 'AND', 'matchers': [{ 'keySelector': { 'trafficType': 'user', 'attribute': null }, 'matcherType': 'ALL_KEYS', 'negate': false, 'userDefinedSegmentMatcherData': null, 'whitelistMatcherData': null, 'unaryNumericMatcherData': null, 'betweenMatcherData': null }] }, 'partitions': [{ 'treatment': 'on', 'size': 100 }, { 'treatment': 'off', 'size': 0 }], 'label': 'in segment all' }], 'sets': [] }; +export const ALWAYS_OFF_SPLIT: ISplit = { 'trafficTypeName': 'user', 'name': 'always-off', 'trafficAllocation': 100, 'trafficAllocationSeed': -331690370, 'seed': 403891040, 'status': 'ACTIVE', 'killed': false, 'defaultTreatment': 'on', 'changeNumber': 1494365020316, 'conditions': [{ 'conditionType': 'ROLLOUT', 'matcherGroup': { 'combiner': 'AND', 'matchers': [{ 'keySelector': { 'trafficType': 'user', 'attribute': null }, 'matcherType': 'ALL_KEYS', 'negate': false, 'userDefinedSegmentMatcherData': null, 'whitelistMatcherData': null, 'unaryNumericMatcherData': null, 'betweenMatcherData': null }] }, 'partitions': [{ 'treatment': 'on', 'size': 0 }, { 'treatment': 'off', 'size': 100 }], 'label': 'in segment all' }], 'sets': [] }; //@ts-ignore +export const splitWithUserTT: ISplit = { name: 'user_ff', trafficTypeName: 'user_tt', conditions: [] }; //@ts-ignore +export const splitWithAccountTT: ISplit = { name: 'account_ff', trafficTypeName: 'account_tt', conditions: [] }; //@ts-ignore +export const splitWithAccountTTAndUsesSegments: ISplit = { trafficTypeName: 'account_tt', conditions: [{ matcherGroup: { matchers: [{ matcherType: 'IN_SEGMENT', userDefinedSegmentMatcherData: { segmentName: 'employees' } }] } }] }; //@ts-ignore +export const something: ISplit = { name: 'something' }; //@ts-ignore export const somethingElse: ISplit = { name: 'something else' }; // - With flag sets @@ -38,11 +36,11 @@ export const somethingElse: ISplit = { name: 'something else' }; //@ts-ignore export const featureFlagWithEmptyFS: ISplit = { name: 'ff_empty', sets: [] }; //@ts-ignore -export const featureFlagOne: ISplit = { name: 'ff_one', sets: ['o','n','e'] }; +export const featureFlagOne: ISplit = { name: 'ff_one', sets: ['o', 'n', 'e'] }; //@ts-ignore -export const featureFlagTwo: ISplit = { name: 'ff_two', sets: ['t','w','o'] }; +export const featureFlagTwo: ISplit = { name: 'ff_two', sets: ['t', 'w', 'o'] }; //@ts-ignore -export const featureFlagThree: ISplit = { name: 'ff_three', sets: ['t','h','r','e'] }; +export const featureFlagThree: ISplit = { name: 'ff_three', sets: ['t', 'h', 'r', 'e'] }; //@ts-ignore export const featureFlagWithoutFS: ISplit = { name: 'ff_four' }; diff --git a/src/storages/inLocalStorage/RBSegmentsCacheInLocal.ts b/src/storages/inLocalStorage/RBSegmentsCacheInLocal.ts index 85f73a56..37f6ad8e 100644 --- a/src/storages/inLocalStorage/RBSegmentsCacheInLocal.ts +++ b/src/storages/inLocalStorage/RBSegmentsCacheInLocal.ts @@ -38,7 +38,7 @@ export class RBSegmentsCacheInLocal implements IRBSegmentsCacheSync { } } - private updateSegmentCount(diff: number){ + private updateSegmentCount(diff: number) { const segmentsCountKey = this.keys.buildSplitsWithSegmentCountKey(); const count = 
toNumber(localStorage.getItem(segmentsCountKey)) + diff; // @ts-expect-error @@ -128,11 +128,9 @@ export class RBSegmentsCacheInLocal implements IRBSegmentsCacheSync { const storedCount = localStorage.getItem(this.keys.buildSplitsWithSegmentCountKey()); const splitsWithSegmentsCount = storedCount === null ? 0 : toNumber(storedCount); - if (isFiniteNumber(splitsWithSegmentsCount)) { - return splitsWithSegmentsCount > 0; - } else { - return true; - } + return isFiniteNumber(splitsWithSegmentsCount) ? + splitsWithSegmentsCount > 0 : + true; } } diff --git a/src/storages/inLocalStorage/SplitsCacheInLocal.ts b/src/storages/inLocalStorage/SplitsCacheInLocal.ts index 385125e3..f506b46d 100644 --- a/src/storages/inLocalStorage/SplitsCacheInLocal.ts +++ b/src/storages/inLocalStorage/SplitsCacheInLocal.ts @@ -206,11 +206,9 @@ export class SplitsCacheInLocal extends AbstractSplitsCacheSync { const storedCount = localStorage.getItem(this.keys.buildSplitsWithSegmentCountKey()); const splitsWithSegmentsCount = storedCount === null ? 0 : toNumber(storedCount); - if (isFiniteNumber(splitsWithSegmentsCount)) { - return splitsWithSegmentsCount > 0; - } else { - return true; - } + return isFiniteNumber(splitsWithSegmentsCount) ? + splitsWithSegmentsCount > 0 : + true; } /** diff --git a/src/sync/polling/pollingManagerCS.ts b/src/sync/polling/pollingManagerCS.ts index 4ce0882a..6a5ba679 100644 --- a/src/sync/polling/pollingManagerCS.ts +++ b/src/sync/polling/pollingManagerCS.ts @@ -43,10 +43,10 @@ export function pollingManagerCSFactory( // smart pausing readiness.splits.on(SDK_SPLITS_ARRIVED, () => { if (!splitsSyncTask.isRunning()) return; // noop if not doing polling - const splitsHaveSegments = storage.splits.usesSegments(); - if (splitsHaveSegments !== mySegmentsSyncTask.isRunning()) { - log.info(POLLING_SMART_PAUSING, [splitsHaveSegments ? 'ON' : 'OFF']); - if (splitsHaveSegments) { + const usingSegments = storage.splits.usesSegments() || storage.rbSegments.usesSegments(); + if (usingSegments !== mySegmentsSyncTask.isRunning()) { + log.info(POLLING_SMART_PAUSING, [usingSegments ? 
'ON' : 'OFF']); + if (usingSegments) { startMySegmentsSyncTasks(); } else { stopMySegmentsSyncTasks(); @@ -59,9 +59,9 @@ export function pollingManagerCSFactory( // smart ready function smartReady() { - if (!readiness.isReady() && !storage.splits.usesSegments()) readiness.segments.emit(SDK_SEGMENTS_ARRIVED); + if (!readiness.isReady() && !storage.splits.usesSegments() && !storage.rbSegments.usesSegments()) readiness.segments.emit(SDK_SEGMENTS_ARRIVED); } - if (!storage.splits.usesSegments()) setTimeout(smartReady, 0); + if (!storage.splits.usesSegments() && !storage.rbSegments.usesSegments()) setTimeout(smartReady, 0); else readiness.splits.once(SDK_SPLITS_ARRIVED, smartReady); mySegmentsSyncTasks[matchingKey] = mySegmentsSyncTask; @@ -77,7 +77,7 @@ export function pollingManagerCSFactory( log.info(POLLING_START); splitsSyncTask.start(); - if (storage.splits.usesSegments()) startMySegmentsSyncTasks(); + if (storage.splits.usesSegments() || storage.rbSegments.usesSegments()) startMySegmentsSyncTasks(); }, // Stop periodic fetching (polling) diff --git a/src/sync/polling/updaters/__tests__/splitChangesUpdater.spec.ts b/src/sync/polling/updaters/__tests__/splitChangesUpdater.spec.ts index da85fbdd..a77e9516 100644 --- a/src/sync/polling/updaters/__tests__/splitChangesUpdater.spec.ts +++ b/src/sync/polling/updaters/__tests__/splitChangesUpdater.spec.ts @@ -13,6 +13,7 @@ import { loggerMock } from '../../../../logger/__tests__/sdkLogger.mock'; import { telemetryTrackerFactory } from '../../../../trackers/telemetryTracker'; import { splitNotifications } from '../../../streaming/__tests__/dataMocks'; import { RBSegmentsCacheInMemory } from '../../../../storages/inMemory/RBSegmentsCacheInMemory'; +import { RB_SEGMENT_UPDATE, SPLIT_UPDATE } from '../../../streaming/constants'; const ARCHIVED_FF = 'ARCHIVED'; @@ -202,7 +203,7 @@ describe('splitChangesUpdater', () => { const payload = notification.decoded as Pick; const changeNumber = payload.changeNumber; - await expect(splitChangesUpdater(undefined, undefined, { payload, changeNumber: changeNumber })).resolves.toBe(true); + await expect(splitChangesUpdater(undefined, undefined, { payload, changeNumber: changeNumber, type: SPLIT_UPDATE })).resolves.toBe(true); // fetch and RBSegments.update not being called expect(fetchSplitChanges).toBeCalledTimes(0); @@ -226,7 +227,7 @@ describe('splitChangesUpdater', () => { const payload = { name: 'rbsegment', status: 'ACTIVE', changeNumber: 1684329854385, conditions: [] } as unknown as IRBSegment; const changeNumber = payload.changeNumber; - await expect(splitChangesUpdater(undefined, undefined, { payload, changeNumber: changeNumber })).resolves.toBe(true); + await expect(splitChangesUpdater(undefined, undefined, { payload, changeNumber: changeNumber, type: RB_SEGMENT_UPDATE })).resolves.toBe(true); // fetch and Splits.update not being called expect(fetchSplitChanges).toBeCalledTimes(0); @@ -256,7 +257,7 @@ describe('splitChangesUpdater', () => { let calls = 0; // emit always if not configured sets for (const setMock of setMocks) { - await expect(splitChangesUpdater(undefined, undefined, { payload: { ...payload, sets: setMock.sets, status: 'ACTIVE' }, changeNumber: index })).resolves.toBe(true); + await expect(splitChangesUpdater(undefined, undefined, { payload: { ...payload, sets: setMock.sets, status: 'ACTIVE' }, changeNumber: index, type: SPLIT_UPDATE })).resolves.toBe(true); expect(splitsEmitSpy.mock.calls[index][0]).toBe('state::splits-arrived'); index++; } @@ -268,7 +269,7 @@ 
describe('splitChangesUpdater', () => { splitsEmitSpy.mockReset(); index = 0; for (const setMock of setMocks) { - await expect(splitChangesUpdater(undefined, undefined, { payload: { ...payload, sets: setMock.sets, status: 'ACTIVE' }, changeNumber: index })).resolves.toBe(true); + await expect(splitChangesUpdater(undefined, undefined, { payload: { ...payload, sets: setMock.sets, status: 'ACTIVE' }, changeNumber: index, type: SPLIT_UPDATE })).resolves.toBe(true); if (setMock.shouldEmit) calls++; expect(splitsEmitSpy.mock.calls.length).toBe(calls); index++; } diff --git a/src/sync/polling/updaters/mySegmentsUpdater.ts b/src/sync/polling/updaters/mySegmentsUpdater.ts index 32d9f78e..501e3b7a 100644 --- a/src/sync/polling/updaters/mySegmentsUpdater.ts +++ b/src/sync/polling/updaters/mySegmentsUpdater.ts @@ -27,7 +27,7 @@ export function mySegmentsUpdaterFactory( matchingKey: string ): IMySegmentsUpdater { - const { splits, segments, largeSegments } = storage; + const { splits, rbSegments, segments, largeSegments } = storage; let readyOnAlreadyExistentState = true; let startingUp = true; @@ -51,7 +51,7 @@ export function mySegmentsUpdaterFactory( } // Notify update if required - if (splits.usesSegments() && (shouldNotifyUpdate || readyOnAlreadyExistentState)) { + if ((splits.usesSegments() || rbSegments.usesSegments()) && (shouldNotifyUpdate || readyOnAlreadyExistentState)) { readyOnAlreadyExistentState = false; segmentsEventEmitter.emit(SDK_SEGMENTS_ARRIVED); } diff --git a/src/sync/polling/updaters/splitChangesUpdater.ts b/src/sync/polling/updaters/splitChangesUpdater.ts index 6f22dccc..eccb7d09 100644 --- a/src/sync/polling/updaters/splitChangesUpdater.ts +++ b/src/sync/polling/updaters/splitChangesUpdater.ts @@ -9,8 +9,10 @@ import { SYNC_SPLITS_FETCH, SYNC_SPLITS_UPDATE, SYNC_RBS_UPDATE, SYNC_SPLITS_FET import { startsWith } from '../../../utils/lang'; import { IN_RULE_BASED_SEGMENT, IN_SEGMENT } from '../../../utils/constants'; import { setToArray } from '../../../utils/lang/sets'; +import { SPLIT_UPDATE } from '../../streaming/constants'; -type ISplitChangesUpdater = (noCache?: boolean, till?: number, splitUpdateNotification?: { payload: ISplit | IRBSegment, changeNumber: number }) => Promise<boolean> +export type InstantUpdate = { payload: ISplit | IRBSegment, changeNumber: number, type: string }; +type SplitChangesUpdater = (noCache?: boolean, till?: number, instantUpdate?: InstantUpdate) => Promise<boolean> // Checks that all registered segments have been fetched (changeNumber !== -1 for every segment). // Returns a promise that could be rejected. @@ -27,8 +29,9 @@ function checkAllSegmentsExist(segments: ISegmentsCacheBase): Promise<boolean> { * Collect segments from a raw split definition. * Exported for testing purposes. 
*/ -export function parseSegments({ conditions }: ISplit | IRBSegment, matcherType: typeof IN_SEGMENT | typeof IN_RULE_BASED_SEGMENT = IN_SEGMENT): Set<string> { - let segments = new Set<string>(); +export function parseSegments(ruleEntity: ISplit | IRBSegment, matcherType: typeof IN_SEGMENT | typeof IN_RULE_BASED_SEGMENT = IN_SEGMENT): Set<string> { + const { conditions, excluded } = ruleEntity as IRBSegment; + const segments = new Set<string>(excluded && excluded.segments); for (let i = 0; i < conditions.length; i++) { const matchers = conditions[i].matcherGroup.matchers; @@ -67,10 +70,6 @@ function matchFilters(featureFlag: ISplit, filters: ISplitFiltersValidation) { return matchNames || matchPrefix; } -function isFF(ruleBasedEntity: IRBSegment | ISplit): ruleBasedEntity is ISplit { - return (ruleBasedEntity as ISplit).defaultTreatment !== undefined; -} - /** * Given the list of splits from /splitChanges endpoint, it returns the mutations, * i.e., an object with added splits, removed splits and used segments. @@ -78,15 +77,15 @@ function isFF(ruleBasedEntity: IRBSegment | ISplit): ruleBasedEntity is ISplit { */ export function computeMutation(rules: Array<ISplit | IRBSegment>, segments: Set<string>, filters?: ISplitFiltersValidation): ISplitMutations { - return rules.reduce((accum, ruleBasedEntity) => { - if (ruleBasedEntity.status === 'ACTIVE' && (!filters || matchFilters(ruleBasedEntity as ISplit, filters))) { - accum.added.push(ruleBasedEntity); + return rules.reduce((accum, ruleEntity) => { + if (ruleEntity.status === 'ACTIVE' && (!filters || matchFilters(ruleEntity as ISplit, filters))) { + accum.added.push(ruleEntity); - parseSegments(ruleBasedEntity).forEach((segmentName: string) => { + parseSegments(ruleEntity).forEach((segmentName: string) => { segments.add(segmentName); }); } else { - accum.removed.push(ruleBasedEntity); + accum.removed.push(ruleEntity); } return accum; @@ -116,7 +115,7 @@ export function splitChangesUpdaterFactory( requestTimeoutBeforeReady: number = 0, retriesOnFailureBeforeReady: number = 0, isClientSide?: boolean -): ISplitChangesUpdater { +): SplitChangesUpdater { const { splits, rbSegments, segments } = storage; let startingUp = true; @@ -134,7 +133,7 @@ export function splitChangesUpdaterFactory( * @param noCache - true to revalidate data to fetch * @param till - query param to bypass CDN requests */ - return function splitChangesUpdater(noCache?: boolean, till?: number, updateNotification?: { payload: ISplit | IRBSegment, changeNumber: number }) { + return function splitChangesUpdater(noCache?: boolean, till?: number, instantUpdate?: InstantUpdate) { /** * @param since - current changeNumber at splitsCache @@ -144,15 +143,15 @@ export function splitChangesUpdaterFactory( const [since, rbSince] = sinces; log.debug(SYNC_SPLITS_FETCH, sinces); const fetcherPromise = Promise.resolve( - updateNotification ? - isFF(updateNotification.payload) ? + instantUpdate ? + instantUpdate.type === SPLIT_UPDATE ? // IFFU edge case: a change to a flag that adds an IN_RULE_BASED_SEGMENT matcher that is not present yet - Promise.resolve(rbSegments.contains(parseSegments(updateNotification.payload, IN_RULE_BASED_SEGMENT))).then((contains) => { + Promise.resolve(rbSegments.contains(parseSegments(instantUpdate.payload, IN_RULE_BASED_SEGMENT))).then((contains) => { return contains ? 
- { ff: { d: [updateNotification.payload as ISplit], t: updateNotification.changeNumber } } : + { ff: { d: [instantUpdate.payload as ISplit], t: instantUpdate.changeNumber } } : splitChangesFetcher(since, noCache, till, rbSince, _promiseDecorator); }) : - { rbs: { d: [updateNotification.payload as IRBSegment], t: updateNotification.changeNumber } } : + { rbs: { d: [instantUpdate.payload as IRBSegment], t: instantUpdate.changeNumber } } : splitChangesFetcher(since, noCache, till, rbSince, _promiseDecorator) ) .then((splitChanges: ISplitChangesResponse) => { @@ -180,14 +179,8 @@ export function splitChangesUpdaterFactory( ]).then(([ffChanged, rbsChanged]) => { if (splitsEventEmitter) { // To emit SDK_SPLITS_ARRIVED for server-side SDK, we must check that all registered segments have been fetched - return Promise.resolve(!splitsEventEmitter.splitsArrived || - ( - (!splitChanges.ff || since !== splitChanges.ff.t) && - (!splitChanges.rbs || rbSince !== splitChanges.rbs.t) && - (ffChanged || rbsChanged) && - (isClientSide || checkAllSegmentsExist(segments)) - ) - ) + return Promise.resolve(!splitsEventEmitter.splitsArrived || ((ffChanged || rbsChanged) && (isClientSide || checkAllSegmentsExist(segments)))) + .catch(() => false /** noop. just to handle a possible `checkAllSegmentsExist` rejection, before emitting SDK event */) .then(emitSplitsArrivedEvent => { // emit SDK events if (emitSplitsArrivedEvent) splitsEventEmitter.emit(SDK_SPLITS_ARRIVED); diff --git a/src/sync/streaming/UpdateWorkers/SplitsUpdateWorker.ts b/src/sync/streaming/UpdateWorkers/SplitsUpdateWorker.ts index b151477c..ac421269 100644 --- a/src/sync/streaming/UpdateWorkers/SplitsUpdateWorker.ts +++ b/src/sync/streaming/UpdateWorkers/SplitsUpdateWorker.ts @@ -8,6 +8,7 @@ import { ITelemetryTracker } from '../../../trackers/types'; import { Backoff } from '../../../utils/Backoff'; import { SPLITS } from '../../../utils/constants'; import { ISegmentsSyncTask, ISplitsSyncTask } from '../../polling/types'; +import { InstantUpdate } from '../../polling/updaters/splitChangesUpdater'; import { RB_SEGMENT_UPDATE } from '../constants'; import { parseFFUpdatePayload } from '../parseUtils'; import { ISplitKillData, ISplitUpdateData } from '../SSEHandler/types'; @@ -24,26 +25,26 @@ export function SplitsUpdateWorker(log: ILogger, storage: IStorageSync, splitsSy let handleNewEvent = false; let isHandlingEvent: boolean; let cdnBypass: boolean; - let payload: ISplit | IRBSegment | undefined; + let instantUpdate: InstantUpdate | undefined; const backoff = new Backoff(__handleSplitUpdateCall, FETCH_BACKOFF_BASE, FETCH_BACKOFF_MAX_WAIT); function __handleSplitUpdateCall() { isHandlingEvent = true; if (maxChangeNumber > cache.getChangeNumber()) { handleNewEvent = false; - const splitUpdateNotification = payload ? { payload, changeNumber: maxChangeNumber } : undefined; // fetch splits revalidating data if cached - splitsSyncTask.execute(true, cdnBypass ? maxChangeNumber : undefined, splitUpdateNotification).then(() => { + splitsSyncTask.execute(true, cdnBypass ? maxChangeNumber : undefined, instantUpdate).then(() => { if (!isHandlingEvent) return; // halt if `stop` has been called if (handleNewEvent) { __handleSplitUpdateCall(); } else { - if (splitUpdateNotification) telemetryTracker.trackUpdatesFromSSE(SPLITS); + if (instantUpdate) telemetryTracker.trackUpdatesFromSSE(SPLITS); // fetch new registered segments for server-side API. 
Not retrying on error if (segmentsSyncTask) segmentsSyncTask.execute(true); const attempts = backoff.attempts + 1; + // @TODO and with RBS and FF if (maxChangeNumber <= cache.getChangeNumber()) { log.debug(`Refresh completed${cdnBypass ? ' bypassing the CDN' : ''} in ${attempts} attempts.`); isHandlingEvent = false; @@ -76,7 +77,7 @@ export function SplitsUpdateWorker(log: ILogger, storage: IStorageSync, splitsSy * * @param changeNumber - change number of the notification */ - put({ changeNumber, pcn }: ISplitUpdateData, _payload?: ISplit | IRBSegment) { + put({ changeNumber, pcn, type }: ISplitUpdateData, payload?: ISplit | IRBSegment) { const currentChangeNumber = cache.getChangeNumber(); if (changeNumber <= currentChangeNumber || changeNumber <= maxChangeNumber) return; @@ -84,10 +85,10 @@ export function SplitsUpdateWorker(log: ILogger, storage: IStorageSync, splitsSy maxChangeNumber = changeNumber; handleNewEvent = true; cdnBypass = false; - payload = undefined; + instantUpdate = undefined; - if (_payload && currentChangeNumber === pcn) { - payload = _payload; + if (payload && currentChangeNumber === pcn) { + instantUpdate = { payload, changeNumber, type }; } if (backoff.timeoutID || !isHandlingEvent) __handleSplitUpdateCall(); diff --git a/src/sync/syncManagerOnline.ts b/src/sync/syncManagerOnline.ts index 5410c17f..109b71f0 100644 --- a/src/sync/syncManagerOnline.ts +++ b/src/sync/syncManagerOnline.ts @@ -149,14 +149,14 @@ export function syncManagerOnlineFactory( if (pushManager) { if (pollingManager.isRunning()) { // if doing polling, we must start the periodic fetch of data - if (storage.splits.usesSegments()) mySegmentsSyncTask.start(); + if (storage.splits.usesSegments() || storage.rbSegments.usesSegments()) mySegmentsSyncTask.start(); } else { // if not polling, we must execute the sync task for the initial fetch // of segments since `syncAll` was already executed when starting the main client mySegmentsSyncTask.execute(); } } else { - if (storage.splits.usesSegments()) mySegmentsSyncTask.start(); + if (storage.splits.usesSegments() || storage.rbSegments.usesSegments()) mySegmentsSyncTask.start(); } } else { if (!readinessManager.isReady()) mySegmentsSyncTask.execute();
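
For reference, the core behavior added by rbsegment.ts above boils down to: resolve the rule-based segment from storage, bail out if it does not exist, apply the exclusion lists (keys, then standard segments) before anything else, and only then evaluate the conditions, which are OR'ed. A minimal synchronous sketch of that flow, using illustrative types and names rather than the SDK's actual interfaces (the real matcher works through IStorageSync/IStorageAsync and may return a thenable):

// Simplified, synchronous sketch of the rule-based segment matching flow.
// All types and names here are invented for illustration.
type RBSegmentLite = {
  excluded: { keys: string[], segments: string[] },
  // each condition is reduced to a predicate over (key, attributes)
  conditions: Array<(key: string, attributes?: Record<string, any>) => boolean>
};

type StorageLite = {
  rbSegments: Map<string, RBSegmentLite>,
  isInSegment(segmentName: string, key: string): boolean
};

function matchesRuleBasedSegment(storage: StorageLite, segmentName: string, key: string, attributes?: Record<string, any>): boolean {
  const rbSegment = storage.rbSegments.get(segmentName);
  if (!rbSegment) return false; // unknown rule-based segment -> no match

  // Exclusions take precedence: excluded keys first, then excluded standard segments
  if (rbSegment.excluded.keys.indexOf(key) !== -1) return false;
  if (rbSegment.excluded.segments.some(name => storage.isInSegment(name, key))) return false;

  // Otherwise the key matches if any condition matches (conditions are OR'ed)
  return rbSegment.conditions.some(condition => condition(key, attributes));
}

// Example usage with hypothetical data:
const storage: StorageLite = {
  rbSegments: new Map<string, RBSegmentLite>([['demo_rb_segment', {
    excluded: { keys: ['mauro@split.io'], segments: ['segment_test'] },
    conditions: [(key: string) => key.endsWith('@split.io')]
  }]]),
  isInSegment: (name, key) => name === 'segment_test' && key === 'emi@split.io'
};

console.log(matchesRuleBasedSegment(storage, 'demo_rb_segment', 'emma@split.io')); // true
console.log(matchesRuleBasedSegment(storage, 'demo_rb_segment', 'mauro@split.io')); // false (excluded key)
console.log(matchesRuleBasedSegment(storage, 'demo_rb_segment', 'emi@split.io')); // false (in an excluded segment)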
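
The usesSegments and parseSegments changes above follow from the fact that a rule-based segment can reference standard segments in two places: IN_SEGMENT matchers inside its conditions and its excluded.segments list, and both must be synced before the segment can be evaluated. A minimal sketch of that collection step, with illustrative types rather than the SDK's ISplit/IRBSegment DTOs:

// Collect the standard segments referenced by a feature flag or rule-based segment.
type RuleEntityLite = {
  conditions: Array<{ matcherGroup: { matchers: Array<{ matcherType: string, userDefinedSegmentMatcherData?: { segmentName: string } | null }> } }>,
  excluded?: { segments?: string[] } // only present on rule-based segments
};

function collectSegments(ruleEntity: RuleEntityLite): Set<string> {
  // Excluded segments count as referenced segments too: they must be available
  // before the exclusion check can run.
  const segments = new Set<string>(ruleEntity.excluded && ruleEntity.excluded.segments);

  ruleEntity.conditions.forEach(condition => {
    condition.matcherGroup.matchers.forEach(matcher => {
      if (matcher.matcherType === 'IN_SEGMENT' && matcher.userDefinedSegmentMatcherData) {
        segments.add(matcher.userDefinedSegmentMatcherData.segmentName);
      }
    });
  });

  return segments;
}

// Example with a hypothetical rule-based segment:
console.log(collectSegments({
  excluded: { segments: ['segment_test'] },
  conditions: [{ matcherGroup: { matchers: [{ matcherType: 'IN_SEGMENT', userDefinedSegmentMatcherData: { segmentName: 'employees' } }] } }]
})); // Set { 'segment_test', 'employees' }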
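
splitChangesUpdater now receives an InstantUpdate carrying the notification type alongside the payload because of the IFFU edge case called out in the diff: a SPLIT_UPDATE payload can only be applied locally when every rule-based segment it references is already in storage, otherwise the updater falls back to a regular fetch, while RB_SEGMENT_UPDATE payloads are applied directly. A hedged sketch of that decision, with invented names and shapes:

// Illustrative only; not the SDK API.
type InstantUpdateLite = {
  type: 'SPLIT_UPDATE' | 'RB_SEGMENT_UPDATE',
  referencedRbSegments: string[] // rule-based segments referenced by IN_RULE_BASED_SEGMENT matchers in the payload
};

function canApplyWithoutFetch(update: InstantUpdateLite, storedRbSegments: Set<string>): boolean {
  // Rule-based segment payloads are applied directly
  if (update.type === 'RB_SEGMENT_UPDATE') return true;
  // A feature flag payload is applied only if the rule-based segments it depends on are already stored
  return update.referencedRbSegments.every(name => storedRbSegments.has(name));
}

console.log(canApplyWithoutFetch({ type: 'SPLIT_UPDATE', referencedRbSegments: ['mauro_rule_based_segment'] }, new Set<string>())); // false -> fetch instead
console.log(canApplyWithoutFetch({ type: 'SPLIT_UPDATE', referencedRbSegments: [] }, new Set<string>())); // true -> apply payload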