10 changes: 6 additions & 4 deletions client/src/main/java/io/split/client/SplitFactoryImpl.java
@@ -109,6 +109,7 @@
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.stream.Collectors;
@@ -200,7 +201,7 @@ public SplitFactoryImpl(String apiToken, SplitClientConfig config) throws URISyn

SplitParser splitParser = new SplitParser();
// SplitFetcher
_splitFetcher = buildSplitFetcher(splitCache, splitParser);
_splitFetcher = buildSplitFetcher(splitCache, splitParser, config.getSetsFilter());

// SplitSynchronizationTask
_splitSynchronizationTask = new SplitSynchronizationTask(_splitFetcher,
@@ -377,7 +378,7 @@ protected SplitFactoryImpl(SplitClientConfig config) {
SplitChangeFetcher splitChangeFetcher = createSplitChangeFetcher(config);
SplitParser splitParser = new SplitParser();

_splitFetcher = new SplitFetcherImp(splitChangeFetcher, splitParser, splitCache, _telemetryStorageProducer);
_splitFetcher = new SplitFetcherImp(splitChangeFetcher, splitParser, splitCache, _telemetryStorageProducer, config.getSetsFilter());

// SplitSynchronizationTask
_splitSynchronizationTask = new SplitSynchronizationTask(_splitFetcher, splitCache, config.featuresRefreshRate(), config.getThreadFactory());
@@ -559,10 +560,11 @@ private SegmentSynchronizationTaskImp buildSegments(SplitClientConfig config, Se
config.getThreadFactory());
}

private SplitFetcher buildSplitFetcher(SplitCacheProducer splitCacheProducer, SplitParser splitParser) throws URISyntaxException {
private SplitFetcher buildSplitFetcher(SplitCacheProducer splitCacheProducer, SplitParser splitParser, HashSet<String> flagSets) throws
URISyntaxException {
SplitChangeFetcher splitChangeFetcher = HttpSplitChangeFetcher.create(_httpclient, _rootTarget, _telemetryStorageProducer);

return new SplitFetcherImp(splitChangeFetcher, splitParser, splitCacheProducer, _telemetryStorageProducer);
return new SplitFetcherImp(splitChangeFetcher, splitParser, splitCacheProducer, _telemetryStorageProducer, flagSets);
}

private ImpressionsManagerImpl buildImpressionsManager(SplitClientConfig config, ImpressionsStorageConsumer impressionsStorageConsumer,
FlagSetsFilter.java (new file)
@@ -0,0 +1,9 @@
package io.split.client.interceptors;

import java.util.Set;

public interface FlagSetsFilter {

boolean Intersect(Set<String> sets);
boolean Intersect(String set);
}
FlagSetsFilterImpl.java (new file)
@@ -0,0 +1,40 @@
package io.split.client.interceptors;

import java.util.Set;

public class FlagSetsFilterImpl implements FlagSetsFilter {

private final Set<String> _flagSets;
private final boolean _shouldFilter;

public FlagSetsFilterImpl(Set<String> flagSets) {
_shouldFilter = !flagSets.isEmpty();
_flagSets = flagSets;
}
@Override
public boolean Intersect(Set<String> sets) {
if (!_shouldFilter) {
return true;
}
if (sets == null || sets.isEmpty()) {
return false;
}
for (String set: sets) {
if (_flagSets.contains(set)) {
return true;
}
}
return false;
}

@Override
public boolean Intersect(String set) {
if (!_shouldFilter) {
return true;
}
if (set.isEmpty()){
return false;
}
return _flagSets.contains(set);
}
}
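For orientation, a minimal usage sketch of the new filter (the wrapper class and main method below are illustrative only; the FlagSetsFilter types are the ones added above): an empty configuration disables filtering entirely, otherwise a flag passes only when at least one of its sets matches a configured set.

import io.split.client.interceptors.FlagSetsFilter;
import io.split.client.interceptors.FlagSetsFilterImpl;

import java.util.Arrays;
import java.util.HashSet;

public class FlagSetsFilterSketch {
    public static void main(String[] args) {
        // Configured with "set_1" and "set_2": only flags tagged with one of them pass.
        FlagSetsFilter configured = new FlagSetsFilterImpl(new HashSet<>(Arrays.asList("set_1", "set_2")));
        System.out.println(configured.Intersect(new HashSet<>(Arrays.asList("set_2", "set_9")))); // true
        System.out.println(configured.Intersect("set_9"));                                        // false

        // Configured with no sets: filtering is disabled, everything passes.
        FlagSetsFilter passThrough = new FlagSetsFilterImpl(new HashSet<>());
        System.out.println(passThrough.Intersect("set_9"));                                       // true
    }
}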
FeatureFlagProcessor.java
@@ -2,6 +2,8 @@

import io.split.client.dtos.Split;
import io.split.client.dtos.Status;
import io.split.client.interceptors.FlagSetsFilter;
import io.split.client.interceptors.FlagSetsFilterImpl;
import io.split.engine.experiments.ParsedSplit;
import io.split.engine.experiments.SplitParser;
import org.slf4j.Logger;
@@ -15,16 +15,21 @@
public class FeatureFlagProcessor {
private static final Logger _log = LoggerFactory.getLogger(FeatureFlagProcessor.class);

public static FeatureFlagsToUpdate processFeatureFlagChanges(SplitParser splitParser, List<Split> splits) {
public static FeatureFlagsToUpdate processFeatureFlagChanges(SplitParser splitParser, List<Split> splits, HashSet<String> configSets) {
List<ParsedSplit> toAdd = new ArrayList<>();
List<String> toRemove = new ArrayList<>();
Set<String> segments = new HashSet<>();
FlagSetsFilter flagSetsFilter = new FlagSetsFilterImpl(configSets);
for (Split split : splits) {
if (split.status != Status.ACTIVE) {
// archive.
toRemove.add(split.name);
continue;
}
if (!flagSetsFilter.Intersect(split.sets)) {
toRemove.add(split.name);
continue;
}
ParsedSplit parsedSplit = splitParser.parse(split);
if (parsedSplit == null) {
_log.debug(String.format("We could not parse the feature flag definition for: %s", split.name));
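To make the new configSets path above concrete, here is a minimal hedged sketch. It assumes the Split DTO can be instantiated with a no-arg constructor and populated through the public name/status/sets fields used above, and that FeatureFlagsToUpdate lives in io.split.client.utils next to FeatureFlagProcessor. Flags that are archived, or whose sets do not intersect the configured filter, are both routed to toRemove without ever reaching the parser.

import io.split.client.dtos.Split;
import io.split.client.dtos.Status;
import io.split.client.utils.FeatureFlagsToUpdate;
import io.split.engine.experiments.SplitParser;

import java.util.Arrays;
import java.util.HashSet;

import static io.split.client.utils.FeatureFlagProcessor.processFeatureFlagChanges;

public class FeatureFlagProcessorSketch {
    public static void main(String[] args) {
        // Assumption: Split is a plain DTO with a default constructor and public fields.
        Split archived = new Split();
        archived.name = "flag_archived";
        archived.status = Status.ARCHIVED; // archived flags are removed regardless of their sets

        Split filteredOut = new Split();
        filteredOut.name = "flag_other_set";
        filteredOut.status = Status.ACTIVE;
        filteredOut.sets = new HashSet<>(Arrays.asList("set_9")); // outside the configured filter

        FeatureFlagsToUpdate update = processFeatureFlagChanges(new SplitParser(),
                Arrays.asList(archived, filteredOut), new HashSet<>(Arrays.asList("set_1")));

        System.out.println(update.getToAdd().size());    // 0
        System.out.println(update.getToRemove().size()); // 2: both flags are queued for removal
    }
}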
PushManagerImp.java
@@ -23,6 +23,7 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.HashSet;
import java.util.concurrent.Future;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ScheduledExecutorService;
@@ -74,8 +75,10 @@ public static PushManagerImp build(Synchronizer synchronizer,
TelemetryRuntimeProducer telemetryRuntimeProducer,
ThreadFactory threadFactory,
SplitParser splitParser,
SplitCacheProducer splitCacheProducer) {
FeatureFlagsWorker featureFlagsWorker = new FeatureFlagWorkerImp(synchronizer, splitParser, splitCacheProducer, telemetryRuntimeProducer);
SplitCacheProducer splitCacheProducer,
HashSet<String> flagSets) {
FeatureFlagsWorker featureFlagsWorker = new FeatureFlagWorkerImp(synchronizer, splitParser, splitCacheProducer,
telemetryRuntimeProducer, flagSets);
Worker<SegmentQueueDto> segmentWorker = new SegmentsWorkerImp(synchronizer);
PushStatusTracker pushStatusTracker = new PushStatusTrackerImp(statusMessages, telemetryRuntimeProducer);
return new PushManagerImp(new AuthApiClientImp(authUrl, splitAPI.getHttpClient(), telemetryRuntimeProducer),
SyncManagerImp.java
@@ -106,7 +106,8 @@ public static SyncManagerImp build(SplitTasks splitTasks,
telemetryRuntimeProducer,
config.getThreadFactory(),
splitParser,
splitCacheProducer);
splitCacheProducer,
config.getSetsFilter());

return new SyncManagerImp(splitTasks,
config.streamingEnabled(),
SplitFetcherImp.java
@@ -10,6 +10,7 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

@@ -30,6 +31,7 @@ public class SplitFetcherImp implements SplitFetcher {
private final SplitCacheProducer _splitCacheProducer;
private final Object _lock = new Object();
private final TelemetryRuntimeProducer _telemetryRuntimeProducer;
private final HashSet<String> _flagSets;

/**
* Contains all the traffic types that are currently being used by the splits and also the count
@@ -43,11 +45,12 @@ public class SplitFetcherImp implements SplitFetcher {


public SplitFetcherImp(SplitChangeFetcher splitChangeFetcher, SplitParser parser, SplitCacheProducer splitCacheProducer,
TelemetryRuntimeProducer telemetryRuntimeProducer) {
TelemetryRuntimeProducer telemetryRuntimeProducer, HashSet<String> sets) {
_splitChangeFetcher = checkNotNull(splitChangeFetcher);
_parser = checkNotNull(parser);
_splitCacheProducer = checkNotNull(splitCacheProducer);
_telemetryRuntimeProducer = checkNotNull(telemetryRuntimeProducer);
_flagSets = sets;
}

@Override
@@ -118,7 +121,7 @@ private Set<String> runWithoutExceptionHandling(FetchOptions options) throws Int
// some other thread may have updated the shared state. exit
return segments;
}
FeatureFlagsToUpdate featureFlagsToUpdate = processFeatureFlagChanges(_parser, change.splits);
FeatureFlagsToUpdate featureFlagsToUpdate = processFeatureFlagChanges(_parser, change.splits, _flagSets);
segments = featureFlagsToUpdate.getSegments();
_splitCacheProducer.update(featureFlagsToUpdate.getToAdd(), featureFlagsToUpdate.getToRemove(), change.till);
_telemetryRuntimeProducer.recordSuccessfulSync(LastSynchronizationRecordsEnum.SPLITS, System.currentTimeMillis());
FeatureFlagWorkerImp.java
@@ -13,6 +13,7 @@
import org.slf4j.LoggerFactory;

import java.util.Collections;
import java.util.HashSet;
import java.util.Set;

import static com.google.common.base.Preconditions.checkNotNull;
@@ -24,14 +25,16 @@ public class FeatureFlagWorkerImp extends Worker<FeatureFlagChangeNotification>
private final SplitParser _splitParser;
private final SplitCacheProducer _splitCacheProducer;
private final TelemetryRuntimeProducer _telemetryRuntimeProducer;
private final HashSet<String> _flagSets;

public FeatureFlagWorkerImp(Synchronizer synchronizer, SplitParser splitParser, SplitCacheProducer splitCacheProducer,
TelemetryRuntimeProducer telemetryRuntimeProducer) {
TelemetryRuntimeProducer telemetryRuntimeProducer, HashSet<String> flagSets) {
super("Feature flags");
_synchronizer = checkNotNull(synchronizer);
_splitParser = splitParser;
_splitCacheProducer = splitCacheProducer;
_telemetryRuntimeProducer = telemetryRuntimeProducer;
_flagSets = flagSets;
}

@Override
@@ -61,7 +64,8 @@ private boolean addOrUpdateFeatureFlag(FeatureFlagChangeNotification featureFlag
if (featureFlagChangeNotification.getFeatureFlagDefinition() != null &&
featureFlagChangeNotification.getPreviousChangeNumber() == _splitCacheProducer.getChangeNumber()) {
Split featureFlag = featureFlagChangeNotification.getFeatureFlagDefinition();
FeatureFlagsToUpdate featureFlagsToUpdate = processFeatureFlagChanges(_splitParser, Collections.singletonList(featureFlag));
FeatureFlagsToUpdate featureFlagsToUpdate = processFeatureFlagChanges(_splitParser, Collections.singletonList(featureFlag),
_flagSets);
_splitCacheProducer.update(featureFlagsToUpdate.getToAdd(), featureFlagsToUpdate.getToRemove(),
featureFlagChangeNotification.getChangeNumber());
Set<String> segments = featureFlagsToUpdate.getSegments();
FlagSetsFilterImplTest.java (new file)
@@ -0,0 +1,26 @@
package io.split.client.interceptors;

import org.junit.Assert;
import org.junit.Test;

import java.util.Arrays;
import java.util.HashSet;

public class FlagSetsFilterImplTest {

@Test
public void testIntersectSetsWithShouldFilter() {
FlagSetsFilter flagSetsFilter = new FlagSetsFilterImpl(new HashSet<>(Arrays.asList("a", "b")));
Assert.assertTrue(flagSetsFilter.Intersect("a"));
Assert.assertTrue(flagSetsFilter.Intersect(new HashSet<>(Arrays.asList("a", "c"))));
Assert.assertFalse(flagSetsFilter.Intersect("c"));
Assert.assertFalse(flagSetsFilter.Intersect(new HashSet<>(Arrays.asList("d", "c"))));
}

@Test
public void testIntersectSetsWithShouldNotFilter() {
FlagSetsFilter flagSetsFilter = new FlagSetsFilterImpl(new HashSet<>());
Assert.assertTrue(flagSetsFilter.Intersect("a"));
Assert.assertTrue(flagSetsFilter.Intersect(new HashSet<>(Arrays.asList("a", "c"))));
}
}
FeatureFlagProcessorTest.java
@@ -6,6 +6,8 @@
import org.junit.Test;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;

import static io.split.client.utils.FeatureFlagProcessor.processFeatureFlagChanges;
@@ -25,10 +27,50 @@ public void testProcessFeatureFlagChanges() {

featureFlags.add(featureFlagTest1);
featureFlags.add(featureFlagTest2);
FeatureFlagsToUpdate featureFlagsToUpdate = processFeatureFlagChanges(splitParser, featureFlags);
FeatureFlagsToUpdate featureFlagsToUpdate = processFeatureFlagChanges(splitParser, featureFlags, new HashSet<>());

Assert.assertEquals(1, featureFlagsToUpdate.toAdd.size());
Assert.assertEquals(1, featureFlagsToUpdate.toRemove.size());
Assert.assertEquals(1, featureFlagsToUpdate.segments.size());
}

@Test
public void testProcessFeatureFlagChangesWithSetsToAdd() {
SplitParser splitParser = new SplitParser();
List<Split> featureFlags = new ArrayList<>();

String definition1 = "{\"trafficTypeName\":\"user\",\"id\":\"d431cdd0-b0be-11ea-8a80-1660ada9ce39\",\"name\":\"mauro_java\",\"trafficAllocation\":100,\"trafficAllocationSeed\":-92391491,\"seed\":-1769377604,\"status\":\"ACTIVE\",\"killed\":false,\"defaultTreatment\":\"off\",\"changeNumber\":1684329854385,\"algo\":2,\"configurations\":{},\"sets\":[\"set_1\",\"set_2\"],\"conditions\":[{\"conditionType\":\"WHITELIST\",\"matcherGroup\":{\"combiner\":\"AND\",\"matchers\":[{\"matcherType\":\"WHITELIST\",\"negate\":false,\"whitelistMatcherData\":{\"whitelist\":[\"admin\",\"mauro\",\"nico\"]}}]},\"partitions\":[{\"treatment\":\"off\",\"size\":100}],\"label\":\"whitelisted\"},{\"conditionType\":\"ROLLOUT\",\"matcherGroup\":{\"combiner\":\"AND\",\"matchers\":[{\"keySelector\":{\"trafficType\":\"user\"},\"matcherType\":\"IN_SEGMENT\",\"negate\":false,\"userDefinedSegmentMatcherData\":{\"segmentName\":\"maur-2\"}}]},\"partitions\":[{\"treatment\":\"on\",\"size\":0},{\"treatment\":\"off\",\"size\":100},{\"treatment\":\"V4\",\"size\":0},{\"treatment\":\"v5\",\"size\":0}],\"label\":\"in segment maur-2\"},{\"conditionType\":\"ROLLOUT\",\"matcherGroup\":{\"combiner\":\"AND\",\"matchers\":[{\"keySelector\":{\"trafficType\":\"user\"},\"matcherType\":\"ALL_KEYS\",\"negate\":false}]},\"partitions\":[{\"treatment\":\"on\",\"size\":0},{\"treatment\":\"off\",\"size\":100},{\"treatment\":\"V4\",\"size\":0},{\"treatment\":\"v5\",\"size\":0}],\"label\":\"default rule\"}]}";
Split featureFlagTest1 = Json.fromJson(definition1, Split.class);

String definition2 = "{\"trafficTypeName\":\"user\",\"id\":\"d704f220-0567-11ee-80ee-fa3c6460cd13\",\"name\":\"NET_CORE_getTreatmentWithConfigAfterArchive\",\"trafficAllocation\":100,\"trafficAllocationSeed\":179018541,\"seed\":272707374,\"status\":\"ARCHIVED\",\"killed\":false,\"defaultTreatment\":\"V-FGyN\",\"changeNumber\":1686165617166,\"algo\":2,\"configurations\":{\"V-FGyN\":\"{\\\"color\\\":\\\"blue\\\"}\",\"V-YrWB\":\"{\\\"color\\\":\\\"red\\\"}\"},\"conditions\":[{\"conditionType\":\"ROLLOUT\",\"matcherGroup\":{\"combiner\":\"AND\",\"matchers\":[{\"keySelector\":{\"trafficType\":\"user\",\"attribute\":\"test\"},\"matcherType\":\"LESS_THAN_OR_EQUAL_TO\",\"negate\":false,\"unaryNumericMatcherData\":{\"dataType\":\"NUMBER\",\"value\":20}}]},\"partitions\":[{\"treatment\":\"V-FGyN\",\"size\":0},{\"treatment\":\"V-YrWB\",\"size\":100}],\"label\":\"test \\u003c\\u003d 20\"}]}";
Split featureFlagTest2 = Json.fromJson(definition2, Split.class);

featureFlags.add(featureFlagTest1);
featureFlags.add(featureFlagTest2);
FeatureFlagsToUpdate featureFlagsToUpdate = processFeatureFlagChanges(splitParser, featureFlags, new HashSet<>(Arrays.asList("set_1")));

Assert.assertEquals(1, featureFlagsToUpdate.toAdd.size());
Assert.assertEquals(1, featureFlagsToUpdate.toRemove.size());
Assert.assertEquals(1, featureFlagsToUpdate.segments.size());
}

@Test
public void testProcessFeatureFlagChangesWithSetsToRemove() {
SplitParser splitParser = new SplitParser();
List<Split> featureFlags = new ArrayList<>();

String definition1 = "{\"trafficTypeName\":\"user\",\"id\":\"d431cdd0-b0be-11ea-8a80-1660ada9ce39\",\"name\":\"mauro_java\",\"trafficAllocation\":100,\"trafficAllocationSeed\":-92391491,\"seed\":-1769377604,\"status\":\"ACTIVE\",\"killed\":false,\"defaultTreatment\":\"off\",\"changeNumber\":1684329854385,\"algo\":2,\"configurations\":{},\"sets\":[\"set_1\",\"set_2\"],\"conditions\":[{\"conditionType\":\"WHITELIST\",\"matcherGroup\":{\"combiner\":\"AND\",\"matchers\":[{\"matcherType\":\"WHITELIST\",\"negate\":false,\"whitelistMatcherData\":{\"whitelist\":[\"admin\",\"mauro\",\"nico\"]}}]},\"partitions\":[{\"treatment\":\"off\",\"size\":100}],\"label\":\"whitelisted\"},{\"conditionType\":\"ROLLOUT\",\"matcherGroup\":{\"combiner\":\"AND\",\"matchers\":[{\"keySelector\":{\"trafficType\":\"user\"},\"matcherType\":\"IN_SEGMENT\",\"negate\":false,\"userDefinedSegmentMatcherData\":{\"segmentName\":\"maur-2\"}}]},\"partitions\":[{\"treatment\":\"on\",\"size\":0},{\"treatment\":\"off\",\"size\":100},{\"treatment\":\"V4\",\"size\":0},{\"treatment\":\"v5\",\"size\":0}],\"label\":\"in segment maur-2\"},{\"conditionType\":\"ROLLOUT\",\"matcherGroup\":{\"combiner\":\"AND\",\"matchers\":[{\"keySelector\":{\"trafficType\":\"user\"},\"matcherType\":\"ALL_KEYS\",\"negate\":false}]},\"partitions\":[{\"treatment\":\"on\",\"size\":0},{\"treatment\":\"off\",\"size\":100},{\"treatment\":\"V4\",\"size\":0},{\"treatment\":\"v5\",\"size\":0}],\"label\":\"default rule\"}]}";
Split featureFlagTest1 = Json.fromJson(definition1, Split.class);

String definition2 = "{\"trafficTypeName\":\"user\",\"id\":\"d704f220-0567-11ee-80ee-fa3c6460cd13\",\"name\":\"NET_CORE_getTreatmentWithConfigAfterArchive\",\"trafficAllocation\":100,\"trafficAllocationSeed\":179018541,\"seed\":272707374,\"status\":\"ARCHIVED\",\"killed\":false,\"defaultTreatment\":\"V-FGyN\",\"changeNumber\":1686165617166,\"algo\":2,\"configurations\":{\"V-FGyN\":\"{\\\"color\\\":\\\"blue\\\"}\",\"V-YrWB\":\"{\\\"color\\\":\\\"red\\\"}\"},\"conditions\":[{\"conditionType\":\"ROLLOUT\",\"matcherGroup\":{\"combiner\":\"AND\",\"matchers\":[{\"keySelector\":{\"trafficType\":\"user\",\"attribute\":\"test\"},\"matcherType\":\"LESS_THAN_OR_EQUAL_TO\",\"negate\":false,\"unaryNumericMatcherData\":{\"dataType\":\"NUMBER\",\"value\":20}}]},\"partitions\":[{\"treatment\":\"V-FGyN\",\"size\":0},{\"treatment\":\"V-YrWB\",\"size\":100}],\"label\":\"test \\u003c\\u003d 20\"}]}";
Split featureFlagTest2 = Json.fromJson(definition2, Split.class);

featureFlags.add(featureFlagTest1);
featureFlags.add(featureFlagTest2);
FeatureFlagsToUpdate featureFlagsToUpdate = processFeatureFlagChanges(splitParser, featureFlags, new HashSet<>(Arrays.asList("set_3")));

Assert.assertEquals(0, featureFlagsToUpdate.toAdd.size());
Assert.assertEquals(2, featureFlagsToUpdate.toRemove.size());
Assert.assertEquals(0, featureFlagsToUpdate.segments.size());
}
}