* After configuring this object, use - * {@link LDConfig.Builder#bigSegments(BigSegmentsConfigurationBuilder)} to store it in your SDK + * {@link LDConfig.Builder#bigSegments(ComponentConfigurer)} to store it in your SDK * configuration. For example, using the Redis integration: * *
@@ -67,17 +68,18 @@ private Components() {}
* You must always specify the {@code storeFactory} parameter, to tell the SDK what database you
* are using. Several database integrations exist for the LaunchDarkly SDK, each with its own
* behavior and options specific to that database; this is described via some implementation of
- * {@link BigSegmentStoreFactory}. The {@link BigSegmentsConfigurationBuilder} adds configuration
+ * {@link BigSegmentStore}. The {@link BigSegmentsConfigurationBuilder} adds configuration
* options for aspects of SDK behavior that are independent of the database. In the example above,
* {@code prefix} is an option specifically for the Redis integration, whereas
* {@code userCacheSize} is an option that can be used for any data store type.
*
- * @param storeFactory the factory for the underlying data store
+ * @param storeConfigurer the configurer for the underlying data store
* @return a {@link BigSegmentsConfigurationBuilder}
* @since 5.7.0
+ * @see LDConfig.Builder#bigSegments(ComponentConfigurer)
*/
- public static BigSegmentsConfigurationBuilder bigSegments(BigSegmentStoreFactory storeFactory) {
- return new BigSegmentsConfigurationBuilder(storeFactory);
+ public static BigSegmentsConfigurationBuilder bigSegments(ComponentConfigurer storeConfigurer) {
+ return new BigSegmentsConfigurationBuilder(storeConfigurer);
}
/**
@@ -87,10 +89,10 @@ public static BigSegmentsConfigurationBuilder bigSegments(BigSegmentStoreFactory
* a data store instance for testing purposes.
*
* @return a factory object
- * @see LDConfig.Builder#dataStore(DataStoreFactory)
+ * @see LDConfig.Builder#dataStore(ComponentConfigurer)
* @since 4.12.0
*/
- public static DataStoreFactory inMemoryDataStore() {
+ public static ComponentConfigurer inMemoryDataStore() {
return InMemoryDataStoreFactory.INSTANCE;
}
@@ -117,13 +119,13 @@ public static DataStoreFactory inMemoryDataStore() {
* For more information on the available persistent data store implementations, see the reference
* guide on Using a persistent feature store.
*
- * @param storeFactory the factory/builder for the specific kind of persistent data store
+ * @param storeConfigurer the configurer/builder for the specific kind of persistent data store
* @return a {@link PersistentDataStoreBuilder}
- * @see LDConfig.Builder#dataStore(DataStoreFactory)
+ * @see LDConfig.Builder#dataStore(ComponentConfigurer)
* @since 4.12.0
*/
- public static PersistentDataStoreBuilder persistentDataStore(PersistentDataStoreFactory storeFactory) {
- return new PersistentDataStoreBuilderImpl(storeFactory);
+ public static PersistentDataStoreBuilder persistentDataStore(ComponentConfigurer storeConfigurer) {
+ return new PersistentDataStoreBuilderImpl(storeConfigurer);
}
/**
@@ -131,7 +133,7 @@ public static PersistentDataStoreBuilder persistentDataStore(PersistentDataStore
*
* The default configuration has events enabled with default settings. If you want to
* customize this behavior, call this method to obtain a builder, change its properties
- * with the {@link EventProcessorBuilder} properties, and pass it to {@link LDConfig.Builder#events(EventProcessorFactory)}:
+ * with the {@link EventProcessorBuilder} properties, and pass it to {@link LDConfig.Builder#events(ComponentConfigurer)}:
*
* LDConfig config = new LDConfig.Builder()
* .events(Components.sendEvents().capacity(5000).flushIntervalSeconds(2))
@@ -154,7 +156,7 @@ public static EventProcessorBuilder sendEvents() {
/**
* Returns a configuration object that disables analytics events.
*
- * Passing this to {@link LDConfig.Builder#events(EventProcessorFactory)} causes the SDK
+ * Passing this to {@link LDConfig.Builder#events(ComponentConfigurer)} causes the SDK
* to discard all analytics events and not send them to LaunchDarkly, regardless of any other configuration.
*
* LDConfig config = new LDConfig.Builder()
@@ -164,26 +166,20 @@ public static EventProcessorBuilder sendEvents() {
*
* @return a factory object
* @see #sendEvents()
- * @see LDConfig.Builder#events(EventProcessorFactory)
+ * @see LDConfig.Builder#events(ComponentConfigurer)
* @since 4.12.0
*/
- public static EventProcessorFactory noEvents() {
+ public static ComponentConfigurer noEvents() {
return NULL_EVENT_PROCESSOR_FACTORY;
}
- // package-private method for verifying that the given EventProcessorFactory is the same kind that is
- // returned by noEvents() - we can use reference equality here because we know we're using a static instance
- static boolean isNullImplementation(EventProcessorFactory f) {
- return f == NULL_EVENT_PROCESSOR_FACTORY;
- }
-
/**
* Returns a configurable factory for using streaming mode to get feature flag data.
*
* By default, the SDK uses a streaming connection to receive feature flag data from LaunchDarkly. To use the
* default behavior, you do not need to call this method. However, if you want to customize the behavior of
* the connection, call this method to obtain a builder, change its properties with the
- * {@link StreamingDataSourceBuilder} methods, and pass it to {@link LDConfig.Builder#dataSource(DataSourceFactory)}:
+ * {@link StreamingDataSourceBuilder} methods, and pass it to {@link LDConfig.Builder#dataSource(ComponentConfigurer)}:
*
* LDConfig config = new LDConfig.Builder()
* .dataSource(Components.streamingDataSource().initialReconnectDelayMillis(500))
@@ -194,7 +190,7 @@ static boolean isNullImplementation(EventProcessorFactory f) {
* disable network requests.
*
* @return a builder for setting streaming connection properties
- * @see LDConfig.Builder#dataSource(DataSourceFactory)
+ * @see LDConfig.Builder#dataSource(ComponentConfigurer)
* @since 4.12.0
*/
public static StreamingDataSourceBuilder streamingDataSource() {
@@ -210,7 +206,7 @@ public static StreamingDataSourceBuilder streamingDataSource() {
* polling is still less efficient than streaming and should only be used on the advice of LaunchDarkly support.
*
* To use polling mode, call this method to obtain a builder, change its properties with the
- * {@link PollingDataSourceBuilder} methods, and pass it to {@link LDConfig.Builder#dataSource(DataSourceFactory)}:
+ * {@link PollingDataSourceBuilder} methods, and pass it to {@link LDConfig.Builder#dataSource(ComponentConfigurer)}:
*
* LDConfig config = new LDConfig.Builder()
* .dataSource(Components.pollingDataSource().pollIntervalMillis(45000))
@@ -221,7 +217,7 @@ public static StreamingDataSourceBuilder streamingDataSource() {
* disable network requests.
*
* @return a builder for setting polling properties
- * @see LDConfig.Builder#dataSource(DataSourceFactory)
+ * @see LDConfig.Builder#dataSource(ComponentConfigurer)
* @since 4.12.0
*/
public static PollingDataSourceBuilder pollingDataSource() {
@@ -236,7 +232,7 @@ static PollingDataSourceBuilderImpl pollingDataSourceInternal() {
/**
* Returns a configuration object that disables a direct connection with LaunchDarkly for feature flag updates.
*
- * Passing this to {@link LDConfig.Builder#dataSource(DataSourceFactory)} causes the SDK
+ * Passing this to {@link LDConfig.Builder#dataSource(ComponentConfigurer)} causes the SDK
* not to retrieve feature flag data from LaunchDarkly, regardless of any other configuration.
* This is normally done if you are using the Relay Proxy
* in "daemon mode", where an external process-- the Relay Proxy-- connects to LaunchDarkly and populates
@@ -253,16 +249,16 @@ static PollingDataSourceBuilderImpl pollingDataSourceInternal() {
*
* @return a factory object
* @since 4.12.0
- * @see LDConfig.Builder#dataSource(DataSourceFactory)
+ * @see LDConfig.Builder#dataSource(ComponentConfigurer)
*/
- public static DataSourceFactory externalUpdatesOnly() {
+ public static ComponentConfigurer externalUpdatesOnly() {
return NullDataSourceFactory.INSTANCE;
}
/**
* Returns a configuration builder for the SDK's networking configuration.
*
- * Passing this to {@link LDConfig.Builder#http(com.launchdarkly.sdk.server.interfaces.HttpConfigurationFactory)}
+ * Passing this to {@link LDConfig.Builder#http(ComponentConfigurer)}
* applies this configuration to all HTTP/HTTPS requests made by the SDK.
*
* LDConfig config = new LDConfig.Builder()
@@ -276,7 +272,7 @@ public static DataSourceFactory externalUpdatesOnly() {
*
* @return a factory object
* @since 4.13.0
- * @see LDConfig.Builder#http(com.launchdarkly.sdk.server.interfaces.HttpConfigurationFactory)
+ * @see LDConfig.Builder#http(ComponentConfigurer)
*/
public static HttpConfigurationBuilder httpConfiguration() {
return new HttpConfigurationBuilderImpl();
@@ -307,7 +303,7 @@ public static HttpAuthentication httpBasicAuthentication(String username, String
/**
* Returns a configuration builder for the SDK's logging configuration.
*
- * Passing this to {@link LDConfig.Builder#logging(com.launchdarkly.sdk.server.interfaces.LoggingConfigurationFactory)},
+ * Passing this to {@link LDConfig.Builder#logging(ComponentConfigurer)},
* after setting any desired properties on the builder, applies this configuration to the SDK.
*
* LDConfig config = new LDConfig.Builder()
@@ -320,7 +316,7 @@ public static HttpAuthentication httpBasicAuthentication(String username, String
*
* @return a configuration builder
* @since 5.0.0
- * @see LDConfig.Builder#logging(com.launchdarkly.sdk.server.interfaces.LoggingConfigurationFactory)
+ * @see LDConfig.Builder#logging(ComponentConfigurer)
*/
public static LoggingConfigurationBuilder logging() {
return new LoggingConfigurationBuilderImpl();
@@ -332,15 +328,20 @@ public static LoggingConfigurationBuilder logging() {
*
* This is a shortcut for Components.logging().adapter(logAdapter)
. The
* com.launchdarkly.logging
- * API defines the {@link LDLogAdapter} interface to specify where log output should be sent. By default,
- * it is set to {@link com.launchdarkly.logging.LDSLF4J#adapter()}, meaning that output will be sent to
- * SLF4J and controlled by the SLF4J configuration. You may use
- * the {@link com.launchdarkly.logging.Logs} factory methods, or a custom implementation, to handle log
- * output differently. For instance, you may specify {@link com.launchdarkly.logging.Logs#basic()} for
- * simple console output, or {@link com.launchdarkly.logging.Logs#toJavaUtilLogging()} to use the
- * java.util.logging
framework.
+ * API defines the {@link LDLogAdapter} interface to specify where log output should be sent.
+ *
+ * The default logging destination, if no adapter is specified, depends on whether
+ * SLF4J is present in the classpath. If it is, then the SDK uses
+ * {@link com.launchdarkly.logging.LDSLF4J#adapter()}, causing output to go to SLF4J; what happens to
+ * the output then is determined by the SLF4J configuration. If SLF4J is not present in the classpath,
+ * the SDK uses {@link Logs#toConsole()} instead, causing output to go to the {@code System.err} stream.
+ *
+ * You may use the {@link com.launchdarkly.logging.Logs} factory methods, or a custom implementation,
+ * to handle log output differently. For instance, you may specify
+ * {@link com.launchdarkly.logging.Logs#toJavaUtilLogging()} to use the java.util.logging
+ * framework.
*
- * Passing this to {@link LDConfig.Builder#logging(com.launchdarkly.sdk.server.interfaces.LoggingConfigurationFactory)},
+ * Passing this to {@link LDConfig.Builder#logging(ComponentConfigurer)},
* after setting any desired properties on the builder, applies this configuration to the SDK.
*
* LDConfig config = new LDConfig.Builder()
@@ -353,7 +354,7 @@ public static LoggingConfigurationBuilder logging() {
* @param logAdapter the log adapter
* @return a configuration builder
* @since 5.10.0
- * @see LDConfig.Builder#logging(com.launchdarkly.sdk.server.interfaces.LoggingConfigurationFactory)
+ * @see LDConfig.Builder#logging(ComponentConfigurer)
* @see LoggingConfigurationBuilder#adapter(LDLogAdapter)
*/
public static LoggingConfigurationBuilder logging(LDLogAdapter logAdapter) {
@@ -363,7 +364,7 @@ public static LoggingConfigurationBuilder logging(LDLogAdapter logAdapter) {
/**
* Returns a configuration builder that turns off SDK logging.
*
- * Passing this to {@link LDConfig.Builder#logging(com.launchdarkly.sdk.server.interfaces.LoggingConfigurationFactory)}
+ * Passing this to {@link LDConfig.Builder#logging(ComponentConfigurer)}
* applies this configuration to the SDK.
*
* It is equivalent to Components.logging(com.launchdarkly.logging.Logs.none())
.
diff --git a/src/main/java/com/launchdarkly/sdk/server/ComponentsImpl.java b/src/main/java/com/launchdarkly/sdk/server/ComponentsImpl.java
index 3e87d7103..e4a82499b 100644
--- a/src/main/java/com/launchdarkly/sdk/server/ComponentsImpl.java
+++ b/src/main/java/com/launchdarkly/sdk/server/ComponentsImpl.java
@@ -6,8 +6,14 @@
import com.launchdarkly.logging.LDLogger;
import com.launchdarkly.logging.LDSLF4J;
import com.launchdarkly.logging.Logs;
+import com.launchdarkly.sdk.EvaluationReason;
+import com.launchdarkly.sdk.LDContext;
import com.launchdarkly.sdk.LDValue;
-import com.launchdarkly.sdk.server.DiagnosticEvent.ConfigProperty;
+import com.launchdarkly.sdk.internal.events.DefaultEventSender;
+import com.launchdarkly.sdk.internal.events.DiagnosticConfigProperty;
+import com.launchdarkly.sdk.internal.events.EventSender;
+import com.launchdarkly.sdk.internal.events.EventsConfiguration;
+import com.launchdarkly.sdk.internal.http.HttpProperties;
import com.launchdarkly.sdk.server.integrations.EventProcessorBuilder;
import com.launchdarkly.sdk.server.integrations.HttpConfigurationBuilder;
import com.launchdarkly.sdk.server.integrations.LoggingConfigurationBuilder;
@@ -15,27 +21,18 @@
import com.launchdarkly.sdk.server.integrations.PollingDataSourceBuilder;
import com.launchdarkly.sdk.server.integrations.ServiceEndpointsBuilder;
import com.launchdarkly.sdk.server.integrations.StreamingDataSourceBuilder;
-import com.launchdarkly.sdk.server.interfaces.BasicConfiguration;
-import com.launchdarkly.sdk.server.interfaces.ClientContext;
-import com.launchdarkly.sdk.server.interfaces.DataSource;
-import com.launchdarkly.sdk.server.interfaces.DataSourceFactory;
import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider;
-import com.launchdarkly.sdk.server.interfaces.DataSourceUpdates;
-import com.launchdarkly.sdk.server.interfaces.DataStore;
-import com.launchdarkly.sdk.server.interfaces.DataStoreFactory;
-import com.launchdarkly.sdk.server.interfaces.DataStoreUpdates;
-import com.launchdarkly.sdk.server.interfaces.DiagnosticDescription;
-import com.launchdarkly.sdk.server.interfaces.Event;
-import com.launchdarkly.sdk.server.interfaces.EventProcessor;
-import com.launchdarkly.sdk.server.interfaces.EventProcessorFactory;
-import com.launchdarkly.sdk.server.interfaces.EventSender;
-import com.launchdarkly.sdk.server.interfaces.EventSenderFactory;
import com.launchdarkly.sdk.server.interfaces.HttpAuthentication;
-import com.launchdarkly.sdk.server.interfaces.HttpConfiguration;
-import com.launchdarkly.sdk.server.interfaces.LoggingConfiguration;
-import com.launchdarkly.sdk.server.interfaces.PersistentDataStore;
-import com.launchdarkly.sdk.server.interfaces.PersistentDataStoreFactory;
import com.launchdarkly.sdk.server.interfaces.ServiceEndpoints;
+import com.launchdarkly.sdk.server.subsystems.ClientContext;
+import com.launchdarkly.sdk.server.subsystems.ComponentConfigurer;
+import com.launchdarkly.sdk.server.subsystems.DataSource;
+import com.launchdarkly.sdk.server.subsystems.DataStore;
+import com.launchdarkly.sdk.server.subsystems.DiagnosticDescription;
+import com.launchdarkly.sdk.server.subsystems.EventProcessor;
+import com.launchdarkly.sdk.server.subsystems.HttpConfiguration;
+import com.launchdarkly.sdk.server.subsystems.LoggingConfiguration;
+import com.launchdarkly.sdk.server.subsystems.PersistentDataStore;
import java.io.IOException;
import java.net.InetSocketAddress;
@@ -54,20 +51,20 @@
abstract class ComponentsImpl {
private ComponentsImpl() {}
- static final class InMemoryDataStoreFactory implements DataStoreFactory, DiagnosticDescription {
- static final DataStoreFactory INSTANCE = new InMemoryDataStoreFactory();
+ static final class InMemoryDataStoreFactory implements ComponentConfigurer, DiagnosticDescription {
+ static final InMemoryDataStoreFactory INSTANCE = new InMemoryDataStoreFactory();
@Override
- public DataStore createDataStore(ClientContext context, DataStoreUpdates dataStoreUpdates) {
+ public DataStore build(ClientContext context) {
return new InMemoryDataStore();
}
@Override
- public LDValue describeConfiguration(BasicConfiguration basicConfiguration) {
+ public LDValue describeConfiguration(ClientContext clientContext) {
return LDValue.of("memory");
}
}
- static final EventProcessorFactory NULL_EVENT_PROCESSOR_FACTORY = context -> NullEventProcessor.INSTANCE;
+ static final ComponentConfigurer NULL_EVENT_PROCESSOR_FACTORY = context -> NullEventProcessor.INSTANCE;
/**
* Stub implementation of {@link EventProcessor} for when we don't want to send any events.
@@ -78,47 +75,52 @@ static final class NullEventProcessor implements EventProcessor {
private NullEventProcessor() {}
@Override
- public void sendEvent(Event e) {
- }
+ public void flush() {}
@Override
- public void flush() {
- }
-
+ public void close() {}
+
@Override
- public void close() {
- }
+ public void recordEvaluationEvent(LDContext context, String flagKey, int flagVersion, int variation, LDValue value,
+ EvaluationReason reason, LDValue defaultValue, String prerequisiteOfFlagKey, boolean requireFullEvent,
+ Long debugEventsUntilDate) {}
+
+ @Override
+ public void recordIdentifyEvent(LDContext context) {}
+
+ @Override
+ public void recordCustomEvent(LDContext context, String eventKey, LDValue data, Double metricValue) {}
}
- static final class NullDataSourceFactory implements DataSourceFactory, DiagnosticDescription {
+ static final class NullDataSourceFactory implements ComponentConfigurer, DiagnosticDescription {
static final NullDataSourceFactory INSTANCE = new NullDataSourceFactory();
@Override
- public DataSource createDataSource(ClientContext context, DataSourceUpdates dataSourceUpdates) {
- LDLogger logger = context.getBasic().getBaseLogger();
- if (context.getBasic().isOffline()) {
+ public DataSource build(ClientContext context) {
+ LDLogger logger = context.getBaseLogger();
+ if (context.isOffline()) {
// If they have explicitly called offline(true) to disable everything, we'll log this slightly
// more specific message.
logger.info("Starting LaunchDarkly client in offline mode");
} else {
logger.info("LaunchDarkly client will not connect to Launchdarkly for feature flag data");
}
- dataSourceUpdates.updateStatus(DataSourceStatusProvider.State.VALID, null);
+ context.getDataSourceUpdateSink().updateStatus(DataSourceStatusProvider.State.VALID, null);
return NullDataSource.INSTANCE;
}
@Override
- public LDValue describeConfiguration(BasicConfiguration basicConfiguration) {
+ public LDValue describeConfiguration(ClientContext clientContext) {
// The difference between "offline" and "using the Relay daemon" is irrelevant from the data source's
// point of view, but we describe them differently in diagnostic events. This is easy because if we were
// configured to be completely offline... we wouldn't be sending any diagnostic events. Therefore, if
// Components.externalUpdatesOnly() was specified as the data source and we are sending a diagnostic
// event, we can assume usingRelayDaemon should be true.
return LDValue.buildObject()
- .put(ConfigProperty.CUSTOM_BASE_URI.name, false)
- .put(ConfigProperty.CUSTOM_STREAM_URI.name, false)
- .put(ConfigProperty.STREAMING_DISABLED.name, false)
- .put(ConfigProperty.USING_RELAY_DAEMON.name, true)
+ .put(DiagnosticConfigProperty.CUSTOM_BASE_URI.name, false)
+ .put(DiagnosticConfigProperty.CUSTOM_STREAM_URI.name, false)
+ .put(DiagnosticConfigProperty.STREAMING_DISABLED.name, false)
+ .put(DiagnosticConfigProperty.USING_RELAY_DAEMON.name, true)
.build();
}
}
@@ -143,24 +145,23 @@ public void close() throws IOException {}
static final class StreamingDataSourceBuilderImpl extends StreamingDataSourceBuilder
implements DiagnosticDescription {
@Override
- public DataSource createDataSource(ClientContext context, DataSourceUpdates dataSourceUpdates) {
- LDLogger baseLogger = context.getBasic().getBaseLogger();
+ public DataSource build(ClientContext context) {
+ LDLogger baseLogger = context.getBaseLogger();
LDLogger logger = baseLogger.subLogger(Loggers.DATA_SOURCE_LOGGER_NAME);
logger.info("Enabling streaming API");
URI streamUri = StandardEndpoints.selectBaseUri(
- context.getBasic().getServiceEndpoints().getStreamingBaseUri(),
- baseURI,
+ context.getServiceEndpoints().getStreamingBaseUri(),
StandardEndpoints.DEFAULT_STREAMING_BASE_URI,
"streaming",
baseLogger
);
return new StreamProcessor(
- context.getHttp(),
- dataSourceUpdates,
- context.getBasic().getThreadPriority(),
- ClientContextImpl.get(context).diagnosticAccumulator,
+ toHttpProperties(context.getHttp()),
+ context.getDataSourceUpdateSink(),
+ context.getThreadPriority(),
+ ClientContextImpl.get(context).diagnosticStore,
streamUri,
initialReconnectDelay,
logger
@@ -168,17 +169,16 @@ public DataSource createDataSource(ClientContext context, DataSourceUpdates data
}
@Override
- public LDValue describeConfiguration(BasicConfiguration basicConfiguration) {
+ public LDValue describeConfiguration(ClientContext clientContext) {
return LDValue.buildObject()
- .put(ConfigProperty.STREAMING_DISABLED.name, false)
- .put(ConfigProperty.CUSTOM_BASE_URI.name, false)
- .put(ConfigProperty.CUSTOM_STREAM_URI.name,
+ .put(DiagnosticConfigProperty.STREAMING_DISABLED.name, false)
+ .put(DiagnosticConfigProperty.CUSTOM_BASE_URI.name, false)
+ .put(DiagnosticConfigProperty.CUSTOM_STREAM_URI.name,
StandardEndpoints.isCustomBaseUri(
- basicConfiguration.getServiceEndpoints().getStreamingBaseUri(),
- baseURI,
+ clientContext.getServiceEndpoints().getStreamingBaseUri(),
StandardEndpoints.DEFAULT_STREAMING_BASE_URI))
- .put(ConfigProperty.RECONNECT_TIME_MILLIS.name, initialReconnectDelay.toMillis())
- .put(ConfigProperty.USING_RELAY_DAEMON.name, false)
+ .put(DiagnosticConfigProperty.RECONNECT_TIME_MILLIS.name, initialReconnectDelay.toMillis())
+ .put(DiagnosticConfigProperty.USING_RELAY_DAEMON.name, false)
.build();
}
}
@@ -191,25 +191,24 @@ PollingDataSourceBuilderImpl pollIntervalWithNoMinimum(Duration pollInterval) {
}
@Override
- public DataSource createDataSource(ClientContext context, DataSourceUpdates dataSourceUpdates) {
- LDLogger baseLogger = context.getBasic().getBaseLogger();
+ public DataSource build(ClientContext context) {
+ LDLogger baseLogger = context.getBaseLogger();
LDLogger logger = baseLogger.subLogger(Loggers.DATA_SOURCE_LOGGER_NAME);
logger.info("Disabling streaming API");
logger.warn("You should only disable the streaming API if instructed to do so by LaunchDarkly support");
URI pollUri = StandardEndpoints.selectBaseUri(
- context.getBasic().getServiceEndpoints().getPollingBaseUri(),
- baseURI,
+ context.getServiceEndpoints().getPollingBaseUri(),
StandardEndpoints.DEFAULT_POLLING_BASE_URI,
"polling",
baseLogger
);
- DefaultFeatureRequestor requestor = new DefaultFeatureRequestor(context.getHttp(), pollUri, logger);
+ DefaultFeatureRequestor requestor = new DefaultFeatureRequestor(toHttpProperties(context.getHttp()), pollUri, logger);
return new PollingProcessor(
requestor,
- dataSourceUpdates,
+ context.getDataSourceUpdateSink(),
ClientContextImpl.get(context).sharedExecutor,
pollInterval,
logger
@@ -217,17 +216,16 @@ public DataSource createDataSource(ClientContext context, DataSourceUpdates data
}
@Override
- public LDValue describeConfiguration(BasicConfiguration basicConfiguration) {
+ public LDValue describeConfiguration(ClientContext clientContext) {
return LDValue.buildObject()
- .put(ConfigProperty.STREAMING_DISABLED.name, true)
- .put(ConfigProperty.CUSTOM_BASE_URI.name,
+ .put(DiagnosticConfigProperty.STREAMING_DISABLED.name, true)
+ .put(DiagnosticConfigProperty.CUSTOM_BASE_URI.name,
StandardEndpoints.isCustomBaseUri(
- basicConfiguration.getServiceEndpoints().getPollingBaseUri(),
- baseURI,
+ clientContext.getServiceEndpoints().getPollingBaseUri(),
StandardEndpoints.DEFAULT_POLLING_BASE_URI))
- .put(ConfigProperty.CUSTOM_STREAM_URI.name, false)
- .put(ConfigProperty.POLLING_INTERVAL_MILLIS.name, pollInterval.toMillis())
- .put(ConfigProperty.USING_RELAY_DAEMON.name, false)
+ .put(DiagnosticConfigProperty.CUSTOM_STREAM_URI.name, false)
+ .put(DiagnosticConfigProperty.POLLING_INTERVAL_MILLIS.name, pollInterval.toMillis())
+ .put(DiagnosticConfigProperty.USING_RELAY_DAEMON.name, false)
.build();
}
}
@@ -235,74 +233,104 @@ public LDValue describeConfiguration(BasicConfiguration basicConfiguration) {
static final class EventProcessorBuilderImpl extends EventProcessorBuilder
implements DiagnosticDescription {
@Override
- public EventProcessor createEventProcessor(ClientContext context) {
- LDLogger baseLogger = context.getBasic().getBaseLogger();
- LDLogger logger = baseLogger.subLogger(Loggers.EVENTS_LOGGER_NAME);
- EventSenderFactory senderFactory =
- eventSenderFactory == null ? new DefaultEventSender.Factory() : eventSenderFactory;
- EventSender eventSender = senderFactory.createEventSender(
- context.getBasic(),
- context.getHttp(),
- logger
- );
+ public EventProcessor build(ClientContext context) {
+ EventSender eventSender;
+ if (eventSenderConfigurer == null) {
+ eventSender = new DefaultEventSender(
+ toHttpProperties(context.getHttp()),
+ null, // use default request path for server-side events
+ null, // use default request path for client-side events
+ 0, // 0 means default retry delay
+ context.getBaseLogger().subLogger(Loggers.EVENTS_LOGGER_NAME)
+ );
+ } else {
+ eventSender = new EventSenderWrapper(eventSenderConfigurer.build(context));
+ }
URI eventsUri = StandardEndpoints.selectBaseUri(
- context.getBasic().getServiceEndpoints().getEventsBaseUri(),
- baseURI,
+ context.getServiceEndpoints().getEventsBaseUri(),
StandardEndpoints.DEFAULT_EVENTS_BASE_URI,
"events",
- baseLogger
+ context.getBaseLogger()
);
- return new DefaultEventProcessor(
- new EventsConfiguration(
- allAttributesPrivate,
- capacity,
- eventSender,
- eventsUri,
- flushInterval,
- inlineUsersInEvents,
- privateAttributes,
- userKeysCapacity,
- userKeysFlushInterval,
- diagnosticRecordingInterval
- ),
- ClientContextImpl.get(context).sharedExecutor,
- context.getBasic().getThreadPriority(),
- ClientContextImpl.get(context).diagnosticAccumulator,
- ClientContextImpl.get(context).diagnosticInitEvent,
- logger
+ EventsConfiguration eventsConfig = new EventsConfiguration(
+ allAttributesPrivate,
+ capacity,
+ new ServerSideEventContextDeduplicator(userKeysCapacity, userKeysFlushInterval),
+ diagnosticRecordingInterval.toMillis(),
+ ClientContextImpl.get(context).diagnosticStore,
+ eventSender,
+ EventsConfiguration.DEFAULT_EVENT_SENDING_THREAD_POOL_SIZE,
+ eventsUri,
+ flushInterval.toMillis(),
+ false,
+ false,
+ privateAttributes
);
+ return new DefaultEventProcessorWrapper(context, eventsConfig);
}
@Override
- public LDValue describeConfiguration(BasicConfiguration basicConfiguration) {
+ public LDValue describeConfiguration(ClientContext clientContext) {
return LDValue.buildObject()
- .put(ConfigProperty.ALL_ATTRIBUTES_PRIVATE.name, allAttributesPrivate)
- .put(ConfigProperty.CUSTOM_EVENTS_URI.name,
+ .put(DiagnosticConfigProperty.ALL_ATTRIBUTES_PRIVATE.name, allAttributesPrivate)
+ .put(DiagnosticConfigProperty.CUSTOM_EVENTS_URI.name,
StandardEndpoints.isCustomBaseUri(
- basicConfiguration.getServiceEndpoints().getEventsBaseUri(),
- baseURI,
+ clientContext.getServiceEndpoints().getEventsBaseUri(),
StandardEndpoints.DEFAULT_EVENTS_BASE_URI))
- .put(ConfigProperty.DIAGNOSTIC_RECORDING_INTERVAL_MILLIS.name, diagnosticRecordingInterval.toMillis())
- .put(ConfigProperty.EVENTS_CAPACITY.name, capacity)
- .put(ConfigProperty.EVENTS_FLUSH_INTERVAL_MILLIS.name, flushInterval.toMillis())
- .put(ConfigProperty.INLINE_USERS_IN_EVENTS.name, inlineUsersInEvents)
- .put(ConfigProperty.SAMPLING_INTERVAL.name, 0)
- .put(ConfigProperty.USER_KEYS_CAPACITY.name, userKeysCapacity)
- .put(ConfigProperty.USER_KEYS_FLUSH_INTERVAL_MILLIS.name, userKeysFlushInterval.toMillis())
+ .put(DiagnosticConfigProperty.DIAGNOSTIC_RECORDING_INTERVAL_MILLIS.name, diagnosticRecordingInterval.toMillis())
+ .put(DiagnosticConfigProperty.EVENTS_CAPACITY.name, capacity)
+ .put(DiagnosticConfigProperty.EVENTS_FLUSH_INTERVAL_MILLIS.name, flushInterval.toMillis())
+ .put(DiagnosticConfigProperty.SAMPLING_INTERVAL.name, 0)
+ .put(DiagnosticConfigProperty.USER_KEYS_CAPACITY.name, userKeysCapacity)
+ .put(DiagnosticConfigProperty.USER_KEYS_FLUSH_INTERVAL_MILLIS.name, userKeysFlushInterval.toMillis())
.build();
}
+
+ static final class EventSenderWrapper implements EventSender {
+ private final com.launchdarkly.sdk.server.subsystems.EventSender wrappedSender;
+
+ EventSenderWrapper(com.launchdarkly.sdk.server.subsystems.EventSender wrappedSender) {
+ this.wrappedSender = wrappedSender;
+ }
+
+ @Override
+ public void close() throws IOException {
+ wrappedSender.close();
+ }
+
+ @Override
+ public Result sendAnalyticsEvents(byte[] data, int eventCount, URI eventsBaseUri) {
+ return transformResult(wrappedSender.sendAnalyticsEvents(data, eventCount, eventsBaseUri));
+ }
+
+ @Override
+ public Result sendDiagnosticEvent(byte[] data, URI eventsBaseUri) {
+ return transformResult(wrappedSender.sendDiagnosticEvent(data, eventsBaseUri));
+ }
+
+ private Result transformResult(com.launchdarkly.sdk.server.subsystems.EventSender.Result result) {
+ switch (result) {
+ case FAILURE:
+ return new Result(false, false, null);
+ case STOP:
+ return new Result(false, true, null);
+ default:
+ return new Result(true, false, null);
+ }
+ }
+ }
}
static final class HttpConfigurationBuilderImpl extends HttpConfigurationBuilder {
@Override
- public HttpConfiguration createHttpConfiguration(BasicConfiguration basicConfiguration) {
- LDLogger logger = basicConfiguration.getBaseLogger();
+ public HttpConfiguration build(ClientContext clientContext) {
+ LDLogger logger = clientContext.getBaseLogger();
// Build the default headers
ImmutableMap.Builder headers = ImmutableMap.builder();
- headers.put("Authorization", basicConfiguration.getSdkKey());
+ headers.put("Authorization", clientContext.getSdkKey());
headers.put("User-Agent", "JavaClient/" + Version.SDK_VERSION);
- if (basicConfiguration.getApplicationInfo() != null) {
- String tagHeader = Util.applicationTagHeader(basicConfiguration.getApplicationInfo(), logger);
+ if (clientContext.getApplicationInfo() != null) {
+ String tagHeader = Util.applicationTagHeader(clientContext.getApplicationInfo(), logger);
if (!tagHeader.isEmpty()) {
headers.put("X-LaunchDarkly-Tags", tagHeader);
}
@@ -314,15 +342,15 @@ public HttpConfiguration createHttpConfiguration(BasicConfiguration basicConfigu
Proxy proxy = proxyHost == null ? null : new Proxy(Proxy.Type.HTTP, new InetSocketAddress(proxyHost, proxyPort));
- return new HttpConfigurationImpl(
+ return new HttpConfiguration(
connectTimeout,
+ headers.build(),
proxy,
proxyAuth,
- socketTimeout,
socketFactory,
+ socketTimeout,
sslSocketFactory,
- trustManager,
- headers.build()
+ trustManager
);
}
}
@@ -343,47 +371,54 @@ public String provideAuthorization(Iterable challenges) {
}
static final class PersistentDataStoreBuilderImpl extends PersistentDataStoreBuilder implements DiagnosticDescription {
- public PersistentDataStoreBuilderImpl(PersistentDataStoreFactory persistentDataStoreFactory) {
- super(persistentDataStoreFactory);
+ public PersistentDataStoreBuilderImpl(ComponentConfigurer storeConfigurer) {
+ super(storeConfigurer);
}
@Override
- public LDValue describeConfiguration(BasicConfiguration basicConfiguration) {
- if (persistentDataStoreFactory instanceof DiagnosticDescription) {
- return ((DiagnosticDescription)persistentDataStoreFactory).describeConfiguration(basicConfiguration);
+ public LDValue describeConfiguration(ClientContext clientContext) {
+ if (persistentDataStoreConfigurer instanceof DiagnosticDescription) {
+ return ((DiagnosticDescription)persistentDataStoreConfigurer).describeConfiguration(clientContext);
}
return LDValue.of("custom");
}
- /**
- * Called by the SDK to create the data store instance.
- */
@Override
- public DataStore createDataStore(ClientContext context, DataStoreUpdates dataStoreUpdates) {
- PersistentDataStore core = persistentDataStoreFactory.createPersistentDataStore(context);
+ public DataStore build(ClientContext context) {
+ PersistentDataStore core = persistentDataStoreConfigurer.build(context);
return new PersistentDataStoreWrapper(
core,
cacheTime,
staleValuesPolicy,
recordCacheStats,
- dataStoreUpdates,
+ context.getDataStoreUpdateSink(),
ClientContextImpl.get(context).sharedExecutor,
- context.getBasic().getBaseLogger().subLogger(Loggers.DATA_STORE_LOGGER_NAME)
+ context.getBaseLogger().subLogger(Loggers.DATA_STORE_LOGGER_NAME)
);
}
}
static final class LoggingConfigurationBuilderImpl extends LoggingConfigurationBuilder {
@Override
- public LoggingConfiguration createLoggingConfiguration(BasicConfiguration basicConfiguration) {
- LDLogAdapter adapter = logAdapter == null ? LDSLF4J.adapter() : logAdapter;
+ public LoggingConfiguration build(ClientContext clientContext) {
+ LDLogAdapter adapter = logAdapter == null ? getDefaultLogAdapter() : logAdapter;
LDLogAdapter filteredAdapter = Logs.level(adapter,
minimumLevel == null ? LDLogLevel.INFO : minimumLevel);
// If the adapter is for a framework like SLF4J or java.util.logging that has its own external
// configuration system, then calling Logs.level here has no effect and filteredAdapter will be
// just the same as adapter.
String name = baseName == null ? Loggers.BASE_LOGGER_NAME : baseName;
- return new LoggingConfigurationImpl(name, filteredAdapter, logDataSourceOutageAsErrorAfter);
+ return new LoggingConfiguration(name, filteredAdapter, logDataSourceOutageAsErrorAfter);
+ }
+
+ private static LDLogAdapter getDefaultLogAdapter() {
+ // If SLF4J is present in the classpath, use that by default; otherwise use the console.
+ try {
+ Class.forName("org.slf4j.LoggerFactory");
+ return LDSLF4J.adapter();
+ } catch (ClassNotFoundException e) {
+ return Logs.toConsole();
+ }
}
}
@@ -405,4 +440,22 @@ public ServiceEndpoints createServiceEndpoints() {
return new ServiceEndpoints(streamingBaseUri, pollingBaseUri, eventsBaseUri);
}
}
+
+ static HttpProperties toHttpProperties(HttpConfiguration httpConfig) {
+ okhttp3.Authenticator proxyAuth = null;
+ if (httpConfig.getProxyAuthentication() != null) {
+ proxyAuth = Util.okhttpAuthenticatorFromHttpAuthStrategy(httpConfig.getProxyAuthentication());
+ }
+ return new HttpProperties(
+ httpConfig.getConnectTimeout().toMillis(),
+ ImmutableMap.copyOf(httpConfig.getDefaultHeaders()),
+ null,
+ httpConfig.getProxy(),
+ proxyAuth,
+ httpConfig.getSocketFactory(),
+ httpConfig.getSocketTimeout().toMillis(),
+ httpConfig.getSslSocketFactory(),
+ httpConfig.getTrustManager()
+ );
+ }
}
diff --git a/src/main/java/com/launchdarkly/sdk/server/DataModel.java b/src/main/java/com/launchdarkly/sdk/server/DataModel.java
index fb32db264..42cb321e3 100644
--- a/src/main/java/com/launchdarkly/sdk/server/DataModel.java
+++ b/src/main/java/com/launchdarkly/sdk/server/DataModel.java
@@ -2,18 +2,21 @@
import com.google.common.collect.ImmutableList;
import com.google.gson.annotations.JsonAdapter;
+import com.launchdarkly.sdk.AttributeRef;
+import com.launchdarkly.sdk.ContextKind;
import com.launchdarkly.sdk.LDValue;
-import com.launchdarkly.sdk.UserAttribute;
import com.launchdarkly.sdk.server.DataModelPreprocessing.ClausePreprocessed;
import com.launchdarkly.sdk.server.DataModelPreprocessing.FlagPreprocessed;
import com.launchdarkly.sdk.server.DataModelPreprocessing.FlagRulePreprocessed;
import com.launchdarkly.sdk.server.DataModelPreprocessing.PrerequisitePreprocessed;
import com.launchdarkly.sdk.server.DataModelPreprocessing.TargetPreprocessed;
-import com.launchdarkly.sdk.server.interfaces.DataStoreTypes.DataKind;
-import com.launchdarkly.sdk.server.interfaces.DataStoreTypes.ItemDescriptor;
+import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.DataKind;
+import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.ItemDescriptor;
import java.util.Collection;
+import java.util.HashMap;
import java.util.List;
+import java.util.Map;
import java.util.Set;
import static java.util.Collections.emptyList;
@@ -29,19 +32,20 @@
// implementing a custom component such as a data store. But beyond the mere fact of there being these kinds of
// data, applications should not be considered with their structure.
//
-// - For all classes that can be deserialized from JSON, there must be an empty constructor, and the fields
-// cannot be final. This is because of how Gson works: it creates an instance first, then sets the fields. If
-// we are able to move away from using Gson reflective deserialization in the future, we can make them final.
+// - For classes that can be deserialized from JSON, if we are relying on Gson's reflective behavior (i.e. if
+// the class does not have a custom TypeAdapter), there must be an empty constructor, and the fields cannot
+// be final. This is because of how Gson works: it creates an instance first, then sets the fields; that also
+// means we cannot do any transformation/validation of the fields in the constructor. But if we have a custom
+// deserializer, then we should use final fields.
//
-// - There should also be a constructor that takes all the fields; we should use that whenever we need to
-// create these objects programmatically (so that if we are able at some point to make the fields final, that
+// - In any case, there should be a constructor that takes all the fields; we should use that whenever we need
+// to create these objects programmatically (so that if we are able at some point to make the fields final, that
// won't break anything).
//
-// - For properties that have a collection type such as List, the getter method should always include a null
-// guard and return an empty collection if the field is null (so that we don't have to worry about null guards
-// every time we might want to iterate over these collections). Semantically there is no difference in the data
-// model between an empty list and a null list, and in some languages (particularly Go) it is easy for an
-// uninitialized list to be serialized to JSON as null.
+// - For properties that have a collection type such as List, we should ensure that a null is always changed to
+// an empty list (in the constructor, if the field can be made final; otherwise in the getter). Semantically
+// there is no difference in the data model between an empty list and a null list, and in some languages
+// (particularly Go) it is easy for an uninitialized list to be serialized to JSON as null.
//
// - Some classes have a "preprocessed" field containing types defined in DataModelPreprocessing. These fields
// must always be marked transient, so Gson will not serialize them. They are populated when we deserialize a
@@ -127,6 +131,7 @@ static final class FeatureFlag implements VersionedData, JsonHelpers.PostProcess
private List prerequisites;
private String salt;
private List targets;
+ private List contextTargets;
private List rules;
private VariationOrRollout fallthrough;
private Integer offVariation; //optional
@@ -143,8 +148,8 @@ static final class FeatureFlag implements VersionedData, JsonHelpers.PostProcess
FeatureFlag() {}
FeatureFlag(String key, int version, boolean on, List prerequisites, String salt, List targets,
- List rules, VariationOrRollout fallthrough, Integer offVariation, List variations,
- boolean clientSide, boolean trackEvents, boolean trackEventsFallthrough,
+ List contextTargets, List rules, VariationOrRollout fallthrough, Integer offVariation,
+ List variations, boolean clientSide, boolean trackEvents, boolean trackEventsFallthrough,
Long debugEventsUntilDate, boolean deleted) {
this.key = key;
this.version = version;
@@ -152,6 +157,7 @@ static final class FeatureFlag implements VersionedData, JsonHelpers.PostProcess
this.prerequisites = prerequisites;
this.salt = salt;
this.targets = targets;
+ this.contextTargets = contextTargets;
this.rules = rules;
this.fallthrough = fallthrough;
this.offVariation = offVariation;
@@ -204,6 +210,11 @@ List getTargets() {
return targets == null ? emptyList() : targets;
}
+ // Guaranteed non-null
+ List getContextTargets() {
+ return contextTargets == null ? emptyList() : contextTargets;
+ }
+
// Guaranteed non-null
List getRules() {
return rules == null ? emptyList() : rules;
@@ -254,6 +265,7 @@ int getVariation() {
}
static final class Target {
+ private ContextKind contextKind;
private Set values;
private int variation;
@@ -261,11 +273,16 @@ static final class Target {
Target() {}
- Target(Set values, int variation) {
+ Target(ContextKind contextKind, Set values, int variation) {
+ this.contextKind = contextKind;
this.values = values;
this.variation = variation;
}
+ ContextKind getContextKind() {
+ return contextKind;
+ }
+
// Guaranteed non-null
Collection getValues() {
return values == null ? emptySet() : values;
@@ -313,25 +330,29 @@ boolean isTrackEvents() {
}
}
+ @JsonAdapter(DataModelSerialization.ClauseTypeAdapter.class)
static final class Clause {
- private UserAttribute attribute;
- private Operator op;
- private List values; //interpreted as an OR of values
- private boolean negate;
+ private final ContextKind contextKind;
+ private final AttributeRef attribute;
+ private final Operator op;
+ private final List values; //interpreted as an OR of values
+ private final boolean negate;
transient ClausePreprocessed preprocessed;
- Clause() {
- }
-
- Clause(UserAttribute attribute, Operator op, List values, boolean negate) {
+ Clause(ContextKind contextKind, AttributeRef attribute, Operator op, List values, boolean negate) {
+ this.contextKind = contextKind;
this.attribute = attribute;
this.op = op;
- this.values = values;
+ this.values = values == null ? emptyList() : values;
this.negate = negate;
}
- UserAttribute getAttribute() {
+ ContextKind getContextKind() {
+ return contextKind;
+ }
+
+ AttributeRef getAttribute() {
return attribute;
}
@@ -341,7 +362,7 @@ Operator getOp() {
// Guaranteed non-null
List getValues() {
- return values == null ? emptyList() : values;
+ return values;
}
boolean isNegate() {
@@ -349,34 +370,32 @@ boolean isNegate() {
}
}
+ @JsonAdapter(DataModelSerialization.RolloutTypeAdapter.class)
static final class Rollout {
- private List variations;
- private UserAttribute bucketBy;
- private RolloutKind kind;
- private Integer seed;
+ private final ContextKind contextKind;
+ private final List variations;
+ private final AttributeRef bucketBy;
+ private final RolloutKind kind;
+ private final Integer seed;
- Rollout() {}
-
- Rollout(List variations, UserAttribute bucketBy, RolloutKind kind) {
- this.variations = variations;
+ Rollout(ContextKind contextKind, List variations, AttributeRef bucketBy, RolloutKind kind, Integer seed) {
+ this.contextKind = contextKind;
+ this.variations = variations == null ? emptyList() : variations;
this.bucketBy = bucketBy;
this.kind = kind;
- this.seed = null;
+ this.seed = seed;
}
- Rollout(List variations, UserAttribute bucketBy, RolloutKind kind, Integer seed) {
- this.variations = variations;
- this.bucketBy = bucketBy;
- this.kind = kind;
- this.seed = seed;
+ ContextKind getContextKind() {
+ return contextKind;
}
// Guaranteed non-null
List getVariations() {
- return variations == null ? emptyList() : variations;
+ return variations;
}
- UserAttribute getBucketBy() {
+ AttributeRef getBucketBy() {
return bucketBy;
}
@@ -448,11 +467,14 @@ static final class Segment implements VersionedData, JsonHelpers.PostProcessingD
private String key;
private Set included;
private Set excluded;
+ private List includedContexts;
+ private List excludedContexts;
private String salt;
private List rules;
private int version;
private boolean deleted;
private boolean unbounded;
+ private ContextKind unboundedContextKind;
private Integer generation;
Segment() {}
@@ -460,20 +482,26 @@ static final class Segment implements VersionedData, JsonHelpers.PostProcessingD
Segment(String key,
Set included,
Set excluded,
+ List includedContexts,
+ List excludedContexts,
String salt,
List rules,
int version,
boolean deleted,
boolean unbounded,
+ ContextKind unboundedContextKind,
Integer generation) {
this.key = key;
this.included = included;
this.excluded = excluded;
+ this.includedContexts = includedContexts;
+ this.excludedContexts = excludedContexts;
this.salt = salt;
this.rules = rules;
this.version = version;
this.deleted = deleted;
this.unbounded = unbounded;
+ this.unboundedContextKind = unboundedContextKind;
this.generation = generation;
}
@@ -491,6 +519,16 @@ Collection getExcluded() {
return excluded == null ? emptySet() : excluded;
}
+ // Guaranteed non-null
+ List getIncludedContexts() {
+ return includedContexts == null ? emptyList() : includedContexts;
+ }
+
+ // Guaranteed non-null
+ List getExcludedContexts() {
+ return excludedContexts == null ? emptyList() : excludedContexts;
+ }
+
String getSalt() {
return salt;
}
@@ -512,6 +550,10 @@ public boolean isUnbounded() {
return unbounded;
}
+ public ContextKind getUnboundedContextKind() {
+ return unboundedContextKind;
+ }
+
public Integer getGeneration() {
return generation;
}
@@ -521,53 +563,135 @@ public void afterDeserialized() {
}
}
+ @JsonAdapter(DataModelSerialization.SegmentRuleTypeAdapter.class)
static final class SegmentRule {
private final List clauses;
private final Integer weight;
- private final UserAttribute bucketBy;
+ private final ContextKind rolloutContextKind;
+ private final AttributeRef bucketBy;
- SegmentRule(List clauses, Integer weight, UserAttribute bucketBy) {
- this.clauses = clauses;
+ SegmentRule(List clauses, Integer weight, ContextKind rolloutContextKind, AttributeRef bucketBy) {
+ this.clauses = clauses == null ? emptyList() : clauses;
this.weight = weight;
+ this.rolloutContextKind = rolloutContextKind;
this.bucketBy = bucketBy;
}
// Guaranteed non-null
List getClauses() {
- return clauses == null ? emptyList() : clauses;
+ return clauses;
}
Integer getWeight() {
return weight;
}
- UserAttribute getBucketBy() {
+ ContextKind getRolloutContextKind() {
+ return rolloutContextKind;
+ }
+
+ AttributeRef getBucketBy() {
return bucketBy;
}
}
+ static class SegmentTarget {
+ private ContextKind contextKind;
+ private Set values;
+
+ SegmentTarget(ContextKind contextKind, Set values) {
+ this.contextKind = contextKind;
+ this.values = values;
+ }
+
+ ContextKind getContextKind() {
+ return contextKind;
+ }
+
+ Set getValues() { // guaranteed non-null
+ return values == null ? emptySet() : values;
+ }
+ }
+
/**
- * This enum can be directly deserialized from JSON, avoiding the need for a mapping of strings to
- * operators. The implementation of each operator is in EvaluatorOperators.
+ * This is an enum-like type rather than an enum because we don't want unrecognized operators to
+ * cause parsing of the whole JSON environment to fail. The implementation of each operator is in
+ * EvaluatorOperators.
*/
- static enum Operator {
- in,
- endsWith,
- startsWith,
- matches,
- contains,
- lessThan,
- lessThanOrEqual,
- greaterThan,
- greaterThanOrEqual,
- before,
- after,
- semVerEqual,
- semVerLessThan,
- semVerGreaterThan,
- segmentMatch
- }
+ static class Operator {
+ private final String name;
+ private final boolean builtin;
+ private final int hashCode;
+
+ private static final Map builtins = new HashMap<>();
+
+ private Operator(String name, boolean builtin) {
+ this.name = name;
+ this.builtin = builtin;
+
+ // Precompute the hash code for fast map lookups - String.hashCode() does memoize this value,
+ // sort of, but we shouldn't have to rely on that
+ this.hashCode = name.hashCode();
+ }
+
+ private static Operator builtin(String name) {
+ Operator op = new Operator(name, true);
+ builtins.put(name, op);
+ return op;
+ }
+
+ static final Operator in = builtin("in");
+ static final Operator startsWith = builtin("startsWith");
+ static final Operator endsWith = builtin("endsWith");
+ static final Operator matches = builtin("matches");
+ static final Operator contains = builtin("contains");
+ static final Operator lessThan = builtin("lessThan");
+ static final Operator lessThanOrEqual = builtin("lessThanOrEqual");
+ static final Operator greaterThan = builtin("greaterThan");
+ static final Operator greaterThanOrEqual = builtin("greaterThanOrEqual");
+ static final Operator before = builtin("before");
+ static final Operator after = builtin("after");
+ static final Operator semVerEqual = builtin("semVerEqual");
+ static final Operator semVerLessThan = builtin("semVerLessThan");
+ static final Operator semVerGreaterThan = builtin("semVerGreaterThan");
+ static final Operator segmentMatch = builtin("segmentMatch");
+
+ static Operator forName(String name) {
+ // Normally we will only see names that are in the builtins map. Anything else is something
+ // the SDK doesn't recognize, but we still need to allow it to exist rather than throwing
+ // an error.
+ Operator op = builtins.get(name);
+ return op == null ? new Operator(name, false) : op;
+ }
+
+ static Iterable getBuiltins() {
+ return builtins.values();
+ }
+ String name() {
+ return name;
+ }
+
+ @Override
+ public String toString() {
+ return name;
+ }
+
+ @Override
+ public boolean equals(Object other) {
+ if (this.builtin) {
+ // reference equality is OK for the builtin ones, because we intern them
+ return this == other;
+ }
+ return other instanceof Operator && ((Operator)other).name.equals(this.name);
+ }
+
+ @Override
+ public int hashCode() {
+ return hashCode;
+ }
+ }
+
/**
* This enum is all lowercase so that when it is automatically deserialized from JSON,
* the lowercase properties properly map to these enumerations.
diff --git a/src/main/java/com/launchdarkly/sdk/server/DataModelDependencies.java b/src/main/java/com/launchdarkly/sdk/server/DataModelDependencies.java
index a69a50bc4..46cb04e6e 100644
--- a/src/main/java/com/launchdarkly/sdk/server/DataModelDependencies.java
+++ b/src/main/java/com/launchdarkly/sdk/server/DataModelDependencies.java
@@ -6,10 +6,10 @@
import com.google.common.collect.Iterables;
import com.launchdarkly.sdk.LDValue;
import com.launchdarkly.sdk.server.DataModel.Operator;
-import com.launchdarkly.sdk.server.interfaces.DataStoreTypes.DataKind;
-import com.launchdarkly.sdk.server.interfaces.DataStoreTypes.FullDataSet;
-import com.launchdarkly.sdk.server.interfaces.DataStoreTypes.ItemDescriptor;
-import com.launchdarkly.sdk.server.interfaces.DataStoreTypes.KeyedItems;
+import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.DataKind;
+import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.FullDataSet;
+import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.ItemDescriptor;
+import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.KeyedItems;
import java.util.Comparator;
import java.util.HashMap;
@@ -28,7 +28,7 @@
/**
* Implements a dependency graph ordering for data to be stored in a data store.
*
- * We use this to order the data that we pass to {@link com.launchdarkly.sdk.server.interfaces.DataStore#init(FullDataSet)},
+ * We use this to order the data that we pass to {@link com.launchdarkly.sdk.server.subsystems.DataStore#init(FullDataSet)},
* and also to determine which flags are affected by a change if the application is listening for flag change events.
*
* Dependencies are defined as follows: there is a dependency from flag F to flag G if F is a prerequisite flag for
@@ -84,27 +84,37 @@ public static Set computeDependenciesFrom(DataKind fromKind, ItemDes
Iterable segmentKeys = concat(
transform(
flag.getRules(),
- rule -> concat(
- Iterables.>transform(
- rule.getClauses(),
- clause -> clause.getOp() == Operator.segmentMatch ?
- transform(clause.getValues(), LDValue::stringValue) :
- emptyList()
- )
- )
- )
+ rule -> segmentKeysFromClauses(rule.getClauses()))
);
return ImmutableSet.copyOf(
- concat(
- transform(prereqFlagKeys, key -> new KindAndKey(FEATURES, key)),
- transform(segmentKeys, key -> new KindAndKey(SEGMENTS, key))
- )
+ concat(kindAndKeys(FEATURES, prereqFlagKeys), kindAndKeys(SEGMENTS, segmentKeys))
);
+ } else if (fromKind == SEGMENTS) {
+ DataModel.Segment segment = (DataModel.Segment)fromItem.getItem();
+
+ Iterable nestedSegmentKeys = concat(
+ transform(
+ segment.getRules(),
+ rule -> segmentKeysFromClauses(rule.getClauses())));
+ return ImmutableSet.copyOf(kindAndKeys(SEGMENTS, nestedSegmentKeys));
}
return emptySet();
}
+ private static Iterable kindAndKeys(DataKind kind, Iterable keys) {
+ return transform(keys, key -> new KindAndKey(kind, key));
+ }
+
+ private static Iterable segmentKeysFromClauses(Iterable clauses) {
+ return concat(Iterables.>transform(
+ clauses,
+ clause -> clause.getOp() == Operator.segmentMatch ?
+ transform(clause.getValues(), LDValue::stringValue) :
+ emptyList()
+ ));
+ }
+
/**
* Returns a copy of the input data set that guarantees that if you iterate through it the outer list and
* the inner list in the order provided, any object that depends on another object will be updated after it.
diff --git a/src/main/java/com/launchdarkly/sdk/server/DataModelPreprocessing.java b/src/main/java/com/launchdarkly/sdk/server/DataModelPreprocessing.java
index af49227db..5a1c48e2b 100644
--- a/src/main/java/com/launchdarkly/sdk/server/DataModelPreprocessing.java
+++ b/src/main/java/com/launchdarkly/sdk/server/DataModelPreprocessing.java
@@ -21,6 +21,14 @@
import java.util.function.Function;
import java.util.regex.Pattern;
+import static com.launchdarkly.sdk.server.DataModel.Operator.after;
+import static com.launchdarkly.sdk.server.DataModel.Operator.before;
+import static com.launchdarkly.sdk.server.DataModel.Operator.in;
+import static com.launchdarkly.sdk.server.DataModel.Operator.matches;
+import static com.launchdarkly.sdk.server.DataModel.Operator.semVerEqual;
+import static com.launchdarkly.sdk.server.DataModel.Operator.semVerGreaterThan;
+import static com.launchdarkly.sdk.server.DataModel.Operator.semVerLessThan;
+
/**
* Additional information that we attach to our data model to reduce the overhead of feature flag
* evaluations. The methods that create these objects are called by the afterDeserialized() methods
@@ -142,6 +150,9 @@ static void preprocessFlag(FeatureFlag f) {
for (Target t: f.getTargets()) {
preprocessTarget(t, f);
}
+ for (Target t: f.getContextTargets()) {
+ preprocessTarget(t, f);
+ }
List rules = f.getRules();
int n = rules.size();
for (int i = 0; i < n; i++) {
@@ -198,8 +209,7 @@ static void preprocessClause(Clause c) {
if (op == null) {
return;
}
- switch (op) {
- case in:
+ if (op == in) {
// This is a special case where the clause is testing for an exact match against any of the
// clause values. Converting the value list to a Set allows us to do a fast lookup instead of
// a linear search. We do not do this for other operators (or if there are fewer than two
@@ -207,27 +217,18 @@ static void preprocessClause(Clause c) {
if (values.size() > 1) {
c.preprocessed = new ClausePreprocessed(ImmutableSet.copyOf(values), null);
}
- break;
- case matches:
+ } else if (op == matches) {
c.preprocessed = preprocessClauseValues(c.getValues(), v ->
new ClausePreprocessed.ValueData(null, EvaluatorTypeConversion.valueToRegex(v), null)
);
- break;
- case after:
- case before:
+ } else if (op == after || op == before) {
c.preprocessed = preprocessClauseValues(c.getValues(), v ->
new ClausePreprocessed.ValueData(EvaluatorTypeConversion.valueToDateTime(v), null, null)
);
- break;
- case semVerEqual:
- case semVerGreaterThan:
- case semVerLessThan:
+ } else if (op == semVerEqual || op == semVerGreaterThan || op == semVerLessThan) {
c.preprocessed = preprocessClauseValues(c.getValues(), v ->
new ClausePreprocessed.ValueData(null, null, EvaluatorTypeConversion.valueToSemVer(v))
);
- break;
- default:
- break;
}
}
diff --git a/src/main/java/com/launchdarkly/sdk/server/DataModelSerialization.java b/src/main/java/com/launchdarkly/sdk/server/DataModelSerialization.java
index dd55cb879..387e0ee6f 100644
--- a/src/main/java/com/launchdarkly/sdk/server/DataModelSerialization.java
+++ b/src/main/java/com/launchdarkly/sdk/server/DataModelSerialization.java
@@ -3,24 +3,38 @@
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.gson.JsonElement;
+import com.google.gson.TypeAdapter;
import com.google.gson.stream.JsonReader;
import com.google.gson.stream.JsonToken;
+import com.google.gson.stream.JsonWriter;
+import com.launchdarkly.sdk.AttributeRef;
+import com.launchdarkly.sdk.ContextKind;
+import com.launchdarkly.sdk.LDValue;
+import com.launchdarkly.sdk.server.DataModel.Clause;
import com.launchdarkly.sdk.server.DataModel.FeatureFlag;
+import com.launchdarkly.sdk.server.DataModel.Operator;
+import com.launchdarkly.sdk.server.DataModel.Rollout;
+import com.launchdarkly.sdk.server.DataModel.RolloutKind;
import com.launchdarkly.sdk.server.DataModel.Segment;
+import com.launchdarkly.sdk.server.DataModel.SegmentRule;
import com.launchdarkly.sdk.server.DataModel.VersionedData;
-import com.launchdarkly.sdk.server.interfaces.DataStoreTypes.DataKind;
-import com.launchdarkly.sdk.server.interfaces.DataStoreTypes.FullDataSet;
-import com.launchdarkly.sdk.server.interfaces.DataStoreTypes.ItemDescriptor;
-import com.launchdarkly.sdk.server.interfaces.DataStoreTypes.KeyedItems;
-import com.launchdarkly.sdk.server.interfaces.SerializationException;
+import com.launchdarkly.sdk.server.DataModel.WeightedVariation;
+import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.DataKind;
+import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.FullDataSet;
+import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.ItemDescriptor;
+import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.KeyedItems;
+import com.launchdarkly.sdk.server.subsystems.SerializationException;
import java.io.IOException;
import java.util.AbstractMap;
+import java.util.ArrayList;
+import java.util.List;
import java.util.Map;
import static com.launchdarkly.sdk.server.DataModel.FEATURES;
import static com.launchdarkly.sdk.server.DataModel.SEGMENTS;
import static com.launchdarkly.sdk.server.JsonHelpers.gsonInstance;
+import static com.launchdarkly.sdk.server.JsonHelpers.gsonInstanceWithNullsAllowed;
/**
* JSON conversion logic specifically for our data model types.
@@ -138,4 +152,216 @@ static FullDataSet parseFullDataSet(JsonReader jr) throws Serial
throw new SerializationException(e);
}
}
+
+ // Custom deserialization logic for Clause because the attribute field is treated differently
+ // depending on the contextKind field (if contextKind is null, we always parse attribute as a
+ // literal attribute name and not a reference).
+ static class ClauseTypeAdapter extends TypeAdapter {
+ @Override
+ public void write(JsonWriter out, Clause c) throws IOException {
+ out.beginObject();
+ if (c.getContextKind() != null) {
+ out.name("contextKind").value(c.getContextKind().toString());
+ }
+ out.name("attribute").value(c.getAttribute() == null ? null : c.getAttribute().toString());
+ out.name("op").value(c.getOp() == null ? null : c.getOp().name());
+ out.name("values").beginArray();
+ for (LDValue v: c.getValues()) {
+ gsonInstanceWithNullsAllowed().toJson(v, LDValue.class, out);
+ }
+ out.endArray();
+ out.name("negate").value(c.isNegate());
+ out.endObject();
+ }
+
+ @Override
+ public Clause read(JsonReader in) throws IOException {
+ ContextKind contextKind = null;
+ String attrString = null;
+ Operator op = null;
+ List values = new ArrayList<>();
+ boolean negate = false;
+ in.beginObject();
+ while (in.hasNext()) {
+ switch (in.nextName()) {
+ case "contextKind":
+ contextKind = ContextKind.of(in.nextString());
+ break;
+ case "attribute":
+ attrString = in.nextString();
+ break;
+ case "op":
+ op = Operator.forName(in.nextString());
+ break;
+ case "values":
+ if (in.peek() == JsonToken.NULL) {
+ in.skipValue();
+ } else {
+ in.beginArray();
+ while (in.hasNext()) {
+ LDValue value = gsonInstanceWithNullsAllowed().fromJson(in, LDValue.class);
+ values.add(value);
+ }
+ in.endArray();
+ }
+ break;
+ case "negate":
+ negate = in.nextBoolean();
+ break;
+ default:
+ in.skipValue();
+ }
+ }
+ in.endObject();
+ AttributeRef attribute = attributeNameOrPath(attrString, contextKind);
+ return new Clause(contextKind, attribute, op, values, negate);
+ }
+ }
+
+ // Custom deserialization logic for Rollout for a similar reason to Clause.
+ static class RolloutTypeAdapter extends TypeAdapter {
+ @Override
+ public void write(JsonWriter out, Rollout r) throws IOException {
+ out.beginObject();
+ if (r.getContextKind() != null) {
+ out.name("contextKind").value(r.getContextKind().toString());
+ }
+ out.name("variations").beginArray();
+ for (WeightedVariation wv: r.getVariations()) {
+ gsonInstanceWithNullsAllowed().toJson(wv, WeightedVariation.class, out);
+ }
+ out.endArray();
+ if (r.getBucketBy() != null) {
+ out.name("bucketBy").value(r.getBucketBy().toString());
+ }
+ if (r.getKind() != RolloutKind.rollout) {
+ out.name("kind").value(r.getKind().name());
+ }
+ if (r.getSeed() != null) {
+ out.name("seed").value(r.getSeed());
+ }
+ out.endObject();
+ }
+
+ @Override
+ public Rollout read(JsonReader in) throws IOException {
+ ContextKind contextKind = null;
+ List variations = new ArrayList<>();
+ String bucketByString = null;
+ RolloutKind kind = RolloutKind.rollout;
+ Integer seed = null;
+ in.beginObject();
+ while (in.hasNext()) {
+ switch (in.nextName()) {
+ case "contextKind":
+ contextKind = ContextKind.of(in.nextString());
+ break;
+ case "variations":
+ if (in.peek() == JsonToken.NULL) {
+ in.skipValue();
+ } else {
+ in.beginArray();
+ while (in.hasNext()) {
+ WeightedVariation wv = gsonInstanceWithNullsAllowed().fromJson(in, WeightedVariation.class);
+ variations.add(wv);
+ }
+ in.endArray();
+ }
+ break;
+ case "bucketBy":
+ bucketByString = in.nextString();
+ break;
+ case "kind":
+ kind = RolloutKind.experiment.name().equals(in.nextString()) ? RolloutKind.experiment :
+ RolloutKind.rollout;
+ break;
+ case "seed":
+ seed = readNullableInt(in);
+ break;
+ default:
+ in.skipValue();
+ }
+ }
+ in.endObject();
+ AttributeRef bucketBy = attributeNameOrPath(bucketByString, contextKind);
+ return new Rollout(contextKind, variations, bucketBy, kind, seed);
+ }
+ }
+
+ // Custom deserialization logic for SegmentRule for a similar reason to Clause.
+ static class SegmentRuleTypeAdapter extends TypeAdapter {
+ @Override
+ public void write(JsonWriter out, SegmentRule sr) throws IOException {
+ out.beginObject();
+ out.name("clauses").beginArray();
+ for (Clause c: sr.getClauses()) {
+ gsonInstanceWithNullsAllowed().toJson(c, Clause.class, out);
+ }
+ out.endArray();
+ if (sr.getWeight() != null) {
+ out.name("weight").value(sr.getWeight());
+ }
+ if (sr.getRolloutContextKind() != null) {
+ out.name("rolloutContextKind").value(sr.getRolloutContextKind().toString());
+ }
+ if (sr.getBucketBy() != null) {
+ out.name("bucketBy").value(sr.getBucketBy().toString());
+ }
+ out.endObject();
+ }
+
+ @Override
+ public SegmentRule read(JsonReader in) throws IOException {
+ List clauses = new ArrayList<>();
+ Integer weight = null;
+ ContextKind rolloutContextKind = null;
+ String bucketByString = null;
+ in.beginObject();
+ while (in.hasNext()) {
+ switch (in.nextName()) {
+ case "clauses":
+ if (in.peek() == JsonToken.NULL) {
+ in.skipValue();
+ } else {
+ in.beginArray();
+ while (in.hasNext()) {
+ Clause c = gsonInstanceWithNullsAllowed().fromJson(in, Clause.class);
+ clauses.add(c);
+ }
+ in.endArray();
+ }
+ break;
+ case "weight":
+ weight = readNullableInt(in);
+ break;
+ case "rolloutContextKind":
+ rolloutContextKind = ContextKind.of(in.nextString());
+ break;
+ case "bucketBy":
+ bucketByString = in.nextString();
+ break;
+ default:
+ in.skipValue();
+ }
+ }
+ in.endObject();
+ AttributeRef bucketBy = attributeNameOrPath(bucketByString, rolloutContextKind);
+ return new SegmentRule(clauses, weight, rolloutContextKind, bucketBy);
+ }
+ }
+
+ static Integer readNullableInt(JsonReader in) throws IOException {
+ if (in.peek() == JsonToken.NULL) {
+ in.skipValue();
+ return null;
+ }
+ return in.nextInt();
+ }
+
+ static AttributeRef attributeNameOrPath(String attrString, ContextKind contextKind) {
+ if (attrString == null) {
+ return null;
+ }
+ return contextKind == null ? AttributeRef.fromLiteral(attrString) : AttributeRef.fromPath(attrString);
+ }
}
diff --git a/src/main/java/com/launchdarkly/sdk/server/DataSourceUpdatesImpl.java b/src/main/java/com/launchdarkly/sdk/server/DataSourceUpdatesImpl.java
index e681e5147..ba9c261d6 100644
--- a/src/main/java/com/launchdarkly/sdk/server/DataSourceUpdatesImpl.java
+++ b/src/main/java/com/launchdarkly/sdk/server/DataSourceUpdatesImpl.java
@@ -11,13 +11,13 @@
import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider.State;
import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider.Status;
import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider.StatusListener;
-import com.launchdarkly.sdk.server.interfaces.DataSourceUpdates;
-import com.launchdarkly.sdk.server.interfaces.DataStore;
+import com.launchdarkly.sdk.server.subsystems.DataSourceUpdateSink;
+import com.launchdarkly.sdk.server.subsystems.DataStore;
+import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.DataKind;
+import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.FullDataSet;
+import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.ItemDescriptor;
+import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.KeyedItems;
import com.launchdarkly.sdk.server.interfaces.DataStoreStatusProvider;
-import com.launchdarkly.sdk.server.interfaces.DataStoreTypes.DataKind;
-import com.launchdarkly.sdk.server.interfaces.DataStoreTypes.FullDataSet;
-import com.launchdarkly.sdk.server.interfaces.DataStoreTypes.ItemDescriptor;
-import com.launchdarkly.sdk.server.interfaces.DataStoreTypes.KeyedItems;
import com.launchdarkly.sdk.server.interfaces.FlagChangeEvent;
import com.launchdarkly.sdk.server.interfaces.FlagChangeListener;
@@ -48,7 +48,7 @@
*
* @since 4.11.0
*/
-final class DataSourceUpdatesImpl implements DataSourceUpdates {
+final class DataSourceUpdatesImpl implements DataSourceUpdateSink {
private final DataStore store;
private final EventBroadcasterImpl<FlagChangeListener, FlagChangeEvent> flagChangeEventNotifier;
private final EventBroadcasterImpl<StatusListener, Status> dataSourceStatusNotifier;
diff --git a/src/main/java/com/launchdarkly/sdk/server/DataStoreStatusProviderImpl.java b/src/main/java/com/launchdarkly/sdk/server/DataStoreStatusProviderImpl.java
index 207ee24e3..7695ca792 100644
--- a/src/main/java/com/launchdarkly/sdk/server/DataStoreStatusProviderImpl.java
+++ b/src/main/java/com/launchdarkly/sdk/server/DataStoreStatusProviderImpl.java
@@ -1,7 +1,7 @@
package com.launchdarkly.sdk.server;
-import com.launchdarkly.sdk.server.interfaces.DataStore;
import com.launchdarkly.sdk.server.interfaces.DataStoreStatusProvider;
+import com.launchdarkly.sdk.server.subsystems.DataStore;
final class DataStoreStatusProviderImpl implements DataStoreStatusProvider {
private final DataStore store;
diff --git a/src/main/java/com/launchdarkly/sdk/server/DataStoreUpdatesImpl.java b/src/main/java/com/launchdarkly/sdk/server/DataStoreUpdatesImpl.java
index 93b01eb38..21a1cabd6 100644
--- a/src/main/java/com/launchdarkly/sdk/server/DataStoreUpdatesImpl.java
+++ b/src/main/java/com/launchdarkly/sdk/server/DataStoreUpdatesImpl.java
@@ -1,11 +1,11 @@
package com.launchdarkly.sdk.server;
import com.launchdarkly.sdk.server.interfaces.DataStoreStatusProvider;
-import com.launchdarkly.sdk.server.interfaces.DataStoreUpdates;
+import com.launchdarkly.sdk.server.subsystems.DataStoreUpdateSink;
import java.util.concurrent.atomic.AtomicReference;
-class DataStoreUpdatesImpl implements DataStoreUpdates {
+class DataStoreUpdatesImpl implements DataStoreUpdateSink {
// package-private because it's convenient to use these from DataStoreStatusProviderImpl
final EventBroadcasterImpl<DataStoreStatusProvider.StatusListener, DataStoreStatusProvider.Status> statusBroadcaster;
final AtomicReference<DataStoreStatusProvider.Status> lastStatus;
diff --git a/src/main/java/com/launchdarkly/sdk/server/DefaultEventProcessor.java b/src/main/java/com/launchdarkly/sdk/server/DefaultEventProcessor.java
deleted file mode 100644
index a96170cbe..000000000
--- a/src/main/java/com/launchdarkly/sdk/server/DefaultEventProcessor.java
+++ /dev/null
@@ -1,649 +0,0 @@
-package com.launchdarkly.sdk.server;
-
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.util.concurrent.ThreadFactoryBuilder;
-import com.launchdarkly.logging.LDLogger;
-import com.launchdarkly.logging.LogValues;
-import com.launchdarkly.sdk.LDUser;
-import com.launchdarkly.sdk.server.EventSummarizer.EventSummary;
-import com.launchdarkly.sdk.server.interfaces.Event;
-import com.launchdarkly.sdk.server.interfaces.EventProcessor;
-import com.launchdarkly.sdk.server.interfaces.EventSender;
-import com.launchdarkly.sdk.server.interfaces.EventSender.EventDataKind;
-
-import java.io.IOException;
-import java.io.StringWriter;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.concurrent.ArrayBlockingQueue;
-import java.util.concurrent.BlockingQueue;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.ScheduledExecutorService;
-import java.util.concurrent.ScheduledFuture;
-import java.util.concurrent.Semaphore;
-import java.util.concurrent.ThreadFactory;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.concurrent.atomic.AtomicInteger;
-import java.util.concurrent.atomic.AtomicLong;
-
-final class DefaultEventProcessor implements EventProcessor {
- @VisibleForTesting final EventDispatcher dispatcher;
- private final BlockingQueue<EventProcessorMessage> inbox;
- private final ScheduledExecutorService scheduler;
- private final AtomicBoolean closed = new AtomicBoolean(false);
- private final List<ScheduledFuture<?>> scheduledTasks = new ArrayList<>();
- private volatile boolean inputCapacityExceeded = false;
- private final LDLogger logger;
-
- DefaultEventProcessor(
- EventsConfiguration eventsConfig,
- ScheduledExecutorService sharedExecutor,
- int threadPriority,
- DiagnosticAccumulator diagnosticAccumulator,
- DiagnosticEvent.Init diagnosticInitEvent,
- LDLogger logger
- ) {
- inbox = new ArrayBlockingQueue<>(eventsConfig.capacity);
-
- scheduler = sharedExecutor;
- this.logger = logger;
-
- dispatcher = new EventDispatcher(
- eventsConfig,
- sharedExecutor,
- threadPriority,
- inbox,
- closed,
- diagnosticAccumulator,
- diagnosticInitEvent,
- logger
- );
-
- Runnable flusher = () -> {
- postMessageAsync(MessageType.FLUSH, null);
- };
- scheduledTasks.add(this.scheduler.scheduleAtFixedRate(flusher, eventsConfig.flushInterval.toMillis(),
- eventsConfig.flushInterval.toMillis(), TimeUnit.MILLISECONDS));
- Runnable userKeysFlusher = () -> {
- postMessageAsync(MessageType.FLUSH_USERS, null);
- };
- scheduledTasks.add(this.scheduler.scheduleAtFixedRate(userKeysFlusher, eventsConfig.userKeysFlushInterval.toMillis(),
- eventsConfig.userKeysFlushInterval.toMillis(), TimeUnit.MILLISECONDS));
- if (diagnosticAccumulator != null) {
- Runnable diagnosticsTrigger = () -> {
- postMessageAsync(MessageType.DIAGNOSTIC, null);
- };
- scheduledTasks.add(this.scheduler.scheduleAtFixedRate(diagnosticsTrigger, eventsConfig.diagnosticRecordingInterval.toMillis(),
- eventsConfig.diagnosticRecordingInterval.toMillis(), TimeUnit.MILLISECONDS));
- }
- }
-
- @Override
- public void sendEvent(Event e) {
- if (!closed.get()) {
- postMessageAsync(MessageType.EVENT, e);
- }
- }
-
- @Override
- public void flush() {
- if (!closed.get()) {
- postMessageAsync(MessageType.FLUSH, null);
- }
- }
-
- @Override
- public void close() throws IOException {
- if (closed.compareAndSet(false, true)) {
- scheduledTasks.forEach(task -> task.cancel(false));
- postMessageAsync(MessageType.FLUSH, null);
- postMessageAndWait(MessageType.SHUTDOWN, null);
- }
- }
-
- @VisibleForTesting
- void waitUntilInactive() throws IOException {
- postMessageAndWait(MessageType.SYNC, null);
- }
-
- @VisibleForTesting
- void postDiagnostic() {
- postMessageAsync(MessageType.DIAGNOSTIC, null);
- }
-
- private void postMessageAsync(MessageType type, Event event) {
- postToChannel(new EventProcessorMessage(type, event, false));
- }
-
- private void postMessageAndWait(MessageType type, Event event) {
- EventProcessorMessage message = new EventProcessorMessage(type, event, true);
- if (postToChannel(message)) {
- // COVERAGE: There is no way to reliably cause this to fail in tests
- message.waitForCompletion();
- }
- }
-
- private boolean postToChannel(EventProcessorMessage message) {
- if (inbox.offer(message)) {
- return true;
- }
- // If the inbox is full, it means the EventDispatcher thread is seriously backed up with not-yet-processed
- // events. This is unlikely, but if it happens, it means the application is probably doing a ton of flag
- // evaluations across many threads-- so if we wait for a space in the inbox, we risk a very serious slowdown
- // of the app. To avoid that, we'll just drop the event. The log warning about this will only be shown once.
- boolean alreadyLogged = inputCapacityExceeded; // possible race between this and the next line, but it's of no real consequence - we'd just get an extra log line
- inputCapacityExceeded = true;
- // COVERAGE: There is no way to reliably cause this condition in tests
- if (!alreadyLogged) {
- logger.warn("Events are being produced faster than they can be processed; some events will be dropped");
- }
- return false;
- }
-
- private static enum MessageType {
- EVENT,
- FLUSH,
- FLUSH_USERS,
- DIAGNOSTIC,
- SYNC,
- SHUTDOWN
- }
-
- private static final class EventProcessorMessage {
- private final MessageType type;
- private final Event event;
- private final Semaphore reply;
-
- private EventProcessorMessage(MessageType type, Event event, boolean sync) {
- this.type = type;
- this.event = event;
- reply = sync ? new Semaphore(0) : null;
- }
-
- void completed() {
- if (reply != null) {
- reply.release();
- }
- }
-
- void waitForCompletion() {
- if (reply == null) { // COVERAGE: there is no way to make this happen from test code
- return;
- }
- while (true) {
- try {
- reply.acquire();
- return;
- }
- catch (InterruptedException ex) { // COVERAGE: there is no way to make this happen from test code.
- }
- }
- }
-
-// intentionally commented out so this doesn't affect coverage reports when we're not debugging
-// @Override
-// public String toString() { // for debugging only
-// return ((event == null) ? type.toString() : (type + ": " + event.getClass().getSimpleName())) +
-// (reply == null ? "" : " (sync)");
-// }
- }
-
- /**
- * Takes messages from the input queue, updating the event buffer and summary counters
- * on its own thread.
- */
- static final class EventDispatcher {
- private static final int MAX_FLUSH_THREADS = 5;
- private static final int MESSAGE_BATCH_SIZE = 50;
-
- @VisibleForTesting final EventsConfiguration eventsConfig;
- private final BlockingQueue<EventProcessorMessage> inbox;
- private final AtomicBoolean closed;
- private final List<SendEventsTask> flushWorkers;
- private final AtomicInteger busyFlushWorkersCount;
- private final AtomicLong lastKnownPastTime = new AtomicLong(0);
- private final AtomicBoolean disabled = new AtomicBoolean(false);
- @VisibleForTesting final DiagnosticAccumulator diagnosticAccumulator;
- private final ExecutorService sharedExecutor;
- private final SendDiagnosticTaskFactory sendDiagnosticTaskFactory;
- private final LDLogger logger;
-
- private long deduplicatedUsers = 0;
-
- private EventDispatcher(
- EventsConfiguration eventsConfig,
- ExecutorService sharedExecutor,
- int threadPriority,
- BlockingQueue<EventProcessorMessage> inbox,
- AtomicBoolean closed,
- DiagnosticAccumulator diagnosticAccumulator,
- DiagnosticEvent.Init diagnosticInitEvent,
- LDLogger logger
- ) {
- this.eventsConfig = eventsConfig;
- this.inbox = inbox;
- this.closed = closed;
- this.sharedExecutor = sharedExecutor;
- this.diagnosticAccumulator = diagnosticAccumulator;
- this.busyFlushWorkersCount = new AtomicInteger(0);
- this.logger = logger;
-
- ThreadFactory threadFactory = new ThreadFactoryBuilder()
- .setDaemon(true)
- .setNameFormat("LaunchDarkly-event-delivery-%d")
- .setPriority(threadPriority)
- .build();
-
- // This queue only holds one element; it represents a flush task that has not yet been
- // picked up by any worker, so if we try to push another one and are refused, it means
- // all the workers are busy.
- final BlockingQueue<FlushPayload> payloadQueue = new ArrayBlockingQueue<>(1);
-
- final EventBuffer outbox = new EventBuffer(eventsConfig.capacity, logger);
- final SimpleLRUCache<String, String> userKeys = new SimpleLRUCache<String, String>(eventsConfig.userKeysCapacity);
-
- Thread mainThread = threadFactory.newThread(() -> {
- runMainLoop(inbox, outbox, userKeys, payloadQueue);
- });
- mainThread.setDaemon(true);
-
- mainThread.setUncaughtExceptionHandler(this::onUncaughtException);
-
- mainThread.start();
-
- flushWorkers = new ArrayList<>();
- EventResponseListener listener = this::handleResponse;
- for (int i = 0; i < MAX_FLUSH_THREADS; i++) {
- SendEventsTask task = new SendEventsTask(
- eventsConfig,
- listener,
- payloadQueue,
- busyFlushWorkersCount,
- threadFactory,
- logger
- );
- flushWorkers.add(task);
- }
-
- if (diagnosticAccumulator != null) {
- // Set up diagnostics
- this.sendDiagnosticTaskFactory = new SendDiagnosticTaskFactory(eventsConfig, this::handleResponse);
- sharedExecutor.submit(sendDiagnosticTaskFactory.createSendDiagnosticTask(diagnosticInitEvent));
- } else {
- sendDiagnosticTaskFactory = null;
- }
- }
-
- private void onUncaughtException(Thread thread, Throwable e) {
- // The thread's main loop catches all exceptions, so we'll only get here if an Error was thrown.
- // In that case, the application is probably already in a bad state, but we can try to degrade
- // relatively gracefully by performing an orderly shutdown of the event processor, so the
- // application won't end up blocking on a queue that's no longer being consumed.
- // COVERAGE: there is no way to make this happen from test code.
-
- logger.error("Event processor thread was terminated by an unrecoverable error. No more analytics events will be sent. {} {}",
- LogValues.exceptionSummary(e), LogValues.exceptionTrace(e));
- // Note that this is a rare case where we always log the exception stacktrace, instead of only
- // logging it at debug level. That's because an exception of this kind should never happen and,
- // if it happens, may be difficult to debug.
-
- // Flip the switch to prevent DefaultEventProcessor from putting any more messages on the queue
- closed.set(true);
- // Now discard everything that was on the queue, but also make sure no one was blocking on a message
- List<EventProcessorMessage> messages = new ArrayList<EventProcessorMessage>();
- inbox.drainTo(messages);
- for (EventProcessorMessage m: messages) {
- m.completed();
- }
- }
-
- /**
- * This task drains the input queue as quickly as possible. Everything here is done on a single
- * thread so we don't have to synchronize on our internal structures; when it's time to flush,
- * triggerFlush will hand the events off to another task.
- */
- private void runMainLoop(BlockingQueue<EventProcessorMessage> inbox,
- EventBuffer outbox, SimpleLRUCache<String, String> userKeys,
- BlockingQueue<FlushPayload> payloadQueue) {
- List<EventProcessorMessage> batch = new ArrayList<EventProcessorMessage>(MESSAGE_BATCH_SIZE);
- while (true) {
- try {
- batch.clear();
- batch.add(inbox.take()); // take() blocks until a message is available
- inbox.drainTo(batch, MESSAGE_BATCH_SIZE - 1); // this nonblocking call allows us to pick up more messages if available
- for (EventProcessorMessage message: batch) {
- switch (message.type) { // COVERAGE: adding a default branch does not prevent coverage warnings here due to compiler issues
- case EVENT:
- processEvent(message.event, userKeys, outbox);
- break;
- case FLUSH:
- triggerFlush(outbox, payloadQueue);
- break;
- case FLUSH_USERS:
- userKeys.clear();
- break;
- case DIAGNOSTIC:
- sendAndResetDiagnostics(outbox);
- break;
- case SYNC: // this is used only by unit tests
- waitUntilAllFlushWorkersInactive();
- break;
- case SHUTDOWN:
- doShutdown();
- message.completed();
- return; // deliberately exit the thread loop
- }
- message.completed();
- }
- } catch (InterruptedException e) {
- } catch (Exception e) { // COVERAGE: there is no way to cause this condition in tests
- logger.error("Unexpected error in event processor: {}", e.toString());
- logger.debug(e.toString(), e);
- }
- }
- }
-
- private void sendAndResetDiagnostics(EventBuffer outbox) {
- if (disabled.get()) {
- return;
- }
- long droppedEvents = outbox.getAndClearDroppedCount();
- // We pass droppedEvents and deduplicatedUsers as parameters here because they are updated frequently in the main loop so we want to avoid synchronization on them.
- DiagnosticEvent diagnosticEvent = diagnosticAccumulator.createEventAndReset(droppedEvents, deduplicatedUsers);
- deduplicatedUsers = 0;
- sharedExecutor.submit(sendDiagnosticTaskFactory.createSendDiagnosticTask(diagnosticEvent));
- }
-
- private void doShutdown() {
- waitUntilAllFlushWorkersInactive();
- disabled.set(true); // In case there are any more messages, we want to ignore them
- for (SendEventsTask task: flushWorkers) {
- task.stop();
- }
- try {
- eventsConfig.eventSender.close();
- } catch (IOException e) {
- logger.error("Unexpected error when closing event sender: {}", LogValues.exceptionSummary(e));
- logger.debug(LogValues.exceptionTrace(e));
- }
- }
-
- private void waitUntilAllFlushWorkersInactive() {
- while (true) {
- try {
- synchronized(busyFlushWorkersCount) {
- if (busyFlushWorkersCount.get() == 0) {
- return;
- } else {
- busyFlushWorkersCount.wait();
- }
- }
- } catch (InterruptedException e) {} // COVERAGE: there is no way to cause this condition in tests
- }
- }
-
- private void processEvent(Event e, SimpleLRUCache<String, String> userKeys, EventBuffer outbox) {
- if (disabled.get()) {
- return;
- }
-
- // Always record the event in the summarizer.
- outbox.addToSummary(e);
-
- // Decide whether to add the event to the payload. Feature events may be added twice, once for
- // the event (if tracked) and once for debugging.
- boolean addIndexEvent = false,
- addFullEvent = false;
- Event debugEvent = null;
-
- if (e instanceof Event.FeatureRequest) {
- Event.FeatureRequest fe = (Event.FeatureRequest)e;
- addFullEvent = fe.isTrackEvents();
- if (shouldDebugEvent(fe)) {
- debugEvent = EventFactory.newDebugEvent(fe);
- }
- } else {
- addFullEvent = true;
- }
-
- // For each user we haven't seen before, we add an index event - unless this is already
- // an identify event for that user.
- if (!addFullEvent || !eventsConfig.inlineUsersInEvents) {
- LDUser user = e.getUser();
- if (user != null && user.getKey() != null) {
- if (e instanceof Event.FeatureRequest || e instanceof Event.Custom) {
- String key = user.getKey();
- // Add to the set of users we've noticed
- boolean alreadySeen = (userKeys.put(key, key) != null);
- if (alreadySeen) {
- deduplicatedUsers++;
- } else {
- addIndexEvent = true;
- }
- } else if (e instanceof Event.Identify) {
- String key = user.getKey();
- userKeys.put(key, key); // just mark that we've seen it
- }
- }
- }
-
- if (addIndexEvent) {
- Event.Index ie = new Event.Index(e.getCreationDate(), e.getUser());
- outbox.add(ie);
- }
- if (addFullEvent) {
- outbox.add(e);
- }
- if (debugEvent != null) {
- outbox.add(debugEvent);
- }
- }
-
- private boolean shouldDebugEvent(Event.FeatureRequest fe) {
- long debugEventsUntilDate = fe.getDebugEventsUntilDate();
- if (debugEventsUntilDate > 0) {
- // The "last known past time" comes from the last HTTP response we got from the server.
- // In case the client's time is set wrong, at least we know that any expiration date
- // earlier than that point is definitely in the past. If there's any discrepancy, we
- // want to err on the side of cutting off event debugging sooner.
- long lastPast = lastKnownPastTime.get();
- if (debugEventsUntilDate > lastPast &&
- debugEventsUntilDate > System.currentTimeMillis()) {
- return true;
- }
- }
- return false;
- }
-
- private void triggerFlush(EventBuffer outbox, BlockingQueue payloadQueue) {
- if (disabled.get() || outbox.isEmpty()) {
- return;
- }
- FlushPayload payload = outbox.getPayload();
- if (diagnosticAccumulator != null) {
- diagnosticAccumulator.recordEventsInBatch(payload.events.length);
- }
- busyFlushWorkersCount.incrementAndGet();
- if (payloadQueue.offer(payload)) {
- // These events now belong to the next available flush worker, so drop them from our state
- outbox.clear();
- } else {
- logger.debug("Skipped flushing because all workers are busy");
- // All the workers are busy so we can't flush now; keep the events in our state
- outbox.summarizer.restoreTo(payload.summary);
- synchronized(busyFlushWorkersCount) {
- busyFlushWorkersCount.decrementAndGet();
- busyFlushWorkersCount.notify();
- }
- }
- }
-
- private void handleResponse(EventSender.Result result) {
- if (result.getTimeFromServer() != null) {
- lastKnownPastTime.set(result.getTimeFromServer().getTime());
- }
- if (result.isMustShutDown()) {
- disabled.set(true);
- }
- }
- }
-
- private static final class EventBuffer {
- final List events = new ArrayList<>();
- final EventSummarizer summarizer = new EventSummarizer();
- private final int capacity;
- private final LDLogger logger;
- private boolean capacityExceeded = false;
- private long droppedEventCount = 0;
-
- EventBuffer(int capacity, LDLogger logger) {
- this.capacity = capacity;
- this.logger = logger;
- }
-
- void add(Event e) {
- if (events.size() >= capacity) {
- if (!capacityExceeded) { // don't need AtomicBoolean, this is only checked on one thread
- capacityExceeded = true;
- logger.warn("Exceeded event queue capacity. Increase capacity to avoid dropping events.");
- }
- droppedEventCount++;
- } else {
- capacityExceeded = false;
- events.add(e);
- }
- }
-
- void addToSummary(Event e) {
- summarizer.summarizeEvent(e);
- }
-
- boolean isEmpty() {
- return events.isEmpty() && summarizer.isEmpty();
- }
-
- long getAndClearDroppedCount() {
- long res = droppedEventCount;
- droppedEventCount = 0;
- return res;
- }
-
- FlushPayload getPayload() {
- Event[] eventsOut = events.toArray(new Event[events.size()]);
- EventSummarizer.EventSummary summary = summarizer.getSummaryAndReset();
- return new FlushPayload(eventsOut, summary);
- }
-
- void clear() {
- events.clear();
- summarizer.clear();
- }
- }
-
- private static final class FlushPayload {
- final Event[] events;
- final EventSummary summary;
-
- FlushPayload(Event[] events, EventSummary summary) {
- this.events = events;
- this.summary = summary;
- }
- }
-
- private static interface EventResponseListener {
- void handleResponse(EventSender.Result result);
- }
-
- private static final class SendEventsTask implements Runnable {
- private final EventsConfiguration eventsConfig;
- private final EventResponseListener responseListener;
- private final BlockingQueue<FlushPayload> payloadQueue;
- private final AtomicInteger activeFlushWorkersCount;
- private final AtomicBoolean stopping;
- private final EventOutputFormatter formatter;
- private final Thread thread;
- private final LDLogger logger;
-
- SendEventsTask(
- EventsConfiguration eventsConfig,
- EventResponseListener responseListener,
- BlockingQueue<FlushPayload> payloadQueue,
- AtomicInteger activeFlushWorkersCount,
- ThreadFactory threadFactory,
- LDLogger logger
- ) {
- this.eventsConfig = eventsConfig;
- this.formatter = new EventOutputFormatter(eventsConfig);
- this.responseListener = responseListener;
- this.payloadQueue = payloadQueue;
- this.activeFlushWorkersCount = activeFlushWorkersCount;
- this.stopping = new AtomicBoolean(false);
- this.logger = logger;
- thread = threadFactory.newThread(this);
- thread.setDaemon(true);
- thread.start();
- }
-
- public void run() {
- while (!stopping.get()) {
- FlushPayload payload = null;
- try {
- payload = payloadQueue.take();
- } catch (InterruptedException e) {
- continue;
- }
- try {
- StringWriter stringWriter = new StringWriter();
- int outputEventCount = formatter.writeOutputEvents(payload.events, payload.summary, stringWriter);
- EventSender.Result result = eventsConfig.eventSender.sendEventData(
- EventDataKind.ANALYTICS,
- stringWriter.toString(),
- outputEventCount,
- eventsConfig.eventsUri
- );
- responseListener.handleResponse(result);
- } catch (Exception e) {
- logger.error("Unexpected error in event processor: {}", LogValues.exceptionSummary(e));
- logger.debug(LogValues.exceptionTrace(e));
- }
- synchronized (activeFlushWorkersCount) {
- activeFlushWorkersCount.decrementAndGet();
- activeFlushWorkersCount.notifyAll();
- }
- }
- }
-
- void stop() {
- stopping.set(true);
- thread.interrupt();
- }
- }
-
- private static final class SendDiagnosticTaskFactory {
- private final EventsConfiguration eventsConfig;
- private final EventResponseListener eventResponseListener;
-
- SendDiagnosticTaskFactory(
- EventsConfiguration eventsConfig,
- EventResponseListener eventResponseListener
- ) {
- this.eventsConfig = eventsConfig;
- this.eventResponseListener = eventResponseListener;
- }
-
- Runnable createSendDiagnosticTask(final DiagnosticEvent diagnosticEvent) {
- return new Runnable() {
- @Override
- public void run() {
- String json = JsonHelpers.serialize(diagnosticEvent);
- EventSender.Result result = eventsConfig.eventSender.sendEventData(EventDataKind.DIAGNOSTICS,
- json, 1, eventsConfig.eventsUri);
- if (eventResponseListener != null) {
- eventResponseListener.handleResponse(result);
- }
- }
- };
- }
- }
-}
diff --git a/src/main/java/com/launchdarkly/sdk/server/DefaultEventProcessorWrapper.java b/src/main/java/com/launchdarkly/sdk/server/DefaultEventProcessorWrapper.java
new file mode 100644
index 000000000..b37497a6b
--- /dev/null
+++ b/src/main/java/com/launchdarkly/sdk/server/DefaultEventProcessorWrapper.java
@@ -0,0 +1,70 @@
+package com.launchdarkly.sdk.server;
+
+import com.launchdarkly.logging.LDLogger;
+import com.launchdarkly.sdk.EvaluationReason;
+import com.launchdarkly.sdk.LDContext;
+import com.launchdarkly.sdk.LDValue;
+import com.launchdarkly.sdk.internal.events.DefaultEventProcessor;
+import com.launchdarkly.sdk.internal.events.Event;
+import com.launchdarkly.sdk.internal.events.EventsConfiguration;
+import com.launchdarkly.sdk.server.subsystems.ClientContext;
+import com.launchdarkly.sdk.server.subsystems.EventProcessor;
+
+import java.io.IOException;
+
+final class DefaultEventProcessorWrapper implements EventProcessor {
+ private final DefaultEventProcessor eventProcessor;
+ final EventsConfiguration eventsConfig; // visible for testing
+
+ DefaultEventProcessorWrapper(ClientContext clientContext, EventsConfiguration eventsConfig) {
+ this.eventsConfig = eventsConfig;
+ LDLogger baseLogger = clientContext.getBaseLogger();
+ LDLogger logger = baseLogger.subLogger(Loggers.EVENTS_LOGGER_NAME);
+ eventProcessor = new DefaultEventProcessor(
+ eventsConfig,
+ ClientContextImpl.get(clientContext).sharedExecutor,
+ clientContext.getThreadPriority(),
+ logger
+ );
+ }
+
+ @Override
+ public void recordEvaluationEvent(LDContext context, String flagKey, int flagVersion, int variation,
+ LDValue value, EvaluationReason reason, LDValue defaultValue, String prerequisiteOfFlagKey,
+ boolean requireFullEvent, Long debugEventsUntilDate) {
+ eventProcessor.sendEvent(new Event.FeatureRequest(
+ System.currentTimeMillis(),
+ flagKey,
+ context,
+ flagVersion,
+ variation,
+ value,
+ defaultValue,
+ reason,
+ prerequisiteOfFlagKey,
+ requireFullEvent,
+ debugEventsUntilDate,
+ false
+ ));
+ }
+
+ @Override
+ public void recordIdentifyEvent(LDContext context) {
+ eventProcessor.sendEvent(new Event.Identify(System.currentTimeMillis(), context));
+ }
+
+ @Override
+ public void recordCustomEvent(LDContext context, String eventKey, LDValue data, Double metricValue) {
+ eventProcessor.sendEvent(new Event.Custom(System.currentTimeMillis(), eventKey, context, data, metricValue));
+ }
+
+ @Override
+ public void flush() {
+ eventProcessor.flushAsync();
+ }
+
+ @Override
+ public void close() throws IOException {
+ eventProcessor.close();
+ }
+}
diff --git a/src/main/java/com/launchdarkly/sdk/server/DefaultEventSender.java b/src/main/java/com/launchdarkly/sdk/server/DefaultEventSender.java
deleted file mode 100644
index 7c2e4569e..000000000
--- a/src/main/java/com/launchdarkly/sdk/server/DefaultEventSender.java
+++ /dev/null
@@ -1,181 +0,0 @@
-package com.launchdarkly.sdk.server;
-
-import com.launchdarkly.logging.LDLogger;
-import com.launchdarkly.sdk.server.interfaces.BasicConfiguration;
-import com.launchdarkly.sdk.server.interfaces.EventSender;
-import com.launchdarkly.sdk.server.interfaces.EventSenderFactory;
-import com.launchdarkly.sdk.server.interfaces.HttpConfiguration;
-
-import java.io.IOException;
-import java.net.URI;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.time.Duration;
-import java.util.Date;
-import java.util.Locale;
-import java.util.UUID;
-
-import static com.launchdarkly.sdk.server.Util.checkIfErrorIsRecoverableAndLog;
-import static com.launchdarkly.sdk.server.Util.concatenateUriPath;
-import static com.launchdarkly.sdk.server.Util.configureHttpClientBuilder;
-import static com.launchdarkly.sdk.server.Util.describeDuration;
-import static com.launchdarkly.sdk.server.Util.getHeadersBuilderFor;
-import static com.launchdarkly.sdk.server.Util.httpErrorDescription;
-import static com.launchdarkly.sdk.server.Util.shutdownHttpClient;
-
-import okhttp3.Headers;
-import okhttp3.MediaType;
-import okhttp3.OkHttpClient;
-import okhttp3.Request;
-import okhttp3.RequestBody;
-import okhttp3.Response;
-
-final class DefaultEventSender implements EventSender {
- static final Duration DEFAULT_RETRY_DELAY = Duration.ofSeconds(1);
- private static final String EVENT_SCHEMA_HEADER = "X-LaunchDarkly-Event-Schema";
- private static final String EVENT_SCHEMA_VERSION = "3";
- private static final String EVENT_PAYLOAD_ID_HEADER = "X-LaunchDarkly-Payload-ID";
- private static final MediaType JSON_CONTENT_TYPE = MediaType.parse("application/json; charset=utf-8");
- private static final SimpleDateFormat HTTP_DATE_FORMAT = new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss zzz",
- Locale.US); // server dates as defined by RFC-822/RFC-1123 use English day/month names
- private static final Object HTTP_DATE_FORMAT_LOCK = new Object(); // synchronize on this because DateFormat isn't thread-safe
-
- private final OkHttpClient httpClient;
- private final Headers baseHeaders;
- final Duration retryDelay; // visible for testing
- private final LDLogger logger;
-
- DefaultEventSender(
- HttpConfiguration httpConfiguration,
- Duration retryDelay,
- LDLogger logger
- ) {
- OkHttpClient.Builder httpBuilder = new OkHttpClient.Builder();
- configureHttpClientBuilder(httpConfiguration, httpBuilder);
- this.httpClient = httpBuilder.build();
- this.logger = logger;
-
- this.baseHeaders = getHeadersBuilderFor(httpConfiguration)
- .add("Content-Type", "application/json")
- .build();
-
- this.retryDelay = retryDelay == null ? DEFAULT_RETRY_DELAY : retryDelay;
- }
-
- @Override
- public void close() throws IOException {
- shutdownHttpClient(httpClient);
- }
-
- @Override
- public Result sendEventData(EventDataKind kind, String data, int eventCount, URI eventsBaseUri) {
- if (data == null || data.isEmpty()) {
- // DefaultEventProcessor won't normally pass us an empty payload, but if it does, don't bother sending
- return new Result(true, false, null);
- }
-
- Headers.Builder headersBuilder = baseHeaders.newBuilder();
- String path;
- String description;
-
- switch (kind) {
- case ANALYTICS:
- path = StandardEndpoints.ANALYTICS_EVENTS_POST_REQUEST_PATH;
- String eventPayloadId = UUID.randomUUID().toString();
- headersBuilder.add(EVENT_PAYLOAD_ID_HEADER, eventPayloadId);
- headersBuilder.add(EVENT_SCHEMA_HEADER, EVENT_SCHEMA_VERSION);
- description = String.format("%d event(s)", eventCount);
- break;
- case DIAGNOSTICS:
- path = StandardEndpoints.DIAGNOSTIC_EVENTS_POST_REQUEST_PATH;
- description = "diagnostic event";
- break;
- default:
- throw new IllegalArgumentException("kind"); // COVERAGE: unreachable code, those are the only enum values
- }
-
- URI uri = concatenateUriPath(eventsBaseUri, path);
- Headers headers = headersBuilder.build();
- RequestBody body = RequestBody.create(data, JSON_CONTENT_TYPE);
- boolean mustShutDown = false;
-
- logger.debug("Posting {} to {} with payload: {}", description, uri, data);
-
- for (int attempt = 0; attempt < 2; attempt++) {
- if (attempt > 0) {
- logger.warn("Will retry posting {} after {}", description, describeDuration(retryDelay));
- try {
- Thread.sleep(retryDelay.toMillis());
- } catch (InterruptedException e) { // COVERAGE: there's no way to cause this in tests
- }
- }
-
- Request request = new Request.Builder()
- .url(uri.toASCIIString())
- .post(body)
- .headers(headers)
- .build();
-
- long startTime = System.currentTimeMillis();
- String nextActionMessage = attempt == 0 ? "will retry" : "some events were dropped";
- String errorContext = "posting " + description;
-
- try (Response response = httpClient.newCall(request).execute()) {
- long endTime = System.currentTimeMillis();
- logger.debug("{} delivery took {} ms, response status {}", description, endTime - startTime, response.code());
-
- if (response.isSuccessful()) {
- return new Result(true, false, parseResponseDate(response));
- }
-
- String errorDesc = httpErrorDescription(response.code());
- boolean recoverable = checkIfErrorIsRecoverableAndLog(
- logger,
- errorDesc,
- errorContext,
- response.code(),
- nextActionMessage
- );
- if (!recoverable) {
- mustShutDown = true;
- break;
- }
- } catch (IOException e) {
- checkIfErrorIsRecoverableAndLog(logger, e.toString(), errorContext, 0, nextActionMessage);
- }
- }
-
- return new Result(false, mustShutDown, null);
- }
-
- private final Date parseResponseDate(Response response) {
- String dateStr = response.header("Date");
- if (dateStr != null) {
- try {
- // DateFormat is not thread-safe, so must synchronize
- synchronized (HTTP_DATE_FORMAT_LOCK) {
- return HTTP_DATE_FORMAT.parse(dateStr);
- }
- } catch (ParseException e) {
- logger.warn("Received invalid Date header from events service");
- }
- }
- return null;
- }
-
- static final class Factory implements EventSenderFactory {
- @Override
- public EventSender createEventSender(BasicConfiguration basicConfiguration, HttpConfiguration httpConfiguration) {
- return new DefaultEventSender(httpConfiguration, DefaultEventSender.DEFAULT_RETRY_DELAY,
- LDLogger.none());
- }
-
- @Override
- public EventSender createEventSender(
- BasicConfiguration basicConfiguration,
- HttpConfiguration httpConfiguration,
- LDLogger logger) {
- return new DefaultEventSender(httpConfiguration, DefaultEventSender.DEFAULT_RETRY_DELAY, logger);
- }
- }
-}
diff --git a/src/main/java/com/launchdarkly/sdk/server/DefaultFeatureRequestor.java b/src/main/java/com/launchdarkly/sdk/server/DefaultFeatureRequestor.java
index 7fb7f1223..9668316e1 100644
--- a/src/main/java/com/launchdarkly/sdk/server/DefaultFeatureRequestor.java
+++ b/src/main/java/com/launchdarkly/sdk/server/DefaultFeatureRequestor.java
@@ -3,10 +3,12 @@
import com.google.common.annotations.VisibleForTesting;
import com.google.gson.stream.JsonReader;
import com.launchdarkly.logging.LDLogger;
-import com.launchdarkly.sdk.server.interfaces.DataStoreTypes.FullDataSet;
-import com.launchdarkly.sdk.server.interfaces.DataStoreTypes.ItemDescriptor;
-import com.launchdarkly.sdk.server.interfaces.HttpConfiguration;
-import com.launchdarkly.sdk.server.interfaces.SerializationException;
+import com.launchdarkly.sdk.internal.http.HttpErrors.HttpErrorException;
+import com.launchdarkly.sdk.internal.http.HttpHelpers;
+import com.launchdarkly.sdk.internal.http.HttpProperties;
+import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.FullDataSet;
+import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.ItemDescriptor;
+import com.launchdarkly.sdk.server.subsystems.SerializationException;
import java.io.IOException;
import java.net.URI;
@@ -14,10 +16,6 @@
import java.nio.file.Path;
import static com.launchdarkly.sdk.server.DataModelSerialization.parseFullDataSet;
-import static com.launchdarkly.sdk.server.Util.concatenateUriPath;
-import static com.launchdarkly.sdk.server.Util.configureHttpClientBuilder;
-import static com.launchdarkly.sdk.server.Util.getHeadersBuilderFor;
-import static com.launchdarkly.sdk.server.Util.shutdownHttpClient;
import okhttp3.Cache;
import okhttp3.Headers;
@@ -38,14 +36,13 @@ final class DefaultFeatureRequestor implements FeatureRequestor {
private final Path cacheDir;
private final LDLogger logger;
- DefaultFeatureRequestor(HttpConfiguration httpConfig, URI baseUri, LDLogger logger) {
+ DefaultFeatureRequestor(HttpProperties httpProperties, URI baseUri, LDLogger logger) {
this.baseUri = baseUri;
- this.pollingUri = concatenateUriPath(baseUri, StandardEndpoints.POLLING_REQUEST_PATH);
+ this.pollingUri = HttpHelpers.concatenateUriPath(baseUri, StandardEndpoints.POLLING_REQUEST_PATH);
this.logger = logger;
- OkHttpClient.Builder httpBuilder = new OkHttpClient.Builder();
- configureHttpClientBuilder(httpConfig, httpBuilder);
- this.headers = getHeadersBuilderFor(httpConfig).build();
+ OkHttpClient.Builder httpBuilder = httpProperties.toHttpClientBuilder();
+ this.headers = httpProperties.toHeadersBuilder().build();
try {
cacheDir = Files.createTempDirectory("LaunchDarklySDK");
@@ -59,7 +56,7 @@ final class DefaultFeatureRequestor implements FeatureRequestor {
}
public void close() {
- shutdownHttpClient(httpClient);
+ HttpProperties.shutdownHttpClient(httpClient);
Util.deleteDirectory(cacheDir);
}
@@ -82,10 +79,11 @@ public FullDataSet getAllData(boolean returnDataEvenIfCached)
return null;
}
- logger.debug("Get flag(s) response: " + response.toString());
- logger.debug("Network response: " + response.networkResponse());
- logger.debug("Cache hit count: " + httpClient.cache().hitCount() + " Cache network Count: " + httpClient.cache().networkCount());
- logger.debug("Cache response: " + response.cacheResponse());
+ logger.debug("Get flag(s) response: {}", response);
+ logger.debug("Network response: {}", response.networkResponse());
+ logger.debug("Cache hit count: {} Cache network count: {}",
+ httpClient.cache().hitCount(), httpClient.cache().networkCount());
+ logger.debug("Cache response: {}", response.cacheResponse());
if (!response.isSuccessful()) {
throw new HttpErrorException(response.code());
diff --git a/src/main/java/com/launchdarkly/sdk/server/DiagnosticAccumulator.java b/src/main/java/com/launchdarkly/sdk/server/DiagnosticAccumulator.java
deleted file mode 100644
index cea391e90..000000000
--- a/src/main/java/com/launchdarkly/sdk/server/DiagnosticAccumulator.java
+++ /dev/null
@@ -1,43 +0,0 @@
-package com.launchdarkly.sdk.server;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.concurrent.atomic.AtomicInteger;
-
-class DiagnosticAccumulator {
-
- final DiagnosticId diagnosticId;
- volatile long dataSinceDate;
- private final AtomicInteger eventsInLastBatch = new AtomicInteger(0);
- private final Object streamInitsLock = new Object();
- private ArrayList streamInits = new ArrayList<>();
-
- DiagnosticAccumulator(DiagnosticId diagnosticId) {
- this.diagnosticId = diagnosticId;
- this.dataSinceDate = System.currentTimeMillis();
- }
-
- void recordStreamInit(long timestamp, long durationMillis, boolean failed) {
- synchronized (streamInitsLock) {
- streamInits.add(new DiagnosticEvent.StreamInit(timestamp, durationMillis, failed));
- }
- }
-
- void recordEventsInBatch(int eventsInBatch) {
- eventsInLastBatch.set(eventsInBatch);
- }
-
- DiagnosticEvent.Statistics createEventAndReset(long droppedEvents, long deduplicatedUsers) {
- long currentTime = System.currentTimeMillis();
- List eventInits;
- synchronized (streamInitsLock) {
- eventInits = streamInits;
- streamInits = new ArrayList<>();
- }
- long eventsInBatch = eventsInLastBatch.getAndSet(0);
- DiagnosticEvent.Statistics res = new DiagnosticEvent.Statistics(currentTime, diagnosticId, dataSinceDate, droppedEvents,
- deduplicatedUsers, eventsInBatch, eventInits);
- dataSinceDate = currentTime;
- return res;
- }
-}
diff --git a/src/main/java/com/launchdarkly/sdk/server/DiagnosticEvent.java b/src/main/java/com/launchdarkly/sdk/server/DiagnosticEvent.java
deleted file mode 100644
index c5803f6a9..000000000
--- a/src/main/java/com/launchdarkly/sdk/server/DiagnosticEvent.java
+++ /dev/null
@@ -1,202 +0,0 @@
-package com.launchdarkly.sdk.server;
-
-import com.launchdarkly.sdk.LDValue;
-import com.launchdarkly.sdk.LDValueType;
-import com.launchdarkly.sdk.ObjectBuilder;
-import com.launchdarkly.sdk.server.interfaces.BasicConfiguration;
-import com.launchdarkly.sdk.server.interfaces.DiagnosticDescription;
-import com.launchdarkly.sdk.server.interfaces.HttpConfiguration;
-
-import java.util.List;
-import java.util.Map;
-
-class DiagnosticEvent {
- static enum ConfigProperty {
- ALL_ATTRIBUTES_PRIVATE("allAttributesPrivate", LDValueType.BOOLEAN),
- CUSTOM_BASE_URI("customBaseURI", LDValueType.BOOLEAN),
- CUSTOM_EVENTS_URI("customEventsURI", LDValueType.BOOLEAN),
- CUSTOM_STREAM_URI("customStreamURI", LDValueType.BOOLEAN),
- DIAGNOSTIC_RECORDING_INTERVAL_MILLIS("diagnosticRecordingIntervalMillis", LDValueType.NUMBER),
- EVENTS_CAPACITY("eventsCapacity", LDValueType.NUMBER),
- EVENTS_FLUSH_INTERVAL_MILLIS("eventsFlushIntervalMillis", LDValueType.NUMBER),
- INLINE_USERS_IN_EVENTS("inlineUsersInEvents", LDValueType.BOOLEAN),
- POLLING_INTERVAL_MILLIS("pollingIntervalMillis", LDValueType.NUMBER),
- RECONNECT_TIME_MILLIS("reconnectTimeMillis", LDValueType.NUMBER),
- SAMPLING_INTERVAL("samplingInterval", LDValueType.NUMBER),
- STREAMING_DISABLED("streamingDisabled", LDValueType.BOOLEAN),
- USER_KEYS_CAPACITY("userKeysCapacity", LDValueType.NUMBER),
- USER_KEYS_FLUSH_INTERVAL_MILLIS("userKeysFlushIntervalMillis", LDValueType.NUMBER),
- USING_RELAY_DAEMON("usingRelayDaemon", LDValueType.BOOLEAN);
-
- String name;
- LDValueType type;
-
- private ConfigProperty(String name, LDValueType type) {
- this.name = name;
- this.type = type;
- }
- }
-
- final String kind;
- final long creationDate;
- final DiagnosticId id;
-
- DiagnosticEvent(String kind, long creationDate, DiagnosticId id) {
- this.kind = kind;
- this.creationDate = creationDate;
- this.id = id;
- }
-
- static class StreamInit {
- long timestamp;
- long durationMillis;
- boolean failed;
-
- StreamInit(long timestamp, long durationMillis, boolean failed) {
- this.timestamp = timestamp;
- this.durationMillis = durationMillis;
- this.failed = failed;
- }
- }
-
- static class Statistics extends DiagnosticEvent {
-
- final long dataSinceDate;
- final long droppedEvents;
- final long deduplicatedUsers;
- final long eventsInLastBatch;
- final List streamInits;
-
- Statistics(long creationDate, DiagnosticId id, long dataSinceDate, long droppedEvents, long deduplicatedUsers,
- long eventsInLastBatch, List streamInits) {
- super("diagnostic", creationDate, id);
- this.dataSinceDate = dataSinceDate;
- this.droppedEvents = droppedEvents;
- this.deduplicatedUsers = deduplicatedUsers;
- this.eventsInLastBatch = eventsInLastBatch;
- this.streamInits = streamInits;
- }
- }
-
- static class Init extends DiagnosticEvent {
- final DiagnosticSdk sdk;
- final LDValue configuration;
- final DiagnosticPlatform platform = new DiagnosticPlatform();
-
- Init(
- long creationDate,
- DiagnosticId diagnosticId,
- LDConfig config,
- BasicConfiguration basicConfig,
- HttpConfiguration httpConfig
- ) {
- super("diagnostic-init", creationDate, diagnosticId);
- this.sdk = new DiagnosticSdk(httpConfig);
- this.configuration = getConfigurationData(config, basicConfig, httpConfig);
- }
-
- static LDValue getConfigurationData(LDConfig config, BasicConfiguration basicConfig, HttpConfiguration httpConfig) {
- ObjectBuilder builder = LDValue.buildObject();
-
- // Add the top-level properties that are not specific to a particular component type.
- builder.put("connectTimeoutMillis", httpConfig.getConnectTimeout().toMillis());
- builder.put("socketTimeoutMillis", httpConfig.getSocketTimeout().toMillis());
- builder.put("usingProxy", httpConfig.getProxy() != null);
- builder.put("usingProxyAuthenticator", httpConfig.getProxyAuthentication() != null);
- builder.put("startWaitMillis", config.startWait.toMillis());
-
- // Allow each pluggable component to describe its own relevant properties.
- mergeComponentProperties(builder, config.dataStoreFactory, basicConfig, "dataStoreType");
- mergeComponentProperties(builder, config.dataSourceFactory, basicConfig, null);
- mergeComponentProperties(builder, config.eventProcessorFactory, basicConfig, null);
- return builder.build();
- }
-
- // Attempts to add relevant configuration properties, if any, from a customizable component:
- // - If the component does not implement DiagnosticDescription, set the defaultPropertyName property to "custom".
- // - If it does implement DiagnosticDescription, call its describeConfiguration() method to get a value.
- // - If the value is a string, then set the defaultPropertyName property to that value.
- // - If the value is an object, then copy all of its properties as long as they are ones we recognize
- // and have the expected type.
- private static void mergeComponentProperties(
- ObjectBuilder builder,
- Object component,
- BasicConfiguration basicConfig,
- String defaultPropertyName
- ) {
- if (!(component instanceof DiagnosticDescription)) {
- if (defaultPropertyName != null) {
- builder.put(defaultPropertyName, "custom");
- }
- return;
- }
- LDValue componentDesc = LDValue.normalize(((DiagnosticDescription)component).describeConfiguration(basicConfig));
- if (defaultPropertyName != null) {
- builder.put(defaultPropertyName, componentDesc.isString() ? componentDesc.stringValue() : "custom");
- } else if (componentDesc.getType() == LDValueType.OBJECT) {
- for (String key: componentDesc.keys()) {
- for (ConfigProperty prop: ConfigProperty.values()) {
- if (prop.name.equals(key)) {
- LDValue value = componentDesc.get(key);
- if (value.getType() == prop.type) {
- builder.put(key, value);
- }
- }
- }
- }
- }
- }
-
- static class DiagnosticSdk {
- final String name = "java-server-sdk";
- final String version = Version.SDK_VERSION;
- final String wrapperName;
- final String wrapperVersion;
-
- DiagnosticSdk(HttpConfiguration httpConfig) {
- for (Map.Entry headers: httpConfig.getDefaultHeaders()) {
- if (headers.getKey().equalsIgnoreCase("X-LaunchDarkly-Wrapper") ) {
- String id = headers.getValue();
- if (id.indexOf("/") >= 0) {
- this.wrapperName = id.substring(0, id.indexOf("/"));
- this.wrapperVersion = id.substring(id.indexOf("/") + 1);
- } else {
- this.wrapperName = id;
- this.wrapperVersion = null;
- }
- return;
- }
- }
- this.wrapperName = null;
- this.wrapperVersion = null;
- }
- }
-
- @SuppressWarnings("unused") // fields are for JSON serialization only
- static class DiagnosticPlatform {
- private final String name = "Java";
- private final String javaVendor = System.getProperty("java.vendor");
- private final String javaVersion = System.getProperty("java.version");
- private final String osArch = System.getProperty("os.arch");
- final String osName = normalizeOsName(System.getProperty("os.name")); // visible for tests
- private final String osVersion = System.getProperty("os.version");
-
- DiagnosticPlatform() {
- }
-
- private static String normalizeOsName(String osName) {
- // For our diagnostics data, we prefer the standard names "Linux", "MacOS", and "Windows".
- // "Linux" is already what the JRE returns in Linux. In Windows, we get "Windows 10" etc.
- if (osName != null) {
- if (osName.equals("Mac OS X")) {
- return "MacOS";
- }
- if (osName.startsWith("Windows")) {
- return "Windows";
- }
- }
- return osName;
- }
- }
- }
-}
diff --git a/src/main/java/com/launchdarkly/sdk/server/DiagnosticId.java b/src/main/java/com/launchdarkly/sdk/server/DiagnosticId.java
deleted file mode 100644
index 8601a9780..000000000
--- a/src/main/java/com/launchdarkly/sdk/server/DiagnosticId.java
+++ /dev/null
@@ -1,17 +0,0 @@
-package com.launchdarkly.sdk.server;
-
-import java.util.UUID;
-
-class DiagnosticId {
-
- final String diagnosticId = UUID.randomUUID().toString();
- final String sdkKeySuffix;
-
- DiagnosticId(String sdkKey) {
- if (sdkKey == null) {
- sdkKeySuffix = null;
- } else {
- this.sdkKeySuffix = sdkKey.substring(Math.max(0, sdkKey.length() - 6));
- }
- }
-}
diff --git a/src/main/java/com/launchdarkly/sdk/server/Evaluator.java b/src/main/java/com/launchdarkly/sdk/server/Evaluator.java
index 3c410f5a2..76280a1db 100644
--- a/src/main/java/com/launchdarkly/sdk/server/Evaluator.java
+++ b/src/main/java/com/launchdarkly/sdk/server/Evaluator.java
@@ -1,9 +1,12 @@
package com.launchdarkly.sdk.server;
import com.launchdarkly.logging.LDLogger;
+import com.launchdarkly.sdk.AttributeRef;
+import com.launchdarkly.sdk.ContextKind;
import com.launchdarkly.sdk.EvaluationReason;
+import com.launchdarkly.sdk.EvaluationReason.ErrorKind;
import com.launchdarkly.sdk.EvaluationReason.Kind;
-import com.launchdarkly.sdk.LDUser;
+import com.launchdarkly.sdk.LDContext;
import com.launchdarkly.sdk.LDValue;
import com.launchdarkly.sdk.LDValueType;
import com.launchdarkly.sdk.server.DataModel.Clause;
@@ -17,13 +20,19 @@
import com.launchdarkly.sdk.server.DataModel.Target;
import com.launchdarkly.sdk.server.DataModel.VariationOrRollout;
import com.launchdarkly.sdk.server.DataModel.WeightedVariation;
-import com.launchdarkly.sdk.server.DataModelPreprocessing.ClausePreprocessed;
-import com.launchdarkly.sdk.server.interfaces.BigSegmentStoreTypes;
+import com.launchdarkly.sdk.server.subsystems.BigSegmentStoreTypes;
+import java.util.ArrayList;
+import java.util.HashMap;
import java.util.List;
-import java.util.Set;
+import java.util.Map;
-import static com.launchdarkly.sdk.server.EvaluatorBucketing.bucketUser;
+import static com.launchdarkly.sdk.server.EvaluatorBucketing.computeBucketValue;
+import static com.launchdarkly.sdk.server.EvaluatorHelpers.contextKeyIsInTargetList;
+import static com.launchdarkly.sdk.server.EvaluatorHelpers.contextKeyIsInTargetLists;
+import static com.launchdarkly.sdk.server.EvaluatorHelpers.matchClauseByKind;
+import static com.launchdarkly.sdk.server.EvaluatorHelpers.matchClauseWithoutSegments;
+import static com.launchdarkly.sdk.server.EvaluatorHelpers.maybeNegate;
/**
* Encapsulates the feature flag evaluation logic. The Evaluator has no knowledge of the rest of the SDK environment;
@@ -86,17 +95,33 @@ static interface PrerequisiteEvaluationSink {
void recordPrerequisiteEvaluation(
FeatureFlag flag,
FeatureFlag prereqOfFlag,
- LDUser user,
+ LDContext context,
EvalResult result
);
}
+ /**
+ * Represents errors that should terminate evaluation, for situations where it's simpler to use throw/catch
+ * than to return an error result back up a call chain.
+ */
+ @SuppressWarnings("serial")
+ static class EvaluationException extends RuntimeException {
+ final ErrorKind errorKind;
+
+ EvaluationException(ErrorKind errorKind, String message) {
+ this.errorKind = errorKind;
+ }
+ }
+
/**
* This object holds mutable state that Evaluator may need during an evaluation.
*/
private static class EvaluatorState {
- private BigSegmentStoreTypes.Membership bigSegmentsMembership = null;
+ private Map bigSegmentsMembership = null;
private EvaluationReason.BigSegmentsStatus bigSegmentsStatus = null;
+ private FeatureFlag originalFlag = null;
+ private List prerequisiteStack = null;
+ private List segmentStack = null;
}
Evaluator(Getters getters, LDLogger logger) {
@@ -108,52 +133,56 @@ private static class EvaluatorState {
* The client's entry point for evaluating a flag. No other Evaluator methods should be exposed.
*
* @param flag an existing feature flag; any other referenced flags or segments will be queried via {@link Getters}
- * @param user the user to evaluate against
+ * @param context the evaluation context
* @param eventFactory produces feature request events
* @return an {@link EvalResult} - guaranteed non-null
*/
- EvalResult evaluate(FeatureFlag flag, LDUser user, PrerequisiteEvaluationSink prereqEvals) {
+ EvalResult evaluate(FeatureFlag flag, LDContext context, PrerequisiteEvaluationSink prereqEvals) {
if (flag.getKey() == INVALID_FLAG_KEY_THAT_THROWS_EXCEPTION) {
throw EXPECTED_EXCEPTION_FROM_INVALID_FLAG;
}
- if (user == null || user.getKey() == null) {
- // this should have been prevented by LDClient.evaluateInternal
- logger.warn("Null user or null user key when evaluating flag \"{}\"; returning null", flag.getKey());
- return EvalResult.error(EvaluationReason.ErrorKind.USER_NOT_SPECIFIED);
+ if (context == null || !context.isValid()) {
+ // This would be a serious logic error on our part, rather than an application error, since LDClient
+ // should never be passing a null or invalid context to Evaluator; the SDK should have rejected that
+ // at a higher level. So we will report it as EXCEPTION to differentiate it from application errors.
+ logger.error("Null or invalid context was unexpectedly passed to evaluator");
+ return EvalResult.error(EvaluationReason.ErrorKind.EXCEPTION);
}
EvaluatorState state = new EvaluatorState();
+ state.originalFlag = flag;
- EvalResult result = evaluateInternal(flag, user, prereqEvals, state);
-
- if (state.bigSegmentsStatus != null) {
- return result.withReason(
- result.getReason().withBigSegmentsStatus(state.bigSegmentsStatus)
- );
+ try {
+ EvalResult result = evaluateInternal(flag, context, prereqEvals, state);
+
+ if (state.bigSegmentsStatus != null) {
+ return result.withReason(
+ result.getReason().withBigSegmentsStatus(state.bigSegmentsStatus)
+ );
+ }
+ return result;
+ } catch (EvaluationException e) {
+ logger.error("Could not evaluate flag \"{}\": {}", flag.getKey(), e.getMessage());
+ return EvalResult.error(e.errorKind);
}
- return result;
}
- private EvalResult evaluateInternal(FeatureFlag flag, LDUser user,
+ private EvalResult evaluateInternal(FeatureFlag flag, LDContext context,
PrerequisiteEvaluationSink prereqEvals, EvaluatorState state) {
if (!flag.isOn()) {
return EvaluatorHelpers.offResult(flag);
}
- EvalResult prereqFailureResult = checkPrerequisites(flag, user, prereqEvals, state);
+ EvalResult prereqFailureResult = checkPrerequisites(flag, context, prereqEvals, state);
if (prereqFailureResult != null) {
return prereqFailureResult;
}
// Check to see if targets match
- List targets = flag.getTargets(); // guaranteed non-null
- int nTargets = targets.size();
- for (int i = 0; i < nTargets; i++) {
- Target target = targets.get(i);
- if (target.getValues().contains(user.getKey())) { // getValues() is guaranteed non-null
- return EvaluatorHelpers.targetMatchResult(flag, target);
- }
+ EvalResult targetMatchResult = checkTargets(flag, context);
+ if (targetMatchResult != null) {
+ return targetMatchResult;
}
// Now walk through the rules and see if any match
@@ -161,51 +190,134 @@ private EvalResult evaluateInternal(FeatureFlag flag, LDUser user,
int nRules = rules.size();
for (int i = 0; i < nRules; i++) {
Rule rule = rules.get(i);
- if (ruleMatchesUser(flag, rule, user, state)) {
- return computeRuleMatch(flag, user, rule, i);
+ if (ruleMatchesContext(flag, rule, context, state)) {
+ return computeRuleMatch(flag, context, rule, i);
}
}
// Walk through the fallthrough and see if it matches
- return getValueForVariationOrRollout(flag, flag.getFallthrough(), user,
+ return getValueForVariationOrRollout(flag, flag.getFallthrough(), context,
flag.preprocessed == null ? null : flag.preprocessed.fallthroughResults,
EvaluationReason.fallthrough());
}
// Checks prerequisites if any; returns null if successful, or an EvalResult if we have to
// short-circuit due to a prerequisite failure.
- private EvalResult checkPrerequisites(FeatureFlag flag, LDUser user,
+ private EvalResult checkPrerequisites(FeatureFlag flag, LDContext context,
PrerequisiteEvaluationSink prereqEvals, EvaluatorState state) {
List prerequisites = flag.getPrerequisites(); // guaranteed non-null
int nPrerequisites = prerequisites.size();
- for (int i = 0; i < nPrerequisites; i++) {
- Prerequisite prereq = prerequisites.get(i);
- boolean prereqOk = true;
- FeatureFlag prereqFeatureFlag = getters.getFlag(prereq.getKey());
- if (prereqFeatureFlag == null) {
- logger.error("Could not retrieve prerequisite flag \"{}\" when evaluating \"{}\"", prereq.getKey(), flag.getKey());
- prereqOk = false;
- } else {
- EvalResult prereqEvalResult = evaluateInternal(prereqFeatureFlag, user, prereqEvals, state);
- // Note that if the prerequisite flag is off, we don't consider it a match no matter what its
- // off variation was. But we still need to evaluate it in order to generate an event.
- if (!prereqFeatureFlag.isOn() || prereqEvalResult.getVariationIndex() != prereq.getVariation()) {
+ if (nPrerequisites == 0) {
+ return null;
+ }
+
+ try {
+ // We use the state object to guard against circular references in prerequisites. To avoid
+ // the overhead of creating the state.prerequisiteStack list in the most common case where
+ // there's only a single level prerequisites, we treat state.originalFlag as the first
+ // element in the stack.
+ if (flag != state.originalFlag) {
+ if (state.prerequisiteStack == null) {
+ state.prerequisiteStack = new ArrayList<>();
+ }
+ state.prerequisiteStack.add(flag.getKey());
+ }
+
+ for (int i = 0; i < nPrerequisites; i++) {
+ Prerequisite prereq = prerequisites.get(i);
+ String prereqKey = prereq.getKey();
+
+ if (prereqKey.equals(state.originalFlag.getKey()) ||
+ (flag != state.originalFlag && prereqKey.equals(flag.getKey())) ||
+ (state.prerequisiteStack != null && state.prerequisiteStack.contains(prereqKey))) {
+ throw new EvaluationException(ErrorKind.MALFORMED_FLAG,
+ "prerequisite relationship to \"" + prereqKey + "\" caused a circular reference;" +
+ " this is probably a temporary condition due to an incomplete update");
+ }
+
+ boolean prereqOk = true;
+ FeatureFlag prereqFeatureFlag = getters.getFlag(prereq.getKey());
+ if (prereqFeatureFlag == null) {
+ logger.error("Could not retrieve prerequisite flag \"{}\" when evaluating \"{}\"", prereq.getKey(), flag.getKey());
prereqOk = false;
+ } else {
+ EvalResult prereqEvalResult = evaluateInternal(prereqFeatureFlag, context, prereqEvals, state);
+ // Note that if the prerequisite flag is off, we don't consider it a match no matter what its
+ // off variation was. But we still need to evaluate it in order to generate an event.
+ if (!prereqFeatureFlag.isOn() || prereqEvalResult.getVariationIndex() != prereq.getVariation()) {
+ prereqOk = false;
+ }
+ if (prereqEvals != null) {
+ prereqEvals.recordPrerequisiteEvaluation(prereqFeatureFlag, flag, context, prereqEvalResult);
+ }
}
- if (prereqEvals != null) {
- prereqEvals.recordPrerequisiteEvaluation(prereqFeatureFlag, flag, user, prereqEvalResult);
+ if (!prereqOk) {
+ return EvaluatorHelpers.prerequisiteFailedResult(flag, prereq);
}
}
- if (!prereqOk) {
- return EvaluatorHelpers.prerequisiteFailedResult(flag, prereq);
+ return null; // all prerequisites were satisfied
+ }
+ finally {
+ if (state.prerequisiteStack != null && !state.prerequisiteStack.isEmpty()) {
+ state.prerequisiteStack.remove(state.prerequisiteStack.size() - 1);
}
}
- return null;
}
+ private static EvalResult checkTargets(
+ FeatureFlag flag,
+ LDContext context
+ ) {
+ List contextTargets = flag.getContextTargets(); // guaranteed non-null
+ List userTargets = flag.getTargets(); // guaranteed non-null
+ int nContextTargets = contextTargets.size();
+ int nUserTargets = userTargets.size();
+
+ if (nContextTargets == 0) {
+ // old-style data has only targets for users
+ if (nUserTargets != 0) {
+ LDContext userContext = context.getIndividualContext(ContextKind.DEFAULT);
+ if (userContext != null) {
+ for (int i = 0; i < nUserTargets; i++) {
+ Target t = userTargets.get(i);
+ if (t.getValues().contains(userContext.getKey())) { // getValues() is guaranteed non-null
+ return EvaluatorHelpers.targetMatchResult(flag, t);
+ }
+ }
+ }
+ }
+ return null;
+ }
+
+ // new-style data has ContextTargets, which may include placeholders for user targets that are in Targets
+ for (int i = 0; i < nContextTargets; i++) {
+ Target t = contextTargets.get(i);
+ if (t.getContextKind() == null || t.getContextKind().isDefault()) {
+ LDContext userContext = context.getIndividualContext(ContextKind.DEFAULT);
+ if (userContext == null) {
+ continue;
+ }
+ for (int j = 0; j < nUserTargets; j++) {
+ Target ut = userTargets.get(j);
+ if (ut.getVariation() == t.getVariation()) {
+ if (ut.getValues().contains(userContext.getKey())) {
+ return EvaluatorHelpers.targetMatchResult(flag, t);
+ }
+ break;
+ }
+ }
+ } else {
+ if (contextKeyIsInTargetList(context, t.getContextKind(), t.getValues())) {
+ return EvaluatorHelpers.targetMatchResult(flag, t);
+ }
+ }
+ }
+ return null;
+ }
+
private EvalResult getValueForVariationOrRollout(
FeatureFlag flag,
VariationOrRollout vr,
- LDUser user,
+ LDContext context,
DataModelPreprocessing.EvalResultFactoryMultiVariations precomputedResults,
EvaluationReason reason
) {
@@ -217,7 +329,16 @@ private EvalResult getValueForVariationOrRollout(
} else {
Rollout rollout = vr.getRollout();
if (rollout != null && !rollout.getVariations().isEmpty()) {
- float bucket = bucketUser(rollout.getSeed(), user, flag.getKey(), rollout.getBucketBy(), flag.getSalt());
+ float bucket = computeBucketValue(
+ rollout.isExperiment(),
+ rollout.getSeed(),
+ context,
+ rollout.getContextKind(),
+ flag.getKey(),
+ rollout.getBucketBy(),
+ flag.getSalt()
+ );
+ boolean contextWasFound = bucket >= 0; // see comment on computeBucketValue
float sum = 0F;
List variations = rollout.getVariations(); // guaranteed non-null
int nVariations = variations.size();
@@ -226,7 +347,7 @@ private EvalResult getValueForVariationOrRollout(
sum += (float) wv.getWeight() / 100000F;
if (bucket < sum) {
variation = wv.getVariation();
- inExperiment = vr.getRollout().isExperiment() && !wv.isUntracked();
+ inExperiment = vr.getRollout().isExperiment() && !wv.isUntracked() && contextWasFound;
break;
}
}
@@ -265,101 +386,86 @@ private static EvaluationReason experimentize(EvaluationReason reason) {
return reason;
}
- private boolean ruleMatchesUser(FeatureFlag flag, Rule rule, LDUser user, EvaluatorState state) {
+ private boolean ruleMatchesContext(FeatureFlag flag, Rule rule, LDContext context, EvaluatorState state) {
List clauses = rule.getClauses(); // guaranteed non-null
int nClauses = clauses.size();
for (int i = 0; i < nClauses; i++) {
Clause clause = clauses.get(i);
- if (!clauseMatchesUser(clause, user, state)) {
+ if (!clauseMatchesContext(clause, context, state)) {
return false;
}
}
return true;
}
- private boolean clauseMatchesUser(Clause clause, LDUser user, EvaluatorState state) {
- // In the case of a segment match operator, we check if the user is in any of the segments,
- // and possibly negate
+ private boolean clauseMatchesContext(Clause clause, LDContext context, EvaluatorState state) {
if (clause.getOp() == Operator.segmentMatch) {
- List values = clause.getValues(); // guaranteed non-null
- int nValues = values.size();
- for (int i = 0; i < nValues; i++) {
- LDValue clauseValue = values.get(i);
- if (clauseValue.isString()) {
- Segment segment = getters.getSegment(clauseValue.stringValue());
- if (segment != null) {
- if (segmentMatchesUser(segment, user, state)) {
- return maybeNegate(clause, true);
- }
- }
- }
- }
- return maybeNegate(clause, false);
+ return maybeNegate(clause, matchAnySegment(clause.getValues(), context, state));
}
-
- return clauseMatchesUserNoSegments(clause, user);
- }
-
- private boolean clauseMatchesUserNoSegments(Clause clause, LDUser user) {
- LDValue userValue = user.getAttribute(clause.getAttribute());
- if (userValue.isNull()) {
+ AttributeRef attr = clause.getAttribute();
+ if (attr == null) {
+ throw new EvaluationException(ErrorKind.MALFORMED_FLAG, "rule clause did not specify an attribute");
+ }
+ if (!attr.isValid()) {
+ throw new EvaluationException(ErrorKind.MALFORMED_FLAG,
+ "invalid attribute reference \"" + attr.getError() + "\"");
+ }
+ if (attr.getDepth() == 1 && attr.getComponent(0).equals("kind")) {
+ return maybeNegate(clause, matchClauseByKind(clause, context));
+ }
+ LDContext actualContext = context.getIndividualContext(clause.getContextKind());
+ if (actualContext == null) {
+ return false;
+ }
+ LDValue contextValue = actualContext.getValue(attr);
+ if (contextValue.isNull()) {
return false;
}
- if (userValue.getType() == LDValueType.ARRAY) {
- int nValues = userValue.size();
+ if (contextValue.getType() == LDValueType.ARRAY) {
+ int nValues = contextValue.size();
for (int i = 0; i < nValues; i++) {
- LDValue value = userValue.get(i);
- if (value.getType() == LDValueType.ARRAY || value.getType() == LDValueType.OBJECT) {
- logger.error("Invalid custom attribute value in user object for user key \"{}\": {}", user.getKey(), value);
- return false;
- }
- if (clauseMatchAny(clause, value)) {
+ LDValue value = contextValue.get(i);
+ if (matchClauseWithoutSegments(clause, value)) {
return maybeNegate(clause, true);
}
}
return maybeNegate(clause, false);
- } else if (userValue.getType() != LDValueType.OBJECT) {
- return maybeNegate(clause, clauseMatchAny(clause, userValue));
+ } else if (contextValue.getType() != LDValueType.OBJECT) {
+ return maybeNegate(clause, matchClauseWithoutSegments(clause, contextValue));
}
- logger.warn("Got unexpected user attribute type \"{}\" for user key \"{}\" and attribute \"{}\"",
- userValue.getType(), user.getKey(), clause.getAttribute());
return false;
}
- static boolean clauseMatchAny(Clause clause, LDValue userValue) {
- Operator op = clause.getOp();
- if (op != null) {
- ClausePreprocessed preprocessed = clause.preprocessed;
- if (op == Operator.in) {
- // see if we have precomputed a Set for fast equality matching
- Set vs = preprocessed == null ? null : preprocessed.valuesSet;
- if (vs != null) {
- return vs.contains(userValue);
+ private boolean matchAnySegment(List values, LDContext context, EvaluatorState state) {
+ // For the segmentMatch operator, the values list is really a list of segment keys. We
+ // return a match if any of these segments matches the context.
+ int nValues = values.size();
+ for (int i = 0; i < nValues; i++) {
+ LDValue clauseValue = values.get(i);
+ if (!clauseValue.isString()) {
+ continue;
+ }
+ String segmentKey = clauseValue.stringValue();
+ if (state.segmentStack != null) {
+ // Clauses within a segment can reference other segments, so we don't want to get stuck in a cycle.
+ if (state.segmentStack.contains(segmentKey)) {
+ throw new EvaluationException(ErrorKind.MALFORMED_FLAG,
+ "segment rule referencing segment \"" + segmentKey + "\" caused a circular reference;" +
+ " this is probably a temporary condition due to an incomplete update");
}
}
- List values = clause.getValues();
- List preprocessedValues =
- preprocessed == null ? null : preprocessed.valuesExtra;
- int n = values.size();
- for (int i = 0; i < n; i++) {
- // the preprocessed list, if present, will always have the same size as the values list
- ClausePreprocessed.ValueData p = preprocessedValues == null ? null : preprocessedValues.get(i);
- LDValue v = values.get(i);
- if (EvaluatorOperators.apply(op, userValue, v, p)) {
+ Segment segment = getters.getSegment(segmentKey);
+ if (segment != null) {
+ if (segmentMatchesContext(segment, context, state)) {
return true;
}
}
}
return false;
}
-
- private boolean maybeNegate(Clause clause, boolean b) {
- return clause.isNegate() ? !b : b;
- }
- private boolean segmentMatchesUser(Segment segment, LDUser user, EvaluatorState state) {
- String userKey = user.getKey(); // we've already verified that the key is non-null at the top of evaluate()
+ private boolean segmentMatchesContext(Segment segment, LDContext context, EvaluatorState state) {
if (segment.isUnbounded()) {
if (segment.getGeneration() == null) {
// Big Segment queries can only be done if the generation is known. If it's unset, that
@@ -369,49 +475,78 @@ private boolean segmentMatchesUser(Segment segment, LDUser user, EvaluatorState
state.bigSegmentsStatus = EvaluationReason.BigSegmentsStatus.NOT_CONFIGURED;
return false;
}
-
- // Even if multiple Big Segments are referenced within a single flag evaluation, we only need
- // to do this query once, since it returns *all* of the user's segment memberships.
- if (state.bigSegmentsStatus == null) {
- BigSegmentStoreWrapper.BigSegmentsQueryResult queryResult = getters.getBigSegments(user.getKey());
+ LDContext matchContext = context.getIndividualContext(segment.getUnboundedContextKind());
+ if (matchContext == null) {
+ return false;
+ }
+ String key = matchContext.getKey();
+ BigSegmentStoreTypes.Membership membershipData =
+ state.bigSegmentsMembership == null ? null : state.bigSegmentsMembership.get(key);
+ if (membershipData == null) {
+ BigSegmentStoreWrapper.BigSegmentsQueryResult queryResult = getters.getBigSegments(key);
if (queryResult == null) {
// The SDK hasn't been configured to be able to use big segments
state.bigSegmentsStatus = EvaluationReason.BigSegmentsStatus.NOT_CONFIGURED;
} else {
+ membershipData = queryResult.membership;
state.bigSegmentsStatus = queryResult.status;
- state.bigSegmentsMembership = queryResult.membership;
+ if (state.bigSegmentsMembership == null) {
+ state.bigSegmentsMembership = new HashMap<>();
+ }
+ state.bigSegmentsMembership.put(key, membershipData);
}
}
- Boolean membership = state.bigSegmentsMembership == null ?
- null : state.bigSegmentsMembership.checkMembership(makeBigSegmentRef(segment));
- if (membership != null) {
- return membership;
+ Boolean membershipResult = membershipData == null ? null :
+ membershipData.checkMembership(makeBigSegmentRef(segment));
+ if (membershipResult != null) {
+ return membershipResult.booleanValue();
}
} else {
- if (segment.getIncluded().contains(userKey)) { // getIncluded(), getExcluded(), and getRules() are guaranteed non-null
+ if (contextKeyIsInTargetList(context, ContextKind.DEFAULT, segment.getIncluded())) {
return true;
}
- if (segment.getExcluded().contains(userKey)) {
+ if (contextKeyIsInTargetLists(context, segment.getIncludedContexts())) {
+ return true;
+ }
+ if (contextKeyIsInTargetList(context, ContextKind.DEFAULT, segment.getExcluded())) {
+ return false;
+ }
+ if (contextKeyIsInTargetLists(context, segment.getExcludedContexts())) {
return false;
}
}
List rules = segment.getRules(); // guaranteed non-null
- int nRules = rules.size();
- for (int i = 0; i < nRules; i++) {
- SegmentRule rule = rules.get(i);
- if (segmentRuleMatchesUser(rule, user, segment.getKey(), segment.getSalt())) {
- return true;
+ if (!rules.isEmpty()) {
+ // Evaluating rules means we might be doing recursive segment matches, so we'll push the current
+ // segment key onto the stack for cycle detection.
+ if (state.segmentStack == null) {
+ state.segmentStack = new ArrayList<>();
+ }
+ state.segmentStack.add(segment.getKey());
+ int nRules = rules.size();
+ for (int i = 0; i < nRules; i++) {
+ SegmentRule rule = rules.get(i);
+ if (segmentRuleMatchesContext(rule, context, state, segment.getKey(), segment.getSalt())) {
+ return true;
+ }
}
+ state.segmentStack.remove(state.segmentStack.size() - 1);
}
return false;
}
-
- private boolean segmentRuleMatchesUser(SegmentRule segmentRule, LDUser user, String segmentKey, String salt) {
+
+ private boolean segmentRuleMatchesContext(
+ SegmentRule segmentRule,
+ LDContext context,
+ EvaluatorState state,
+ String segmentKey,
+ String salt
+ ) {
List clauses = segmentRule.getClauses(); // guaranteed non-null
int nClauses = clauses.size();
for (int i = 0; i < nClauses; i++) {
Clause c = clauses.get(i);
- if (!clauseMatchesUserNoSegments(c, user)) {
+ if (!clauseMatchesContext(c, context, state)) {
return false;
}
}
@@ -421,18 +556,26 @@ private boolean segmentRuleMatchesUser(SegmentRule segmentRule, LDUser user, Str
return true;
}
- // All of the clauses are met. See if the user buckets in
- double bucket = EvaluatorBucketing.bucketUser(null, user, segmentKey, segmentRule.getBucketBy(), salt);
+ // All of the clauses are met. See if the context buckets in
+ double bucket = computeBucketValue(
+ false,
+ null,
+ context,
+ segmentRule.getRolloutContextKind(),
+ segmentKey,
+ segmentRule.getBucketBy(),
+ salt
+ );
double weight = (double)segmentRule.getWeight() / 100000.0;
return bucket < weight;
}
- private EvalResult computeRuleMatch(FeatureFlag flag, LDUser user, Rule rule, int ruleIndex) {
+ private EvalResult computeRuleMatch(FeatureFlag flag, LDContext context, Rule rule, int ruleIndex) {
if (rule.preprocessed != null) {
- return getValueForVariationOrRollout(flag, rule, user, rule.preprocessed.allPossibleResults, null);
+ return getValueForVariationOrRollout(flag, rule, context, rule.preprocessed.allPossibleResults, null);
}
EvaluationReason reason = EvaluationReason.ruleMatch(ruleIndex, rule.getId());
- return getValueForVariationOrRollout(flag, rule, user, null, reason);
+ return getValueForVariationOrRollout(flag, rule, context, null, reason);
}
static String makeBigSegmentRef(Segment segment) {
diff --git a/src/main/java/com/launchdarkly/sdk/server/EvaluatorBucketing.java b/src/main/java/com/launchdarkly/sdk/server/EvaluatorBucketing.java
index b770020cb..2a96663d7 100644
--- a/src/main/java/com/launchdarkly/sdk/server/EvaluatorBucketing.java
+++ b/src/main/java/com/launchdarkly/sdk/server/EvaluatorBucketing.java
@@ -1,8 +1,9 @@
package com.launchdarkly.sdk.server;
-import com.launchdarkly.sdk.LDUser;
+import com.launchdarkly.sdk.AttributeRef;
+import com.launchdarkly.sdk.ContextKind;
+import com.launchdarkly.sdk.LDContext;
import com.launchdarkly.sdk.LDValue;
-import com.launchdarkly.sdk.UserAttribute;
import org.apache.commons.codec.digest.DigestUtils;
@@ -14,24 +15,52 @@ private EvaluatorBucketing() {}
private static final float LONG_SCALE = (float) 0xFFFFFFFFFFFFFFFL;
- static float bucketUser(Integer seed, LDUser user, String key, UserAttribute attr, String salt) {
- LDValue userValue = user.getAttribute(attr == null ? UserAttribute.KEY : attr);
- String idHash = getBucketableStringValue(userValue);
- if (idHash != null) {
- String prefix;
- if (seed != null) {
- prefix = seed.toString();
- } else {
- prefix = key + "." + salt;
+ // Computes a bucket value for a rollout or experiment. If an error condition prevents
+ // us from computing a valid bucket value, we return 0, which will cause the evaluator
+ // to select the first bucket. A special case is if no context of the desired kind is
+ // found, in which case we return the special value -1; this similarly will cause the
+ // first bucket to be chosen (since it is less than the end value of the bucket, just
+ // as 0 is), but also tells the evaluator that inExperiment must be set to false.
+ static float computeBucketValue(
+ boolean isExperiment,
+ Integer seed,
+ LDContext context,
+ ContextKind contextKind,
+ String flagOrSegmentKey,
+ AttributeRef attr,
+ String salt
+ ) {
+ LDContext matchContext = context.getIndividualContext(contextKind);
+ if (matchContext == null) {
+ return -1;
+ }
+ LDValue contextValue;
+ if (isExperiment || attr == null) {
+ contextValue = LDValue.of(matchContext.getKey());
+ } else {
+ if (!attr.isValid()) {
+ return 0;
}
- if (user.getSecondary() != null) {
- idHash = idHash + "." + user.getSecondary();
+ contextValue = matchContext.getValue(attr);
+ if (contextValue.isNull()) {
+ return 0;
}
- String hash = DigestUtils.sha1Hex(prefix + "." + idHash).substring(0, 15);
- long longVal = Long.parseLong(hash, 16);
- return (float) longVal / LONG_SCALE;
}
- return 0F;
+
+ String idHash = getBucketableStringValue(contextValue);
+ if (idHash == null) {
+ return 0;
+ }
+
+ String prefix;
+ if (seed != null) {
+ prefix = seed.toString();
+ } else {
+ prefix = flagOrSegmentKey + "." + salt;
+ }
+ String hash = DigestUtils.sha1Hex(prefix + "." + idHash).substring(0, 15);
+ long longVal = Long.parseLong(hash, 16);
+ return (float) longVal / LONG_SCALE;
}
private static String getBucketableStringValue(LDValue userValue) {
diff --git a/src/main/java/com/launchdarkly/sdk/server/EvaluatorHelpers.java b/src/main/java/com/launchdarkly/sdk/server/EvaluatorHelpers.java
index 68b53de0e..2e4936927 100644
--- a/src/main/java/com/launchdarkly/sdk/server/EvaluatorHelpers.java
+++ b/src/main/java/com/launchdarkly/sdk/server/EvaluatorHelpers.java
@@ -1,17 +1,31 @@
package com.launchdarkly.sdk.server;
+import com.launchdarkly.sdk.ContextKind;
import com.launchdarkly.sdk.EvaluationDetail;
import com.launchdarkly.sdk.EvaluationReason;
import com.launchdarkly.sdk.EvaluationReason.ErrorKind;
+import com.launchdarkly.sdk.LDContext;
import com.launchdarkly.sdk.LDValue;
+import com.launchdarkly.sdk.server.DataModel.Clause;
import com.launchdarkly.sdk.server.DataModel.FeatureFlag;
+import com.launchdarkly.sdk.server.DataModel.Operator;
import com.launchdarkly.sdk.server.DataModel.Prerequisite;
+import com.launchdarkly.sdk.server.DataModel.SegmentTarget;
import com.launchdarkly.sdk.server.DataModel.Target;
+import com.launchdarkly.sdk.server.DataModelPreprocessing.ClausePreprocessed;
+
+import java.util.Collection;
+import java.util.List;
+import java.util.Set;
import static com.launchdarkly.sdk.EvaluationDetail.NO_VARIATION;
+import static com.launchdarkly.sdk.server.EvaluatorHelpers.contextKeyIsInTargetList;
/**
- * Low-level helpers for producing various kinds of evaluation results.
+ * Low-level helpers for producing various kinds of evaluation results. We also put any
+ * helpers here that are used by Evaluator if they are static, i.e. if they can be
+ * implemented without reference to the Evaluator instance's own state, so as to keep the
+ * Evaluator logic smaller and easier to follow.
*
* For all of the methods that return an {@link EvalResult}, the behavior is as follows:
* First we check if the flag data contains a preprocessed value for this kind of result; if
@@ -64,4 +78,69 @@ static EvaluationDetail evaluationDetailForVariation(FeatureFlag flag,
variation,
reason);
}
+
+ static boolean maybeNegate(Clause clause, boolean b) {
+ return clause.isNegate() ? !b : b;
+ }
+
+ // Performs an operator test between a single context value and all of the clause values, for any
+ // operator except segmentMatch.
+ static boolean matchClauseWithoutSegments(Clause clause, LDValue contextValue) {
+ Operator op = clause.getOp();
+ if (op != null) {
+ ClausePreprocessed preprocessed = clause.preprocessed;
+ if (op == Operator.in) {
+ // see if we have precomputed a Set for fast equality matching
+ Set vs = preprocessed == null ? null : preprocessed.valuesSet;
+ if (vs != null) {
+ return vs.contains(contextValue);
+ }
+ }
+ List values = clause.getValues();
+ List preprocessedValues =
+ preprocessed == null ? null : preprocessed.valuesExtra;
+ int n = values.size();
+ for (int i = 0; i < n; i++) {
+ // the preprocessed list, if present, will always have the same size as the values list
+ ClausePreprocessed.ValueData p = preprocessedValues == null ? null : preprocessedValues.get(i);
+ LDValue v = values.get(i);
+ if (EvaluatorOperators.apply(op, contextValue, v, p)) {
+ return true;
+ }
+ }
+ }
+ return false;
+ }
+
+ static boolean matchClauseByKind(Clause clause, LDContext context) {
+ // If attribute is "kind", then we treat operator and values as a match expression against a list
+ // of all individual kinds in the context. That is, for a multi-kind context with kinds of "org"
+ // and "user", it is a match if either of those strings is a match with Operator and Values.
+ for (int i = 0; i < context.getIndividualContextCount(); i++) {
+ if (matchClauseWithoutSegments(clause, LDValue.of(
+ context.getIndividualContext(i).getKind().toString()))) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ static boolean contextKeyIsInTargetList(LDContext context, ContextKind contextKind, Collection keys) {
+ if (keys.isEmpty()) {
+ return false;
+ }
+ LDContext matchContext = context.getIndividualContext(contextKind);
+ return matchContext != null && keys.contains(matchContext.getKey());
+ }
+
+ static boolean contextKeyIsInTargetLists(LDContext context, List targets) {
+ int nTargets = targets.size();
+ for (int i = 0; i < nTargets; i++) {
+ SegmentTarget t = targets.get(i);
+ if (contextKeyIsInTargetList(context, t.getContextKind(), t.getValues())) {
+ return true;
+ }
+ }
+ return false;
+ }
}
diff --git a/src/main/java/com/launchdarkly/sdk/server/EvaluatorOperators.java b/src/main/java/com/launchdarkly/sdk/server/EvaluatorOperators.java
index d3043742b..635e0acb4 100644
--- a/src/main/java/com/launchdarkly/sdk/server/EvaluatorOperators.java
+++ b/src/main/java/com/launchdarkly/sdk/server/EvaluatorOperators.java
@@ -1,9 +1,13 @@
package com.launchdarkly.sdk.server;
import com.launchdarkly.sdk.LDValue;
+import com.launchdarkly.sdk.server.DataModel.Operator;
import com.launchdarkly.sdk.server.DataModelPreprocessing.ClausePreprocessed;
import java.time.Instant;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.function.Function;
import java.util.regex.Pattern;
import static com.launchdarkly.sdk.server.EvaluatorTypeConversion.valueToDateTime;
@@ -16,138 +20,105 @@
abstract class EvaluatorOperators {
private EvaluatorOperators() {}
- private static enum ComparisonOp {
- EQ,
- LT,
- LTE,
- GT,
- GTE;
-
- boolean test(int delta) {
- switch (this) {
- case EQ:
- return delta == 0;
- case LT:
- return delta < 0;
- case LTE:
- return delta <= 0;
- case GT:
- return delta > 0;
- case GTE:
- return delta >= 0;
- }
- // COVERAGE: the compiler insists on a fallthrough line here, even though it's unreachable
- return false;
- }
+ private static interface OperatorFn {
+ boolean match(LDValue userValue, LDValue clauseValue, ClausePreprocessed.ValueData preprocessed);
}
-
+
+ private static final Map OPERATORS = new HashMap<>();
+ static {
+ OPERATORS.put(Operator.in, EvaluatorOperators::applyIn);
+ OPERATORS.put(Operator.startsWith, EvaluatorOperators::applyStartsWith);
+ OPERATORS.put(Operator.endsWith, EvaluatorOperators::applyEndsWith);
+ OPERATORS.put(Operator.matches, EvaluatorOperators::applyMatches);
+ OPERATORS.put(Operator.contains, EvaluatorOperators::applyContains);
+ OPERATORS.put(Operator.lessThan, numericComparison(delta -> delta < 0));
+ OPERATORS.put(Operator.lessThanOrEqual, numericComparison(delta -> delta <= 0));
+ OPERATORS.put(Operator.greaterThan, numericComparison(delta -> delta > 0));
+ OPERATORS.put(Operator.greaterThanOrEqual, numericComparison(delta -> delta >= 0));
+ OPERATORS.put(Operator.before, dateComparison(delta -> delta < 0));
+ OPERATORS.put(Operator.after, dateComparison(delta -> delta > 0));
+ OPERATORS.put(Operator.semVerEqual, semVerComparison(delta -> delta == 0));
+ OPERATORS.put(Operator.semVerLessThan, semVerComparison(delta -> delta < 0));
+ OPERATORS.put(Operator.semVerGreaterThan, semVerComparison(delta -> delta > 0));
+ // Operator.segmentMatch is deliberately not included here, because it is implemented
+ // separately in Evaluator.
+ }
+
static boolean apply(
DataModel.Operator op,
LDValue userValue,
LDValue clauseValue,
ClausePreprocessed.ValueData preprocessed
) {
- switch (op) {
- case in:
- return userValue.equals(clauseValue);
-
- case endsWith:
- return userValue.isString() && clauseValue.isString() && userValue.stringValue().endsWith(clauseValue.stringValue());
-
- case startsWith:
- return userValue.isString() && clauseValue.isString() && userValue.stringValue().startsWith(clauseValue.stringValue());
-
- case matches:
- // If preprocessed is non-null, it means we've already tried to parse the clause value as a regex,
- // in which case if preprocessed.parsedRegex is null it was not a valid regex.
- Pattern clausePattern = preprocessed == null ? valueToRegex(clauseValue) : preprocessed.parsedRegex;
- return clausePattern != null && userValue.isString() &&
- clausePattern.matcher(userValue.stringValue()).find();
-
- case contains:
- return userValue.isString() && clauseValue.isString() && userValue.stringValue().contains(clauseValue.stringValue());
-
- case lessThan:
- return compareNumeric(ComparisonOp.LT, userValue, clauseValue);
-
- case lessThanOrEqual:
- return compareNumeric(ComparisonOp.LTE, userValue, clauseValue);
-
- case greaterThan:
- return compareNumeric(ComparisonOp.GT, userValue, clauseValue);
-
- case greaterThanOrEqual:
- return compareNumeric(ComparisonOp.GTE, userValue, clauseValue);
-
- case before:
- return compareDate(ComparisonOp.LT, userValue, clauseValue, preprocessed);
-
- case after:
- return compareDate(ComparisonOp.GT, userValue, clauseValue, preprocessed);
+ OperatorFn fn = OPERATORS.get(op);
+ return fn != null && fn.match(userValue, clauseValue, preprocessed);
+ }
- case semVerEqual:
- return compareSemVer(ComparisonOp.EQ, userValue, clauseValue, preprocessed);
+ static boolean applyIn(LDValue userValue, LDValue clauseValue, ClausePreprocessed.ValueData preprocessed) {
+ return userValue.equals(clauseValue);
+ }
- case semVerLessThan:
- return compareSemVer(ComparisonOp.LT, userValue, clauseValue, preprocessed);
+ static boolean applyStartsWith(LDValue userValue, LDValue clauseValue, ClausePreprocessed.ValueData preprocessed) {
+ return userValue.isString() && clauseValue.isString() && userValue.stringValue().startsWith(clauseValue.stringValue());
+ }
- case semVerGreaterThan:
- return compareSemVer(ComparisonOp.GT, userValue, clauseValue, preprocessed);
+ static boolean applyEndsWith(LDValue userValue, LDValue clauseValue, ClausePreprocessed.ValueData preprocessed) {
+ return userValue.isString() && clauseValue.isString() && userValue.stringValue().endsWith(clauseValue.stringValue());
+ }
- case segmentMatch:
- // We shouldn't call apply() for this operator, because it is really implemented in
- // Evaluator.clauseMatchesUser().
- return false;
- };
- // COVERAGE: the compiler insists on a fallthrough line here, even though it's unreachable
- return false;
+ static boolean applyMatches(LDValue userValue, LDValue clauseValue, ClausePreprocessed.ValueData preprocessed) {
+ // If preprocessed is non-null, it means we've already tried to parse the clause value as a regex,
+ // in which case if preprocessed.parsedRegex is null it was not a valid regex.
+ Pattern clausePattern = preprocessed == null ? valueToRegex(clauseValue) : preprocessed.parsedRegex;
+ return clausePattern != null && userValue.isString() &&
+ clausePattern.matcher(userValue.stringValue()).find();
}
- private static boolean compareNumeric(ComparisonOp op, LDValue userValue, LDValue clauseValue) {
- if (!userValue.isNumber() || !clauseValue.isNumber()) {
- return false;
- }
- double n1 = userValue.doubleValue();
- double n2 = clauseValue.doubleValue();
- int compare = n1 == n2 ? 0 : (n1 < n2 ? -1 : 1);
- return op.test(compare);
+ static boolean applyContains(LDValue userValue, LDValue clauseValue, ClausePreprocessed.ValueData preprocessed) {
+ return userValue.isString() && clauseValue.isString() && userValue.stringValue().contains(clauseValue.stringValue());
}
- private static boolean compareDate(
- ComparisonOp op,
- LDValue userValue,
- LDValue clauseValue,
- ClausePreprocessed.ValueData preprocessed
- ) {
- // If preprocessed is non-null, it means we've already tried to parse the clause value as a date/time,
- // in which case if preprocessed.parsedDate is null it was not a valid date/time.
- Instant clauseDate = preprocessed == null ? valueToDateTime(clauseValue) : preprocessed.parsedDate;
- if (clauseDate == null) {
- return false;
- }
- Instant userDate = valueToDateTime(userValue);
- if (userDate == null) {
- return false;
- }
- return op.test(userDate.compareTo(clauseDate));
+ static OperatorFn numericComparison(Function comparisonTest) {
+ return (userValue, clauseValue, preprocessed) -> {
+ if (!userValue.isNumber() || !clauseValue.isNumber()) {
+ return false;
+ }
+ double n1 = userValue.doubleValue();
+ double n2 = clauseValue.doubleValue();
+ int delta = n1 == n2 ? 0 : (n1 < n2 ? -1 : 1);
+ return comparisonTest.apply(delta);
+ };
}
- private static boolean compareSemVer(
- ComparisonOp op,
- LDValue userValue,
- LDValue clauseValue,
- ClausePreprocessed.ValueData preprocessed
- ) {
- // If preprocessed is non-null, it means we've already tried to parse the clause value as a version,
- // in which case if preprocessed.parsedSemVer is null it was not a valid version.
- SemanticVersion clauseVer = preprocessed == null ? valueToSemVer(clauseValue) : preprocessed.parsedSemVer;
- if (clauseVer == null) {
- return false;
- }
- SemanticVersion userVer = valueToSemVer(userValue);
- if (userVer == null) {
- return false;
- }
- return op.test(userVer.compareTo(clauseVer));
+ static OperatorFn dateComparison(Function comparisonTest) {
+ return (userValue, clauseValue, preprocessed) -> {
+ // If preprocessed is non-null, it means we've already tried to parse the clause value as a date/time,
+ // in which case if preprocessed.parsedDate is null it was not a valid date/time.
+ Instant clauseDate = preprocessed == null ? valueToDateTime(clauseValue) : preprocessed.parsedDate;
+ if (clauseDate == null) {
+ return false;
+ }
+ Instant userDate = valueToDateTime(userValue);
+ if (userDate == null) {
+ return false;
+ }
+ return comparisonTest.apply(userDate.compareTo(clauseDate));
+ };
+ }
+
+ static OperatorFn semVerComparison(Function comparisonTest) {
+ return (userValue, clauseValue, preprocessed) -> {
+ // If preprocessed is non-null, it means we've already tried to parse the clause value as a version,
+ // in which case if preprocessed.parsedSemVer is null it was not a valid version.
+ SemanticVersion clauseVer = preprocessed == null ? valueToSemVer(clauseValue) : preprocessed.parsedSemVer;
+ if (clauseVer == null) {
+ return false;
+ }
+ SemanticVersion userVer = valueToSemVer(userValue);
+ if (userVer == null) {
+ return false;
+ }
+ return comparisonTest.apply(userVer.compareTo(clauseVer));
+ };
}
}
diff --git a/src/main/java/com/launchdarkly/sdk/server/EventFactory.java b/src/main/java/com/launchdarkly/sdk/server/EventFactory.java
deleted file mode 100644
index bc06127fd..000000000
--- a/src/main/java/com/launchdarkly/sdk/server/EventFactory.java
+++ /dev/null
@@ -1,211 +0,0 @@
-package com.launchdarkly.sdk.server;
-
-import com.launchdarkly.sdk.EvaluationReason;
-import com.launchdarkly.sdk.EvaluationReason.ErrorKind;
-import com.launchdarkly.sdk.LDUser;
-import com.launchdarkly.sdk.LDValue;
-import com.launchdarkly.sdk.server.DataModel.FeatureFlag;
-import com.launchdarkly.sdk.server.interfaces.Event;
-import com.launchdarkly.sdk.server.interfaces.Event.Custom;
-import com.launchdarkly.sdk.server.interfaces.Event.FeatureRequest;
-import com.launchdarkly.sdk.server.interfaces.Event.Identify;
-
-import java.util.function.Supplier;
-
-abstract class EventFactory {
- public static final EventFactory DEFAULT = new Default(false, null);
- public static final EventFactory DEFAULT_WITH_REASONS = new Default(true, null);
-
- abstract Event.FeatureRequest newFeatureRequestEvent(
- DataModel.FeatureFlag flag,
- LDUser user,
- LDValue value,
- int variationIndex,
- EvaluationReason reason,
- boolean forceReasonTracking,
- LDValue defaultValue,
- String prereqOf
- );
-
- abstract Event.FeatureRequest newUnknownFeatureRequestEvent(
- String key,
- LDUser user,
- LDValue defaultValue,
- EvaluationReason.ErrorKind errorKind
- );
-
- abstract Event.Custom newCustomEvent(String key, LDUser user, LDValue data, Double metricValue);
-
- abstract Event.Identify newIdentifyEvent(LDUser user);
-
- abstract Event.AliasEvent newAliasEvent(LDUser user, LDUser previousUser);
-
- final Event.FeatureRequest newFeatureRequestEvent(
- DataModel.FeatureFlag flag,
- LDUser user,
- EvalResult result,
- LDValue defaultValue
- ) {
- return newFeatureRequestEvent(
- flag,
- user,
- result == null ? null : result.getValue(),
- result == null ? -1 : result.getVariationIndex(),
- result == null ? null : result.getReason(),
- result != null && result.isForceReasonTracking(),
- defaultValue,
- null
- );
- }
-
- final Event.FeatureRequest newDefaultFeatureRequestEvent(
- DataModel.FeatureFlag flag,
- LDUser user,
- LDValue defaultVal,
- EvaluationReason.ErrorKind errorKind
- ) {
- return newFeatureRequestEvent(
- flag,
- user,
- defaultVal,
- -1,
- EvaluationReason.error(errorKind),
- false,
- defaultVal,
- null
- );
- }
-
- final Event.FeatureRequest newPrerequisiteFeatureRequestEvent(
- DataModel.FeatureFlag prereqFlag,
- LDUser user,
- EvalResult result,
- DataModel.FeatureFlag prereqOf
- ) {
- return newFeatureRequestEvent(
- prereqFlag,
- user,
- result == null ? null : result.getValue(),
- result == null ? -1 : result.getVariationIndex(),
- result == null ? null : result.getReason(),
- result != null && result.isForceReasonTracking(),
- LDValue.ofNull(),
- prereqOf.getKey()
- );
- }
-
- static final Event.FeatureRequest newDebugEvent(Event.FeatureRequest from) {
- return new Event.FeatureRequest(
- from.getCreationDate(),
- from.getKey(),
- from.getUser(),
- from.getVersion(),
- from.getVariation(),
- from.getValue(),
- from.getDefaultVal(),
- from.getReason(),
- from.getPrereqOf(),
- from.isTrackEvents(),
- from.getDebugEventsUntilDate(),
- true
- );
- }
-
- static class Default extends EventFactory {
- private final boolean includeReasons;
- private final Supplier timestampFn;
-
- Default(boolean includeReasons, Supplier timestampFn) {
- this.includeReasons = includeReasons;
- this.timestampFn = timestampFn != null ? timestampFn : (() -> System.currentTimeMillis());
- }
-
- @Override
- final Event.FeatureRequest newFeatureRequestEvent(DataModel.FeatureFlag flag, LDUser user,
- LDValue value, int variationIndex, EvaluationReason reason, boolean forceReasonTracking,
- LDValue defaultValue, String prereqOf){
- return new Event.FeatureRequest(
- timestampFn.get(),
- flag.getKey(),
- user,
- flag.getVersion(),
- variationIndex,
- value,
- defaultValue,
- (forceReasonTracking || includeReasons) ? reason : null,
- prereqOf,
- forceReasonTracking || flag.isTrackEvents(),
- flag.getDebugEventsUntilDate() == null ? 0 : flag.getDebugEventsUntilDate().longValue(),
- false
- );
- }
-
- @Override
- final Event.FeatureRequest newUnknownFeatureRequestEvent(
- String key,
- LDUser user,
- LDValue defaultValue,
- EvaluationReason.ErrorKind errorKind
- ) {
- return new Event.FeatureRequest(
- timestampFn.get(),
- key,
- user,
- -1,
- -1,
- defaultValue,
- defaultValue,
- includeReasons ? EvaluationReason.error(errorKind) : null,
- null,
- false,
- 0,
- false
- );
- }
-
- @Override
- Event.Custom newCustomEvent(String key, LDUser user, LDValue data, Double metricValue) {
- return new Event.Custom(timestampFn.get(), key, user, data, metricValue);
- }
-
- @Override
- Event.Identify newIdentifyEvent(LDUser user) {
- return new Event.Identify(timestampFn.get(), user);
- }
-
- @Override
- Event.AliasEvent newAliasEvent(LDUser user, LDUser previousUser) {
- return new Event.AliasEvent(timestampFn.get(), user, previousUser);
- }
- }
-
- static final class Disabled extends EventFactory {
- static final Disabled INSTANCE = new Disabled();
-
- @Override
- final FeatureRequest newFeatureRequestEvent(FeatureFlag flag, LDUser user, LDValue value, int variationIndex,
- EvaluationReason reason, boolean inExperiment, LDValue defaultValue, String prereqOf) {
- return null;
- }
-
- @Override
- final FeatureRequest newUnknownFeatureRequestEvent(String key, LDUser user, LDValue defaultValue, ErrorKind errorKind) {
- return null;
- }
-
- @Override
- final Custom newCustomEvent(String key, LDUser user, LDValue data, Double metricValue) {
- return null;
- }
-
- @Override
- final Identify newIdentifyEvent(LDUser user) {
- return null;
- }
-
- @Override
- Event.AliasEvent newAliasEvent(LDUser user, LDUser previousUser) {
- return null;
- }
- }
-}
diff --git a/src/main/java/com/launchdarkly/sdk/server/EventOutputFormatter.java b/src/main/java/com/launchdarkly/sdk/server/EventOutputFormatter.java
deleted file mode 100644
index 85f8c3b9d..000000000
--- a/src/main/java/com/launchdarkly/sdk/server/EventOutputFormatter.java
+++ /dev/null
@@ -1,209 +0,0 @@
-package com.launchdarkly.sdk.server;
-
-import com.google.gson.Gson;
-import com.google.gson.stream.JsonWriter;
-import com.launchdarkly.sdk.EvaluationReason;
-import com.launchdarkly.sdk.LDUser;
-import com.launchdarkly.sdk.LDValue;
-import com.launchdarkly.sdk.server.EventSummarizer.CounterValue;
-import com.launchdarkly.sdk.server.EventSummarizer.FlagInfo;
-import com.launchdarkly.sdk.server.EventSummarizer.SimpleIntKeyedMap;
-import com.launchdarkly.sdk.server.interfaces.Event;
-
-import java.io.IOException;
-import java.io.Writer;
-import java.util.Map;
-
-/**
- * Transforms analytics events and summary data into the JSON format that we send to LaunchDarkly.
- * Rather than creating intermediate objects to represent this schema, we use the Gson streaming
- * output API to construct JSON directly.
- *
- * Test coverage for this logic is in EventOutputTest and DefaultEventProcessorOutputTest.
- */
-final class EventOutputFormatter {
- private final EventsConfiguration config;
- private final Gson gson;
-
- EventOutputFormatter(EventsConfiguration config) {
- this.config = config;
- this.gson = JsonHelpers.gsonInstanceForEventsSerialization(config);
- }
-
- @SuppressWarnings("resource")
- final int writeOutputEvents(Event[] events, EventSummarizer.EventSummary summary, Writer writer) throws IOException {
- int count = events.length;
- try (JsonWriter jsonWriter = new JsonWriter(writer)) {
- jsonWriter.beginArray();
- for (Event event: events) {
- writeOutputEvent(event, jsonWriter);
- }
- if (!summary.isEmpty()) {
- writeSummaryEvent(summary, jsonWriter);
- count++;
- }
- jsonWriter.endArray();
- }
- return count;
- }
-
- private final void writeOutputEvent(Event event, JsonWriter jw) throws IOException {
- if (event instanceof Event.FeatureRequest) {
- Event.FeatureRequest fe = (Event.FeatureRequest)event;
- startEvent(fe, fe.isDebug() ? "debug" : "feature", fe.getKey(), jw);
- writeUserOrKey(fe, fe.isDebug(), jw);
- if (fe.getVersion() >= 0) {
- jw.name("version");
- jw.value(fe.getVersion());
- }
- if (fe.getVariation() >= 0) {
- jw.name("variation");
- jw.value(fe.getVariation());
- }
- writeLDValue("value", fe.getValue(), jw);
- writeLDValue("default", fe.getDefaultVal(), jw);
- if (fe.getPrereqOf() != null) {
- jw.name("prereqOf");
- jw.value(fe.getPrereqOf());
- }
- writeEvaluationReason("reason", fe.getReason(), jw);
- if (!fe.getContextKind().equals("user")) {
- jw.name("contextKind").value(fe.getContextKind());
- }
- } else if (event instanceof Event.Identify) {
- startEvent(event, "identify", event.getUser() == null ? null : event.getUser().getKey(), jw);
- writeUser(event.getUser(), jw);
- } else if (event instanceof Event.Custom) {
- Event.Custom ce = (Event.Custom)event;
- startEvent(event, "custom", ce.getKey(), jw);
- writeUserOrKey(ce, false, jw);
- writeLDValue("data", ce.getData(), jw);
- if (!ce.getContextKind().equals("user")) {
- jw.name("contextKind").value(ce.getContextKind());
- }
- if (ce.getMetricValue() != null) {
- jw.name("metricValue");
- jw.value(ce.getMetricValue());
- }
- } else if (event instanceof Event.Index) {
- startEvent(event, "index", null, jw);
- writeUser(event.getUser(), jw);
- } else if (event instanceof Event.AliasEvent) {
- Event.AliasEvent ae = (Event.AliasEvent)event;
- startEvent(event, "alias", ae.getKey(), jw);
- jw.name("contextKind").value(ae.getContextKind());
- jw.name("previousKey").value(ae.getPreviousKey());
- jw.name("previousContextKind").value(ae.getPreviousContextKind());
- } else {
- return;
- }
-
- jw.endObject();
- }
-
- private final void writeSummaryEvent(EventSummarizer.EventSummary summary, JsonWriter jw) throws IOException {
- jw.beginObject();
-
- jw.name("kind");
- jw.value("summary");
-
- jw.name("startDate");
- jw.value(summary.startDate);
- jw.name("endDate");
- jw.value(summary.endDate);
-
- jw.name("features");
- jw.beginObject();
-
- for (Map.Entry flag: summary.counters.entrySet()) {
- String flagKey = flag.getKey();
- FlagInfo flagInfo = flag.getValue();
-
- jw.name(flagKey);
- jw.beginObject();
-
- writeLDValue("default", flagInfo.defaultVal, jw);
-
- jw.name("counters");
- jw.beginArray();
-
- for (int i = 0; i < flagInfo.versionsAndVariations.size(); i++) {
- int version = flagInfo.versionsAndVariations.keyAt(i);
- SimpleIntKeyedMap variations = flagInfo.versionsAndVariations.valueAt(i);
- for (int j = 0; j < variations.size(); j++) {
- int variation = variations.keyAt(j);
- CounterValue counter = variations.valueAt(j);
-
- jw.beginObject();
-
- if (variation >= 0) {
- jw.name("variation").value(variation);
- }
- if (version >= 0) {
- jw.name("version").value(version);
- } else {
- jw.name("unknown").value(true);
- }
- writeLDValue("value", counter.flagValue, jw);
- jw.name("count").value(counter.count);
-
- jw.endObject();
- }
- }
-
- jw.endArray(); // end of "counters" array
- jw.endObject(); // end of this flag
- }
-
- jw.endObject(); // end of "features"
- jw.endObject(); // end of summary event object
- }
-
- private final void startEvent(Event event, String kind, String key, JsonWriter jw) throws IOException {
- jw.beginObject();
- jw.name("kind");
- jw.value(kind);
- jw.name("creationDate");
- jw.value(event.getCreationDate());
- if (key != null) {
- jw.name("key");
- jw.value(key);
- }
- }
-
- private final void writeUserOrKey(Event event, boolean forceInline, JsonWriter jw) throws IOException {
- LDUser user = event.getUser();
- if (user != null) {
- if (config.inlineUsersInEvents || forceInline) {
- writeUser(user, jw);
- } else {
- jw.name("userKey");
- jw.value(user.getKey());
- }
- }
- }
-
- private final void writeUser(LDUser user, JsonWriter jw) throws IOException {
- jw.name("user");
- // config.gson is already set up to use our custom serializer, which knows about private attributes
- // and already uses the streaming approach
- gson.toJson(user, LDUser.class, jw);
- }
-
- private final void writeLDValue(String key, LDValue value, JsonWriter jw) throws IOException {
- if (value == null || value.isNull()) {
- return;
- }
- jw.name(key);
- gson.toJson(value, LDValue.class, jw); // LDValue defines its own custom serializer
- }
-
- // This logic is so that we don't have to define multiple custom serializers for the various reason subclasses.
- private final void writeEvaluationReason(String key, EvaluationReason er, JsonWriter jw) throws IOException {
- if (er == null) {
- return;
- }
- jw.name(key);
- gson.toJson(er, EvaluationReason.class, jw); // EvaluationReason defines its own custom serializer
- }
-}
diff --git a/src/main/java/com/launchdarkly/sdk/server/EventSummarizer.java b/src/main/java/com/launchdarkly/sdk/server/EventSummarizer.java
deleted file mode 100644
index 0aaf1276e..000000000
--- a/src/main/java/com/launchdarkly/sdk/server/EventSummarizer.java
+++ /dev/null
@@ -1,293 +0,0 @@
-package com.launchdarkly.sdk.server;
-
-import com.launchdarkly.sdk.LDValue;
-import com.launchdarkly.sdk.server.interfaces.Event;
-
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Objects;
-
-/**
- * Manages the state of summarizable information for the EventProcessor. Note that the
- * methods of this class are deliberately not thread-safe, because they should always
- * be called from EventProcessor's single message-processing thread.
- */
-final class EventSummarizer {
- private EventSummary eventsState;
-
- EventSummarizer() {
- this.eventsState = new EventSummary();
- }
-
- /**
- * Adds this event to our counters, if it is a type of event we need to count.
- * @param event an event
- */
- void summarizeEvent(Event event) {
- if (!(event instanceof Event.FeatureRequest)) {
- return;
- }
- Event.FeatureRequest fe = (Event.FeatureRequest)event;
- eventsState.incrementCounter(fe.getKey(), fe.getVariation(), fe.getVersion(), fe.getValue(), fe.getDefaultVal());
- eventsState.noteTimestamp(fe.getCreationDate());
- }
-
- /**
- * Gets the current summarized event data, and resets the EventSummarizer's state to contain
- * a new empty EventSummary.
- *
- * @return the summary state
- */
- EventSummary getSummaryAndReset() {
- EventSummary ret = eventsState;
- clear();
- return ret;
- }
-
- /**
- * Indicates that we decided not to send the summary values returned by {@link #getSummaryAndReset()},
- * and instead we should return to using the previous state object and keep accumulating data
- * in it.
- */
- void restoreTo(EventSummary previousState) {
- eventsState = previousState;
- }
-
- /**
- * Returns true if there is no summary data in the current state.
- *
- * @return true if the state is empty
- */
- boolean isEmpty() {
- return eventsState.isEmpty();
- }
-
- void clear() {
- eventsState = new EventSummary();
- }
-
- static final class EventSummary {
- final Map counters;
- long startDate;
- long endDate;
-
- EventSummary() {
- counters = new HashMap<>();
- }
-
- EventSummary(EventSummary from) {
- counters = new HashMap<>(from.counters);
- startDate = from.startDate;
- endDate = from.endDate;
- }
-
- boolean isEmpty() {
- return counters.isEmpty();
- }
-
- void incrementCounter(String flagKey, int variation, int version, LDValue flagValue, LDValue defaultVal) {
- FlagInfo flagInfo = counters.get(flagKey);
- if (flagInfo == null) {
- flagInfo = new FlagInfo(defaultVal, new SimpleIntKeyedMap<>());
- counters.put(flagKey, flagInfo);
- }
-
- SimpleIntKeyedMap variations = flagInfo.versionsAndVariations.get(version);
- if (variations == null) {
- variations = new SimpleIntKeyedMap<>();
- flagInfo.versionsAndVariations.put(version, variations);
- }
-
- CounterValue value = variations.get(variation);
- if (value == null) {
- variations.put(variation, new CounterValue(1, flagValue));
- } else {
- value.increment();
- }
- }
-
- void noteTimestamp(long time) {
- if (startDate == 0 || time < startDate) {
- startDate = time;
- }
- if (time > endDate) {
- endDate = time;
- }
- }
-
- @Override
- public boolean equals(Object other) {
- if (other instanceof EventSummary) {
- EventSummary o = (EventSummary)other;
- return o.counters.equals(counters) && startDate == o.startDate && endDate == o.endDate;
- }
- return false;
- }
-
- @Override
- public int hashCode() {
- // We can't make meaningful hash codes for EventSummary, because the same counters could be
- // represented differently in our Map. It doesn't matter because there's no reason to use an
- // EventSummary instance as a hash key.
- return 0;
- }
- }
-
- static final class FlagInfo {
- final LDValue defaultVal;
- final SimpleIntKeyedMap> versionsAndVariations;
-
- FlagInfo(LDValue defaultVal, SimpleIntKeyedMap> versionsAndVariations) {
- this.defaultVal = defaultVal;
- this.versionsAndVariations = versionsAndVariations;
- }
-
- @Override
- public boolean equals(Object other) {
- if (other instanceof FlagInfo) {
- FlagInfo o = (FlagInfo)other;
- return o.defaultVal.equals(this.defaultVal) && o.versionsAndVariations.equals(this.versionsAndVariations);
- }
- return false;
- }
-
- @Override
- public int hashCode() {
- return this.defaultVal.hashCode() + 31 * versionsAndVariations.hashCode();
- }
-
- @Override
- public String toString() { // used only in tests
- return "(default=" + defaultVal + ", counters=" + versionsAndVariations + ")";
- }
- }
-
- static final class CounterValue {
- long count;
- final LDValue flagValue;
-
- CounterValue(long count, LDValue flagValue) {
- this.count = count;
- this.flagValue = flagValue;
- }
-
- void increment() {
- count = count + 1;
- }
-
- @Override
- public boolean equals(Object other)
- {
- if (other instanceof CounterValue) {
- CounterValue o = (CounterValue)other;
- return count == o.count && Objects.equals(flagValue, o.flagValue);
- }
- return false;
- }
-
- @Override
- public String toString() { // used only in tests
- return "(" + count + "," + flagValue + ")";
- }
- }
-
- // A very simple array-backed structure with map-like semantics for primitive int keys. This
- // is highly specialized for the EventSummarizer use case (which is why it is an inner class
- // of EventSummarizer, to emphasize that it should not be used elsewhere). It makes the
- // following assumptions:
- // - The number of keys will almost always be small: most flags have only a few variations,
- // and most flags will have only one version or a few versions during the lifetime of an
- // event payload. Therefore, we use simple iteration and int comparisons for the keys; the
- // overhead of this is likely less than the overhead of maintaining a hashtable and creating
- // objects for its keys and iterators.
- // - Data will never be deleted from the map after being added (the summarizer simply makes
- // a new map when it's time to start over).
- static final class SimpleIntKeyedMap {
- private static final int INITIAL_CAPACITY = 4;
-
- private int[] keys;
- private Object[] values;
- private int n;
-
- SimpleIntKeyedMap() {
- keys = new int[INITIAL_CAPACITY];
- values = new Object[INITIAL_CAPACITY];
- }
-
- int size() {
- return n;
- }
-
- int capacity() {
- return keys.length;
- }
-
- int keyAt(int index) {
- return keys[index];
- }
-
- @SuppressWarnings("unchecked")
- T valueAt(int index) {
- return (T)values[index];
- }
-
- @SuppressWarnings("unchecked")
- T get(int key) {
- for (int i = 0; i < n; i++) {
- if (keys[i] == key) {
- return (T)values[i];
- }
- }
- return null;
- }
-
- SimpleIntKeyedMap put(int key, T value) {
- for (int i = 0; i < n; i++) {
- if (keys[i] == key) {
- values[i] = value;
- return this;
- }
- }
- if (n == keys.length) {
- int[] newKeys = new int[keys.length * 2];
- System.arraycopy(keys, 0, newKeys, 0, n);
- Object[] newValues = new Object[keys.length * 2];
- System.arraycopy(values, 0, newValues, 0, n);
- keys = newKeys;
- values = newValues;
- }
- keys[n] = key;
- values[n] = value;
- n++;
- return this;
- }
-
- @SuppressWarnings("unchecked")
- @Override
- public boolean equals(Object o) { // used only in tests
- if (o instanceof SimpleIntKeyedMap>) {
- SimpleIntKeyedMap other = (SimpleIntKeyedMap)o;
- if (this.n == other.n) {
- for (int i = 0; i < n; i++) {
- T value1 = (T)values[i], value2 = other.get(keys[i]);
- if (!Objects.equals(value1, value2)) {
- return false;
- }
- }
- return true;
- }
- }
- return false;
- }
-
- @Override
- public String toString() { // used only in tests
- StringBuilder s = new StringBuilder("{");
- for (int i = 0; i < n; i++) {
- s.append(keys[i]).append("=").append(values[i] == null ? "null" : values[i].toString());
- }
- s.append("}");
- return s.toString();
- }
- }
-}
diff --git a/src/main/java/com/launchdarkly/sdk/server/EventUserSerialization.java b/src/main/java/com/launchdarkly/sdk/server/EventUserSerialization.java
deleted file mode 100644
index ab6b190b0..000000000
--- a/src/main/java/com/launchdarkly/sdk/server/EventUserSerialization.java
+++ /dev/null
@@ -1,112 +0,0 @@
-package com.launchdarkly.sdk.server;
-
-import com.google.gson.TypeAdapter;
-import com.google.gson.stream.JsonReader;
-import com.google.gson.stream.JsonWriter;
-import com.launchdarkly.sdk.LDUser;
-import com.launchdarkly.sdk.LDValue;
-import com.launchdarkly.sdk.UserAttribute;
-
-import java.io.IOException;
-import java.util.Set;
-import java.util.TreeSet;
-
-abstract class EventUserSerialization {
- private EventUserSerialization() {}
-
- // Used internally when including users in analytics events, to ensure that private attributes are stripped out.
- static class UserAdapterWithPrivateAttributeBehavior extends TypeAdapter {
- private static final UserAttribute[] BUILT_IN_OPTIONAL_STRING_ATTRIBUTES = new UserAttribute[] {
- UserAttribute.SECONDARY_KEY,
- UserAttribute.IP,
- UserAttribute.EMAIL,
- UserAttribute.NAME,
- UserAttribute.AVATAR,
- UserAttribute.FIRST_NAME,
- UserAttribute.LAST_NAME,
- UserAttribute.COUNTRY
- };
-
- private final EventsConfiguration config;
-
- public UserAdapterWithPrivateAttributeBehavior(EventsConfiguration config) {
- this.config = config;
- }
-
- @Override
- public void write(JsonWriter out, LDUser user) throws IOException {
- if (user == null) {
- out.value((String)null);
- return;
- }
-
- // Collect the private attribute names (use TreeSet to make ordering predictable for tests)
- Set privateAttributeNames = new TreeSet();
-
- out.beginObject();
- // The key can never be private
- out.name("key").value(user.getKey());
-
- for (UserAttribute attr: BUILT_IN_OPTIONAL_STRING_ATTRIBUTES) {
- LDValue value = user.getAttribute(attr);
- if (!value.isNull()) {
- if (!checkAndAddPrivate(attr, user, privateAttributeNames)) {
- out.name(attr.getName()).value(value.stringValue());
- }
- }
- }
- if (!user.getAttribute(UserAttribute.ANONYMOUS).isNull()) {
- out.name("anonymous").value(user.isAnonymous());
- }
- writeCustomAttrs(out, user, privateAttributeNames);
- writePrivateAttrNames(out, privateAttributeNames);
-
- out.endObject();
- }
-
- private void writePrivateAttrNames(JsonWriter out, Set names) throws IOException {
- if (names.isEmpty()) {
- return;
- }
- out.name("privateAttrs");
- out.beginArray();
- for (String name : names) {
- out.value(name);
- }
- out.endArray();
- }
-
- private boolean checkAndAddPrivate(UserAttribute attribute, LDUser user, Set privateAttrs) {
- boolean result = config.allAttributesPrivate || config.privateAttributes.contains(attribute) || user.isAttributePrivate(attribute);
- if (result) {
- privateAttrs.add(attribute.getName());
- }
- return result;
- }
-
- private void writeCustomAttrs(JsonWriter out, LDUser user, Set privateAttributeNames) throws IOException {
- boolean beganObject = false;
- for (UserAttribute attribute: user.getCustomAttributes()) {
- if (!checkAndAddPrivate(attribute, user, privateAttributeNames)) {
- if (!beganObject) {
- out.name("custom");
- out.beginObject();
- beganObject = true;
- }
- out.name(attribute.getName());
- LDValue value = user.getAttribute(attribute);
- JsonHelpers.gsonInstance().toJson(value, LDValue.class, out);
- }
- }
- if (beganObject) {
- out.endObject();
- }
- }
-
- @Override
- public LDUser read(JsonReader in) throws IOException {
- // We never need to unmarshal user objects, so there's no need to implement this
- return null;
- }
- }
-}
diff --git a/src/main/java/com/launchdarkly/sdk/server/EventsConfiguration.java b/src/main/java/com/launchdarkly/sdk/server/EventsConfiguration.java
deleted file mode 100644
index 5e64742b7..000000000
--- a/src/main/java/com/launchdarkly/sdk/server/EventsConfiguration.java
+++ /dev/null
@@ -1,48 +0,0 @@
-package com.launchdarkly.sdk.server;
-
-import com.google.common.collect.ImmutableSet;
-import com.launchdarkly.sdk.UserAttribute;
-import com.launchdarkly.sdk.server.interfaces.EventSender;
-
-import java.net.URI;
-import java.time.Duration;
-import java.util.Set;
-
-// Used internally to encapsulate the various config/builder properties for events.
-final class EventsConfiguration {
- final boolean allAttributesPrivate;
- final int capacity;
- final EventSender eventSender;
- final URI eventsUri;
- final Duration flushInterval;
- final boolean inlineUsersInEvents;
- final ImmutableSet privateAttributes;
- final int userKeysCapacity;
- final Duration userKeysFlushInterval;
- final Duration diagnosticRecordingInterval;
-
- EventsConfiguration(
- boolean allAttributesPrivate,
- int capacity,
- EventSender eventSender,
- URI eventsUri,
- Duration flushInterval,
- boolean inlineUsersInEvents,
- Set privateAttributes,
- int userKeysCapacity,
- Duration userKeysFlushInterval,
- Duration diagnosticRecordingInterval
- ) {
- super();
- this.allAttributesPrivate = allAttributesPrivate;
- this.capacity = capacity;
- this.eventSender = eventSender;
- this.eventsUri = eventsUri;
- this.flushInterval = flushInterval;
- this.inlineUsersInEvents = inlineUsersInEvents;
- this.privateAttributes = privateAttributes == null ? ImmutableSet.of() : ImmutableSet.copyOf(privateAttributes);
- this.userKeysCapacity = userKeysCapacity;
- this.userKeysFlushInterval = userKeysFlushInterval;
- this.diagnosticRecordingInterval = diagnosticRecordingInterval;
- }
-}
\ No newline at end of file
diff --git a/src/main/java/com/launchdarkly/sdk/server/FeatureFlagsState.java b/src/main/java/com/launchdarkly/sdk/server/FeatureFlagsState.java
index 071ee5ee0..e48f03769 100644
--- a/src/main/java/com/launchdarkly/sdk/server/FeatureFlagsState.java
+++ b/src/main/java/com/launchdarkly/sdk/server/FeatureFlagsState.java
@@ -20,7 +20,7 @@
/**
* A snapshot of the state of all feature flags with regard to a specific user, generated by
- * calling {@link LDClientInterface#allFlagsState(com.launchdarkly.sdk.LDUser, FlagsStateOption...)}.
+ * calling {@link LDClientInterface#allFlagsState(com.launchdarkly.sdk.LDContext, FlagsStateOption...)}.
*
* LaunchDarkly defines a standard JSON encoding for this object, suitable for
* bootstrapping
@@ -91,10 +91,10 @@ private FeatureFlagsState(ImmutableMap flagMetadata, boole
*
* Application code will not normally use this builder, since the SDK creates its own instances.
* However, it may be useful in testing, to simulate values that might be returned by
- * {@link LDClient#allFlagsState(com.launchdarkly.sdk.LDUser, FlagsStateOption...)}.
+ * {@link LDClient#allFlagsState(com.launchdarkly.sdk.LDContext, FlagsStateOption...)}.
*
* @param options the same {@link FlagsStateOption}s, if any, that would be passed to
- * {@link LDClient#allFlagsState(com.launchdarkly.sdk.LDUser, FlagsStateOption...)}
+ * {@link LDClient#allFlagsState(com.launchdarkly.sdk.LDContext, FlagsStateOption...)}
* @return a builder object
* @since 5.6.0
*/
@@ -166,7 +166,7 @@ public int hashCode() {
*
* Application code will not normally use this builder, since the SDK creates its own instances.
* However, it may be useful in testing, to simulate values that might be returned by
- * {@link LDClient#allFlagsState(com.launchdarkly.sdk.LDUser, FlagsStateOption...)}.
+ * {@link LDClient#allFlagsState(com.launchdarkly.sdk.LDContext, FlagsStateOption...)}.
*
* @since 5.6.0
*/
diff --git a/src/main/java/com/launchdarkly/sdk/server/FeatureRequestor.java b/src/main/java/com/launchdarkly/sdk/server/FeatureRequestor.java
index 64a012eec..71c79c3b2 100644
--- a/src/main/java/com/launchdarkly/sdk/server/FeatureRequestor.java
+++ b/src/main/java/com/launchdarkly/sdk/server/FeatureRequestor.java
@@ -1,7 +1,8 @@
package com.launchdarkly.sdk.server;
-import com.launchdarkly.sdk.server.interfaces.DataStoreTypes.FullDataSet;
-import com.launchdarkly.sdk.server.interfaces.DataStoreTypes.ItemDescriptor;
+import com.launchdarkly.sdk.internal.http.HttpErrors.HttpErrorException;
+import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.FullDataSet;
+import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.ItemDescriptor;
import java.io.Closeable;
import java.io.IOException;
diff --git a/src/main/java/com/launchdarkly/sdk/server/FlagTrackerImpl.java b/src/main/java/com/launchdarkly/sdk/server/FlagTrackerImpl.java
index a8f5e1467..3f9fb1aef 100644
--- a/src/main/java/com/launchdarkly/sdk/server/FlagTrackerImpl.java
+++ b/src/main/java/com/launchdarkly/sdk/server/FlagTrackerImpl.java
@@ -1,6 +1,6 @@
package com.launchdarkly.sdk.server;
-import com.launchdarkly.sdk.LDUser;
+import com.launchdarkly.sdk.LDContext;
import com.launchdarkly.sdk.LDValue;
import com.launchdarkly.sdk.server.interfaces.FlagChangeEvent;
import com.launchdarkly.sdk.server.interfaces.FlagChangeListener;
@@ -13,11 +13,11 @@
final class FlagTrackerImpl implements FlagTracker {
private final EventBroadcasterImpl flagChangeBroadcaster;
- private final BiFunction evaluateFn;
+ private final BiFunction evaluateFn;
FlagTrackerImpl(
EventBroadcasterImpl flagChangeBroadcaster,
- BiFunction evaluateFn
+ BiFunction evaluateFn
) {
this.flagChangeBroadcaster = flagChangeBroadcaster;
this.evaluateFn = evaluateFn;
@@ -34,29 +34,29 @@ public void removeFlagChangeListener(FlagChangeListener listener) {
}
@Override
- public FlagChangeListener addFlagValueChangeListener(String flagKey, LDUser user, FlagValueChangeListener listener) {
- FlagValueChangeAdapter adapter = new FlagValueChangeAdapter(flagKey, user, listener);
+ public FlagChangeListener addFlagValueChangeListener(String flagKey, LDContext context, FlagValueChangeListener listener) {
+ FlagValueChangeAdapter adapter = new FlagValueChangeAdapter(flagKey, context, listener);
addFlagChangeListener(adapter);
return adapter;
}
private final class FlagValueChangeAdapter implements FlagChangeListener {
private final String flagKey;
- private final LDUser user;
+ private final LDContext context;
private final FlagValueChangeListener listener;
private final AtomicReference value;
- FlagValueChangeAdapter(String flagKey, LDUser user, FlagValueChangeListener listener) {
+ FlagValueChangeAdapter(String flagKey, LDContext context, FlagValueChangeListener listener) {
this.flagKey = flagKey;
- this.user = user;
+ this.context = context;
this.listener = listener;
- this.value = new AtomicReference<>(evaluateFn.apply(flagKey, user));
+ this.value = new AtomicReference<>(evaluateFn.apply(flagKey, context));
}
@Override
public void onFlagChange(FlagChangeEvent event) {
if (event.getKey().equals(flagKey)) {
- LDValue newValue = evaluateFn.apply(flagKey, user);
+ LDValue newValue = evaluateFn.apply(flagKey, context);
LDValue oldValue = value.getAndSet(newValue);
if (!newValue.equals(oldValue)) {
listener.onFlagValueChange(new FlagValueChangeEvent(flagKey, oldValue, newValue));
diff --git a/src/main/java/com/launchdarkly/sdk/server/FlagsStateOption.java b/src/main/java/com/launchdarkly/sdk/server/FlagsStateOption.java
index bbb9f1d51..8204ba9fc 100644
--- a/src/main/java/com/launchdarkly/sdk/server/FlagsStateOption.java
+++ b/src/main/java/com/launchdarkly/sdk/server/FlagsStateOption.java
@@ -4,7 +4,7 @@
import com.launchdarkly.sdk.server.interfaces.LDClientInterface;
/**
- * Optional parameters that can be passed to {@link LDClientInterface#allFlagsState(com.launchdarkly.sdk.LDUser, FlagsStateOption...)}.
+ * Optional parameters that can be passed to {@link LDClientInterface#allFlagsState(com.launchdarkly.sdk.LDContext, FlagsStateOption...)}.
* @since 4.3.0
*/
public final class FlagsStateOption {
diff --git a/src/main/java/com/launchdarkly/sdk/server/HttpConfigurationImpl.java b/src/main/java/com/launchdarkly/sdk/server/HttpConfigurationImpl.java
deleted file mode 100644
index 9415fe8b1..000000000
--- a/src/main/java/com/launchdarkly/sdk/server/HttpConfigurationImpl.java
+++ /dev/null
@@ -1,78 +0,0 @@
-package com.launchdarkly.sdk.server;
-
-import com.google.common.collect.ImmutableMap;
-import com.launchdarkly.sdk.server.interfaces.HttpAuthentication;
-import com.launchdarkly.sdk.server.interfaces.HttpConfiguration;
-
-import java.net.Proxy;
-import java.time.Duration;
-import java.util.Map;
-
-import javax.net.SocketFactory;
-import javax.net.ssl.SSLSocketFactory;
-import javax.net.ssl.X509TrustManager;
-
-final class HttpConfigurationImpl implements HttpConfiguration {
- final Duration connectTimeout;
- final Proxy proxy;
- final HttpAuthentication proxyAuth;
- final Duration socketTimeout;
- final SocketFactory socketFactory;
- final SSLSocketFactory sslSocketFactory;
- final X509TrustManager trustManager;
- final ImmutableMap defaultHeaders;
-
- HttpConfigurationImpl(Duration connectTimeout, Proxy proxy, HttpAuthentication proxyAuth,
- Duration socketTimeout, SocketFactory socketFactory,
- SSLSocketFactory sslSocketFactory, X509TrustManager trustManager,
- ImmutableMap defaultHeaders) {
- this.connectTimeout = connectTimeout;
- this.proxy = proxy;
- this.proxyAuth = proxyAuth;
- this.socketTimeout = socketTimeout;
- this.socketFactory = socketFactory;
- this.sslSocketFactory = sslSocketFactory;
- this.trustManager = trustManager;
- this.defaultHeaders = defaultHeaders;
- }
-
- @Override
- public Duration getConnectTimeout() {
- return connectTimeout;
- }
-
- @Override
- public Proxy getProxy() {
- return proxy;
- }
-
- @Override
- public HttpAuthentication getProxyAuthentication() {
- return proxyAuth;
- }
-
- @Override
- public Duration getSocketTimeout() {
- return socketTimeout;
- }
-
- @Override
- public SocketFactory getSocketFactory() {
- return socketFactory;
- }
-
- @Override
- public SSLSocketFactory getSslSocketFactory() {
- return sslSocketFactory;
- }
-
- @Override
- public X509TrustManager getTrustManager() {
- return trustManager;
- }
-
- @Override
- public Iterable> getDefaultHeaders() {
- return defaultHeaders.entrySet();
- }
-}
diff --git a/src/main/java/com/launchdarkly/sdk/server/HttpErrorException.java b/src/main/java/com/launchdarkly/sdk/server/HttpErrorException.java
deleted file mode 100644
index 30b10f3ff..000000000
--- a/src/main/java/com/launchdarkly/sdk/server/HttpErrorException.java
+++ /dev/null
@@ -1,15 +0,0 @@
-package com.launchdarkly.sdk.server;
-
-@SuppressWarnings("serial")
-final class HttpErrorException extends Exception {
- private final int status;
-
- public HttpErrorException(int status) {
- super("HTTP error " + status);
- this.status = status;
- }
-
- public int getStatus() {
- return status;
- }
-}
diff --git a/src/main/java/com/launchdarkly/sdk/server/InMemoryDataStore.java b/src/main/java/com/launchdarkly/sdk/server/InMemoryDataStore.java
index e1ab782d0..47530843f 100644
--- a/src/main/java/com/launchdarkly/sdk/server/InMemoryDataStore.java
+++ b/src/main/java/com/launchdarkly/sdk/server/InMemoryDataStore.java
@@ -2,12 +2,12 @@
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
-import com.launchdarkly.sdk.server.interfaces.DataStore;
import com.launchdarkly.sdk.server.interfaces.DataStoreStatusProvider.CacheStats;
-import com.launchdarkly.sdk.server.interfaces.DataStoreTypes.DataKind;
-import com.launchdarkly.sdk.server.interfaces.DataStoreTypes.FullDataSet;
-import com.launchdarkly.sdk.server.interfaces.DataStoreTypes.ItemDescriptor;
-import com.launchdarkly.sdk.server.interfaces.DataStoreTypes.KeyedItems;
+import com.launchdarkly.sdk.server.subsystems.DataStore;
+import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.DataKind;
+import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.FullDataSet;
+import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.ItemDescriptor;
+import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.KeyedItems;
import java.io.IOException;
import java.util.HashMap;
diff --git a/src/main/java/com/launchdarkly/sdk/server/JsonHelpers.java b/src/main/java/com/launchdarkly/sdk/server/JsonHelpers.java
index 73885d3c7..155544415 100644
--- a/src/main/java/com/launchdarkly/sdk/server/JsonHelpers.java
+++ b/src/main/java/com/launchdarkly/sdk/server/JsonHelpers.java
@@ -7,8 +7,7 @@
import com.google.gson.reflect.TypeToken;
import com.google.gson.stream.JsonReader;
import com.google.gson.stream.JsonWriter;
-import com.launchdarkly.sdk.LDUser;
-import com.launchdarkly.sdk.server.interfaces.SerializationException;
+import com.launchdarkly.sdk.server.subsystems.SerializationException;
import java.io.IOException;
@@ -41,15 +40,6 @@ static Gson gsonInstanceWithNullsAllowed() {
return gsonWithNullsAllowed;
}
- /**
- * Creates a Gson instance that will correctly serialize users for the given configuration (private attributes, etc.).
- */
- static Gson gsonInstanceForEventsSerialization(EventsConfiguration config) {
- return new GsonBuilder()
- .registerTypeAdapter(LDUser.class, new EventUserSerialization.UserAdapterWithPrivateAttributeBehavior(config))
- .create();
- }
-
/**
* Deserializes an object from JSON. We should use this helper method instead of directly calling
* gson.fromJson() to minimize reliance on details of the framework we're using, and to ensure that we
diff --git a/src/main/java/com/launchdarkly/sdk/server/LDClient.java b/src/main/java/com/launchdarkly/sdk/server/LDClient.java
index db7a2bba0..77674abe3 100644
--- a/src/main/java/com/launchdarkly/sdk/server/LDClient.java
+++ b/src/main/java/com/launchdarkly/sdk/server/LDClient.java
@@ -5,26 +5,25 @@
import com.launchdarkly.logging.LogValues;
import com.launchdarkly.sdk.EvaluationDetail;
import com.launchdarkly.sdk.EvaluationReason;
-import com.launchdarkly.sdk.LDUser;
+import com.launchdarkly.sdk.LDContext;
import com.launchdarkly.sdk.LDValue;
import com.launchdarkly.sdk.LDValueType;
+import com.launchdarkly.sdk.internal.http.HttpHelpers;
import com.launchdarkly.sdk.server.DataModel.FeatureFlag;
-import com.launchdarkly.sdk.server.integrations.EventProcessorBuilder;
import com.launchdarkly.sdk.server.interfaces.BigSegmentStoreStatusProvider;
import com.launchdarkly.sdk.server.interfaces.BigSegmentsConfiguration;
-import com.launchdarkly.sdk.server.interfaces.DataSource;
import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider;
-import com.launchdarkly.sdk.server.interfaces.DataSourceUpdates;
-import com.launchdarkly.sdk.server.interfaces.DataStore;
import com.launchdarkly.sdk.server.interfaces.DataStoreStatusProvider;
-import com.launchdarkly.sdk.server.interfaces.DataStoreTypes.ItemDescriptor;
-import com.launchdarkly.sdk.server.interfaces.DataStoreTypes.KeyedItems;
-import com.launchdarkly.sdk.server.interfaces.Event;
-import com.launchdarkly.sdk.server.interfaces.EventProcessor;
import com.launchdarkly.sdk.server.interfaces.FlagChangeEvent;
import com.launchdarkly.sdk.server.interfaces.FlagChangeListener;
import com.launchdarkly.sdk.server.interfaces.FlagTracker;
import com.launchdarkly.sdk.server.interfaces.LDClientInterface;
+import com.launchdarkly.sdk.server.subsystems.DataSource;
+import com.launchdarkly.sdk.server.subsystems.DataSourceUpdateSink;
+import com.launchdarkly.sdk.server.subsystems.DataStore;
+import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.ItemDescriptor;
+import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.KeyedItems;
+import com.launchdarkly.sdk.server.subsystems.EventProcessor;
import org.apache.commons.codec.binary.Hex;
@@ -47,7 +46,7 @@
import static com.launchdarkly.sdk.EvaluationDetail.NO_VARIATION;
import static com.launchdarkly.sdk.server.DataModel.FEATURES;
import static com.launchdarkly.sdk.server.DataModel.SEGMENTS;
-import static com.launchdarkly.sdk.server.Util.isAsciiHeaderValue;
+import static com.launchdarkly.sdk.server.subsystems.EventProcessor.NO_VERSION;
/**
* A client for the LaunchDarkly API. Client instances are thread-safe. Applications should instantiate
@@ -64,14 +63,12 @@ public final class LDClient implements LDClientInterface {
final DataStore dataStore;
private final BigSegmentStoreStatusProvider bigSegmentStoreStatusProvider;
private final BigSegmentStoreWrapper bigSegmentStoreWrapper;
- private final DataSourceUpdates dataSourceUpdates;
+ private final DataSourceUpdateSink dataSourceUpdates;
private final DataStoreStatusProviderImpl dataStoreStatusProvider;
private final DataSourceStatusProviderImpl dataSourceStatusProvider;
private final FlagTrackerImpl flagTracker;
private final EventBroadcasterImpl flagChangeBroadcaster;
private final ScheduledExecutorService sharedExecutor;
- private final EventFactory eventFactoryDefault;
- private final EventFactory eventFactoryWithReasons;
private final LDLogger baseLogger;
private final LDLogger evaluationLogger;
private final Evaluator.PrerequisiteEvaluationSink prereqEvalsDefault;
@@ -179,39 +176,26 @@ private static final DataModel.Segment getSegment(DataStore store, String key) {
public LDClient(String sdkKey, LDConfig config) {
checkNotNull(config, "config must not be null");
this.sdkKey = checkNotNull(sdkKey, "sdkKey must not be null");
- if (!isAsciiHeaderValue(sdkKey) ) {
+ if (!HttpHelpers.isAsciiHeaderValue(sdkKey) ) {
throw new IllegalArgumentException("SDK key contained an invalid character");
}
this.offline = config.offline;
this.sharedExecutor = createSharedExecutor(config);
- boolean eventsDisabled = Components.isNullImplementation(config.eventProcessorFactory);
- if (eventsDisabled) {
- this.eventFactoryDefault = EventFactory.Disabled.INSTANCE;
- this.eventFactoryWithReasons = EventFactory.Disabled.INSTANCE;
- } else {
- this.eventFactoryDefault = EventFactory.DEFAULT;
- this.eventFactoryWithReasons = EventFactory.DEFAULT_WITH_REASONS;
- }
-
- // Do not create diagnostic accumulator if config has specified is opted out, or if we're not using the
- // standard event processor
- final boolean useDiagnostics = !config.diagnosticOptOut && config.eventProcessorFactory instanceof EventProcessorBuilder;
- final ClientContextImpl context = new ClientContextImpl(
+ final ClientContextImpl context = ClientContextImpl.fromConfig(
sdkKey,
config,
- sharedExecutor,
- useDiagnostics ? new DiagnosticAccumulator(new DiagnosticId(sdkKey)) : null
+ sharedExecutor
);
- this.baseLogger = context.getBasic().getBaseLogger();
+ this.baseLogger = context.getBaseLogger();
this.evaluationLogger = this.baseLogger.subLogger(Loggers.EVALUATION_LOGGER_NAME);
- this.eventProcessor = config.eventProcessorFactory.createEventProcessor(context);
+ this.eventProcessor = config.events.build(context);
EventBroadcasterImpl bigSegmentStoreStatusNotifier =
EventBroadcasterImpl.forBigSegmentStoreStatus(sharedExecutor, baseLogger);
- BigSegmentsConfiguration bigSegmentsConfig = config.bigSegmentsConfigBuilder.createBigSegmentsConfiguration(context);
+ BigSegmentsConfiguration bigSegmentsConfig = config.bigSegments.build(context);
if (bigSegmentsConfig.getStore() != null) {
bigSegmentStoreWrapper = new BigSegmentStoreWrapper(bigSegmentsConfig, bigSegmentStoreStatusNotifier, sharedExecutor,
this.baseLogger.subLogger(Loggers.BIG_SEGMENTS_LOGGER_NAME));
@@ -223,7 +207,7 @@ public LDClient(String sdkKey, LDConfig config) {
EventBroadcasterImpl dataStoreStatusNotifier =
EventBroadcasterImpl.forDataStoreStatus(sharedExecutor, baseLogger);
DataStoreUpdatesImpl dataStoreUpdates = new DataStoreUpdatesImpl(dataStoreStatusNotifier);
- this.dataStore = config.dataStoreFactory.createDataStore(context, dataStoreUpdates);
+ this.dataStore = config.dataStore.build(context.withDataStoreUpdateSink(dataStoreUpdates));
this.evaluator = new Evaluator(new Evaluator.Getters() {
public DataModel.FeatureFlag getFlag(String key) {
@@ -242,7 +226,7 @@ public BigSegmentStoreWrapper.BigSegmentsQueryResult getBigSegments(String key)
this.flagChangeBroadcaster = EventBroadcasterImpl.forFlagChangeEvents(sharedExecutor, baseLogger);
this.flagTracker = new FlagTrackerImpl(flagChangeBroadcaster,
- (key, user) -> jsonValueVariation(key, user, LDValue.ofNull()));
+ (key, ctx) -> jsonValueVariation(key, ctx, LDValue.ofNull()));
this.dataStoreStatusProvider = new DataStoreStatusProviderImpl(this.dataStore, dataStoreUpdates);
@@ -258,7 +242,7 @@ public BigSegmentStoreWrapper.BigSegmentsQueryResult getBigSegments(String key)
baseLogger
);
this.dataSourceUpdates = dataSourceUpdates;
- this.dataSource = config.dataSourceFactory.createDataSource(context, dataSourceUpdates);
+ this.dataSource = config.dataSource.build(context.withDataSourceUpdateSink(dataSourceUpdates));
this.dataSourceStatusProvider = new DataSourceStatusProviderImpl(dataSourceStatusNotifier, dataSourceUpdates);
this.prereqEvalsDefault = makePrerequisiteEventSender(false);
@@ -293,45 +277,45 @@ public boolean isInitialized() {
}
@Override
- public void track(String eventName, LDUser user) {
- trackData(eventName, user, LDValue.ofNull());
+ public void track(String eventName, LDContext context) {
+ trackData(eventName, context, LDValue.ofNull());
}
@Override
- public void trackData(String eventName, LDUser user, LDValue data) {
- if (user == null || user.getKey() == null || user.getKey().isEmpty()) {
- baseLogger.warn("Track called with null user or null/empty user key!");
+ public void trackData(String eventName, LDContext context, LDValue data) {
+ if (context == null) {
+ baseLogger.warn("Track called with null context!");
+ } else if (!context.isValid()) {
+ baseLogger.warn("Track called with invalid context: {}", context.getError());
} else {
- eventProcessor.sendEvent(eventFactoryDefault.newCustomEvent(eventName, user, data, null));
+ eventProcessor.recordCustomEvent(context, eventName, data, null);
}
}
@Override
- public void trackMetric(String eventName, LDUser user, LDValue data, double metricValue) {
- if (user == null || user.getKey() == null || user.getKey().isEmpty()) {
- baseLogger.warn("Track called with null user or null/empty user key!");
+ public void trackMetric(String eventName, LDContext context, LDValue data, double metricValue) {
+ if (context == null) {
+ baseLogger.warn("Track called with null context!");
+ } else if (!context.isValid()) {
+ baseLogger.warn("Track called with invalid context: {}", context.getError());
} else {
- eventProcessor.sendEvent(eventFactoryDefault.newCustomEvent(eventName, user, data, metricValue));
+ eventProcessor.recordCustomEvent(context, eventName, data, metricValue);
}
}
@Override
- public void identify(LDUser user) {
- if (user == null || user.getKey() == null || user.getKey().isEmpty()) {
- baseLogger.warn("Identify called with null user or null/empty user key!");
+ public void identify(LDContext context) {
+ if (context == null) {
+ baseLogger.warn("Identify called with null context!");
+ } else if (!context.isValid()) {
+ baseLogger.warn("Identify called with invalid context: {}", context.getError());
} else {
- eventProcessor.sendEvent(eventFactoryDefault.newIdentifyEvent(user));
+ eventProcessor.recordIdentifyEvent(context);
}
}
- private void sendFlagRequestEvent(Event.FeatureRequest event) {
- if (event != null) {
- eventProcessor.sendEvent(event);
- }
- }
-
@Override
- public FeatureFlagsState allFlagsState(LDUser user, FlagsStateOption... options) {
+ public FeatureFlagsState allFlagsState(LDContext context, FlagsStateOption... options) {
FeatureFlagsState.Builder builder = FeatureFlagsState.builder(options);
if (isOffline()) {
@@ -347,11 +331,15 @@ public FeatureFlagsState allFlagsState(LDUser user, FlagsStateOption... options)
}
}
- if (user == null || user.getKey() == null) {
- evaluationLogger.warn("allFlagsState() was called with null user or null user key! returning no data");
+ if (context == null) {
+ evaluationLogger.warn("allFlagsState() was called with null context! returning no data");
return builder.valid(false).build();
}
-
+ if (!context.isValid()) {
+ evaluationLogger.warn("allFlagsState() was called with invalid context: {}", context.getError());
+ return builder.valid(false).build();
+ }
+
boolean clientSideOnly = FlagsStateOption.hasOption(options, FlagsStateOption.CLIENT_SIDE_ONLY);
KeyedItems flags;
try {
@@ -371,7 +359,7 @@ public FeatureFlagsState allFlagsState(LDUser user, FlagsStateOption... options)
continue;
}
try {
- EvalResult result = evaluator.evaluate(flag, user, null);
+ EvalResult result = evaluator.evaluate(flag, context, null);
// Note: the null parameter to evaluate() is for the PrerequisiteEvaluationSink; allFlagsState should
// not generate evaluation events, so we don't want the evaluator to generate any prerequisite evaluation
// events either.
@@ -385,63 +373,63 @@ public FeatureFlagsState allFlagsState(LDUser user, FlagsStateOption... options)
}
return builder.build();
}
-
+
@Override
- public boolean boolVariation(String featureKey, LDUser user, boolean defaultValue) {
- return evaluate(featureKey, user, LDValue.of(defaultValue), LDValueType.BOOLEAN).booleanValue();
+ public boolean boolVariation(String featureKey, LDContext context, boolean defaultValue) {
+ return evaluate(featureKey, context, LDValue.of(defaultValue), LDValueType.BOOLEAN).booleanValue();
}
@Override
- public int intVariation(String featureKey, LDUser user, int defaultValue) {
- return evaluate(featureKey, user, LDValue.of(defaultValue), LDValueType.NUMBER).intValue();
+ public int intVariation(String featureKey, LDContext context, int defaultValue) {
+ return evaluate(featureKey, context, LDValue.of(defaultValue), LDValueType.NUMBER).intValue();
}
@Override
- public double doubleVariation(String featureKey, LDUser user, double defaultValue) {
- return evaluate(featureKey, user, LDValue.of(defaultValue), LDValueType.NUMBER).doubleValue();
+ public double doubleVariation(String featureKey, LDContext context, double defaultValue) {
+ return evaluate(featureKey, context, LDValue.of(defaultValue), LDValueType.NUMBER).doubleValue();
}
@Override
- public String stringVariation(String featureKey, LDUser user, String defaultValue) {
- return evaluate(featureKey, user, LDValue.of(defaultValue), LDValueType.STRING).stringValue();
+ public String stringVariation(String featureKey, LDContext context, String defaultValue) {
+ return evaluate(featureKey, context, LDValue.of(defaultValue), LDValueType.STRING).stringValue();
}
@Override
- public LDValue jsonValueVariation(String featureKey, LDUser user, LDValue defaultValue) {
- return evaluate(featureKey, user, LDValue.normalize(defaultValue), null);
+ public LDValue jsonValueVariation(String featureKey, LDContext context, LDValue defaultValue) {
+ return evaluate(featureKey, context, LDValue.normalize(defaultValue), null);
}
@Override
- public EvaluationDetail boolVariationDetail(String featureKey, LDUser user, boolean defaultValue) {
- EvalResult result = evaluateInternal(featureKey, user, LDValue.of(defaultValue),
+ public EvaluationDetail boolVariationDetail(String featureKey, LDContext context, boolean defaultValue) {
+ EvalResult result = evaluateInternal(featureKey, context, LDValue.of(defaultValue),
LDValueType.BOOLEAN, true);
return result.getAsBoolean();
}
@Override
- public EvaluationDetail intVariationDetail(String featureKey, LDUser user, int defaultValue) {
- EvalResult result = evaluateInternal(featureKey, user, LDValue.of(defaultValue),
+ public EvaluationDetail intVariationDetail(String featureKey, LDContext context, int defaultValue) {
+ EvalResult result = evaluateInternal(featureKey, context, LDValue.of(defaultValue),
LDValueType.NUMBER, true);
return result.getAsInteger();
}
@Override
- public EvaluationDetail doubleVariationDetail(String featureKey, LDUser user, double defaultValue) {
- EvalResult result = evaluateInternal(featureKey, user, LDValue.of(defaultValue),
+ public EvaluationDetail doubleVariationDetail(String featureKey, LDContext context, double defaultValue) {
+ EvalResult result = evaluateInternal(featureKey, context, LDValue.of(defaultValue),
LDValueType.NUMBER, true);
return result.getAsDouble();
}
@Override
- public EvaluationDetail stringVariationDetail(String featureKey, LDUser user, String defaultValue) {
- EvalResult result = evaluateInternal(featureKey, user, LDValue.of(defaultValue),
+ public EvaluationDetail stringVariationDetail(String featureKey, LDContext context, String defaultValue) {
+ EvalResult result = evaluateInternal(featureKey, context, LDValue.of(defaultValue),
LDValueType.STRING, true);
return result.getAsString();
}
@Override
- public EvaluationDetail jsonValueVariationDetail(String featureKey, LDUser user, LDValue defaultValue) {
- EvalResult result = evaluateInternal(featureKey, user, LDValue.normalize(defaultValue),
+ public EvaluationDetail jsonValueVariationDetail(String featureKey, LDContext context, LDValue defaultValue) {
+ EvalResult result = evaluateInternal(featureKey, context, LDValue.normalize(defaultValue),
null, true);
return result.getAnyType();
}
@@ -470,47 +458,46 @@ public boolean isFlagKnown(String featureKey) {
return false;
}
- private LDValue evaluate(String featureKey, LDUser user, LDValue defaultValue, LDValueType requireType) {
- return evaluateInternal(featureKey, user, defaultValue, requireType, false).getValue();
+ private LDValue evaluate(String featureKey, LDContext context, LDValue defaultValue, LDValueType requireType) {
+ return evaluateInternal(featureKey, context, defaultValue, requireType, false).getValue();
}
private EvalResult errorResult(EvaluationReason.ErrorKind errorKind, final LDValue defaultValue) {
return EvalResult.of(defaultValue, NO_VARIATION, EvaluationReason.error(errorKind));
}
- private EvalResult evaluateInternal(String featureKey, LDUser user, LDValue defaultValue,
+ private EvalResult evaluateInternal(String featureKey, LDContext context, LDValue defaultValue,
LDValueType requireType, boolean withDetail) {
- EventFactory eventFactory = withDetail ? eventFactoryWithReasons : eventFactoryDefault;
if (!isInitialized()) {
if (dataStore.isInitialized()) {
evaluationLogger.warn("Evaluation called before client initialized for feature flag \"{}\"; using last known values from data store", featureKey);
} else {
evaluationLogger.warn("Evaluation called before client initialized for feature flag \"{}\"; data store unavailable, returning default value", featureKey);
- sendFlagRequestEvent(eventFactory.newUnknownFeatureRequestEvent(featureKey, user, defaultValue,
- EvaluationReason.ErrorKind.CLIENT_NOT_READY));
+ recordEvaluationUnknownFlagErrorEvent(featureKey, context, defaultValue,
+ EvaluationReason.ErrorKind.CLIENT_NOT_READY, withDetail);
return errorResult(EvaluationReason.ErrorKind.CLIENT_NOT_READY, defaultValue);
}
}
+ if (context == null) {
+ evaluationLogger.warn("Null context when evaluating flag \"{}\"; returning default value", featureKey);
+ return errorResult(EvaluationReason.ErrorKind.USER_NOT_SPECIFIED, defaultValue);
+ }
+ if (!context.isValid()) {
+ evaluationLogger.warn("Invalid context when evaluating flag \"{}\"; returning default value: {}", featureKey, context.getError());
+ return errorResult(EvaluationReason.ErrorKind.USER_NOT_SPECIFIED, defaultValue);
+ }
+
DataModel.FeatureFlag featureFlag = null;
try {
featureFlag = getFlag(dataStore, featureKey);
if (featureFlag == null) {
evaluationLogger.info("Unknown feature flag \"{}\"; returning default value", featureKey);
- sendFlagRequestEvent(eventFactory.newUnknownFeatureRequestEvent(featureKey, user, defaultValue,
- EvaluationReason.ErrorKind.FLAG_NOT_FOUND));
+ recordEvaluationUnknownFlagErrorEvent(featureKey, context, defaultValue,
+ EvaluationReason.ErrorKind.FLAG_NOT_FOUND, withDetail);
return errorResult(EvaluationReason.ErrorKind.FLAG_NOT_FOUND, defaultValue);
}
- if (user == null || user.getKey() == null) {
- evaluationLogger.warn("Null user or null user key when evaluating flag \"{}\"; returning default value", featureKey);
- sendFlagRequestEvent(eventFactory.newDefaultFeatureRequestEvent(featureFlag, user, defaultValue,
- EvaluationReason.ErrorKind.USER_NOT_SPECIFIED));
- return errorResult(EvaluationReason.ErrorKind.USER_NOT_SPECIFIED, defaultValue);
- }
- if (user.getKey().isEmpty()) {
- evaluationLogger.warn("User key is blank. Flag evaluation will proceed, but the user will not be stored in LaunchDarkly");
- }
- EvalResult evalResult = evaluator.evaluate(featureFlag, user,
+ EvalResult evalResult = evaluator.evaluate(featureFlag, context,
withDetail ? prereqEvalsWithReasons : prereqEvalsDefault);
if (evalResult.isNoVariation()) {
evalResult = EvalResult.of(defaultValue, evalResult.getVariationIndex(), evalResult.getReason());
@@ -520,23 +507,22 @@ private EvalResult evaluateInternal(String featureKey, LDUser user, LDValue defa
!value.isNull() &&
value.getType() != requireType) {
evaluationLogger.error("Feature flag evaluation expected result as {}, but got {}", defaultValue.getType(), value.getType());
- sendFlagRequestEvent(eventFactory.newUnknownFeatureRequestEvent(featureKey, user, defaultValue,
- EvaluationReason.ErrorKind.WRONG_TYPE));
+ recordEvaluationErrorEvent(featureFlag, context, defaultValue, EvaluationReason.ErrorKind.WRONG_TYPE, withDetail);
return errorResult(EvaluationReason.ErrorKind.WRONG_TYPE, defaultValue);
}
}
- sendFlagRequestEvent(eventFactory.newFeatureRequestEvent(featureFlag, user, evalResult, defaultValue));
+ recordEvaluationEvent(featureFlag, context, evalResult, defaultValue, withDetail, null);
return evalResult;
} catch (Exception e) {
evaluationLogger.error("Encountered exception while evaluating feature flag \"{}\": {}", featureKey,
LogValues.exceptionSummary(e));
evaluationLogger.debug("{}", LogValues.exceptionTrace(e));
if (featureFlag == null) {
- sendFlagRequestEvent(eventFactory.newUnknownFeatureRequestEvent(featureKey, user, defaultValue,
- EvaluationReason.ErrorKind.EXCEPTION));
+ recordEvaluationUnknownFlagErrorEvent(featureKey, context, defaultValue,
+ EvaluationReason.ErrorKind.EXCEPTION, withDetail);
} else {
- sendFlagRequestEvent(eventFactory.newDefaultFeatureRequestEvent(featureFlag, user, defaultValue,
- EvaluationReason.ErrorKind.EXCEPTION));
+ recordEvaluationErrorEvent(featureFlag, context, defaultValue,
+ EvaluationReason.ErrorKind.EXCEPTION, withDetail);
}
return EvalResult.of(defaultValue, NO_VARIATION, EvaluationReason.exception(e));
}
@@ -562,6 +548,12 @@ public DataSourceStatusProvider getDataSourceStatusProvider() {
return dataSourceStatusProvider;
}
+ /**
+ * Shuts down the client and releases any resources it is using.
+ *
+ * Unless it is offline, the client will attempt to deliver any pending analytics events before
+ * closing.
+ */
@Override
public void close() throws IOException {
baseLogger.info("Closing LaunchDarkly Client");
@@ -586,14 +578,14 @@ public boolean isOffline() {
}
@Override
- public String secureModeHash(LDUser user) {
- if (user == null || user.getKey() == null) {
+ public String secureModeHash(LDContext context) {
+ if (context == null || !context.isValid()) {
return null;
}
try {
Mac mac = Mac.getInstance(HMAC_ALGORITHM);
mac.init(new SecretKeySpec(sdkKey.getBytes(), HMAC_ALGORITHM));
- return Hex.encodeHexString(mac.doFinal(user.getKey().getBytes("UTF8")));
+ return Hex.encodeHexString(mac.doFinal(context.getFullyQualifiedKey().getBytes("UTF8")));
} catch (InvalidKeyException | UnsupportedEncodingException | NoSuchAlgorithmException e) {
// COVERAGE: there is no way to cause these errors in a unit test.
baseLogger.error("Could not generate secure mode hash: {}", LogValues.exceptionSummary(e));
@@ -602,11 +594,6 @@ public String secureModeHash(LDUser user) {
return null;
}
- @Override
- public void alias(LDUser user, LDUser previousUser) {
- this.eventProcessor.sendEvent(eventFactoryDefault.newAliasEvent(user, previousUser));
- }
-
/**
* Returns the current version string of the client library.
* @return a version string conforming to Semantic Versioning (http://semver.org)
@@ -616,6 +603,79 @@ public String version() {
return Version.SDK_VERSION;
}
+ private void recordEvaluationUnknownFlagErrorEvent(
+ String flagKey,
+ LDContext context,
+ LDValue defaultValue,
+ EvaluationReason.ErrorKind errorKind,
+ boolean withReasons
+ ) {
+ eventProcessor.recordEvaluationEvent(
+ context,
+ flagKey,
+ NO_VERSION,
+ NO_VARIATION,
+ defaultValue,
+ withReasons ? EvaluationReason.error(errorKind) : null,
+ defaultValue,
+ null,
+ false,
+ null
+ );
+ }
+
+ private void recordEvaluationErrorEvent(
+ FeatureFlag flag,
+ LDContext context,
+ LDValue defaultValue,
+ EvaluationReason.ErrorKind errorKind,
+ boolean withReasons
+ ) {
+ eventProcessor.recordEvaluationEvent(
+ context,
+ flag.getKey(),
+ flag.getVersion(),
+ NO_VARIATION,
+ defaultValue,
+ withReasons ? EvaluationReason.error(errorKind) : null,
+ defaultValue,
+ null,
+ flag.isTrackEvents(),
+ flag.getDebugEventsUntilDate()
+ );
+ }
+
+ private void recordEvaluationEvent(
+ FeatureFlag flag,
+ LDContext context,
+ EvalResult result,
+ LDValue defaultValue,
+ boolean withReasons,
+ String prereqOf
+ ) {
+ eventProcessor.recordEvaluationEvent(
+ context,
+ flag.getKey(),
+ flag.getVersion(),
+ result.getVariationIndex(),
+ result.getValue(),
+ (withReasons || result.isForceReasonTracking()) ? result.getReason() : null,
+ defaultValue,
+ prereqOf,
+ flag.isTrackEvents() || result.isForceReasonTracking(),
+ flag.getDebugEventsUntilDate()
+ );
+ }
+
+ private Evaluator.PrerequisiteEvaluationSink makePrerequisiteEventSender(boolean withReasons) {
+ return new Evaluator.PrerequisiteEvaluationSink() {
+ @Override
+ public void recordPrerequisiteEvaluation(FeatureFlag flag, FeatureFlag prereqOfFlag, LDContext context, EvalResult result) {
+ recordEvaluationEvent(flag, context, result, LDValue.ofNull(), withReasons, prereqOfFlag.getKey());
+ }
+ };
+ }
+
// This executor is used for a variety of SDK tasks such as flag change events, checking the data store
// status after an outage, and the poll task in polling mode. These are all tasks that we do not expect
// to be executing frequently so that it is acceptable to use a single thread to execute them one at a
@@ -629,15 +689,4 @@ private ScheduledExecutorService createSharedExecutor(LDConfig config) {
.build();
return Executors.newSingleThreadScheduledExecutor(threadFactory);
}
-
- private Evaluator.PrerequisiteEvaluationSink makePrerequisiteEventSender(boolean withReasons) {
- final EventFactory factory = withReasons ? eventFactoryWithReasons : eventFactoryDefault;
- return new Evaluator.PrerequisiteEvaluationSink() {
- @Override
- public void recordPrerequisiteEvaluation(FeatureFlag flag, FeatureFlag prereqOfFlag, LDUser user, EvalResult result) {
- eventProcessor.sendEvent(
- factory.newPrerequisiteFeatureRequestEvent(flag, user, result, prereqOfFlag));
- }
- };
- }
}
diff --git a/src/main/java/com/launchdarkly/sdk/server/LDConfig.java b/src/main/java/com/launchdarkly/sdk/server/LDConfig.java
index 17a003f97..a10e894d1 100644
--- a/src/main/java/com/launchdarkly/sdk/server/LDConfig.java
+++ b/src/main/java/com/launchdarkly/sdk/server/LDConfig.java
@@ -1,20 +1,19 @@
package com.launchdarkly.sdk.server;
import com.launchdarkly.sdk.EvaluationReason;
+import com.launchdarkly.sdk.EvaluationReason.BigSegmentsStatus;
import com.launchdarkly.sdk.server.integrations.ApplicationInfoBuilder;
-import com.launchdarkly.sdk.server.integrations.BigSegmentsConfigurationBuilder;
import com.launchdarkly.sdk.server.integrations.ServiceEndpointsBuilder;
import com.launchdarkly.sdk.server.interfaces.ApplicationInfo;
-import com.launchdarkly.sdk.server.interfaces.BigSegmentStoreFactory;
-import com.launchdarkly.sdk.server.interfaces.DataSourceFactory;
-import com.launchdarkly.sdk.server.interfaces.DataStoreFactory;
-import com.launchdarkly.sdk.server.interfaces.EventProcessor;
-import com.launchdarkly.sdk.server.interfaces.EventProcessorFactory;
-import com.launchdarkly.sdk.server.interfaces.HttpConfigurationFactory;
-import com.launchdarkly.sdk.server.interfaces.LoggingConfigurationFactory;
+import com.launchdarkly.sdk.server.interfaces.BigSegmentsConfiguration;
import com.launchdarkly.sdk.server.interfaces.ServiceEndpoints;
+import com.launchdarkly.sdk.server.subsystems.ComponentConfigurer;
+import com.launchdarkly.sdk.server.subsystems.DataSource;
+import com.launchdarkly.sdk.server.subsystems.DataStore;
+import com.launchdarkly.sdk.server.subsystems.EventProcessor;
+import com.launchdarkly.sdk.server.subsystems.HttpConfiguration;
+import com.launchdarkly.sdk.server.subsystems.LoggingConfiguration;
-import java.net.URI;
import java.time.Duration;
/**
@@ -29,13 +28,13 @@ public final class LDConfig {
protected static final LDConfig DEFAULT = new Builder().build();
final ApplicationInfo applicationInfo;
- final BigSegmentsConfigurationBuilder bigSegmentsConfigBuilder;
- final DataSourceFactory dataSourceFactory;
- final DataStoreFactory dataStoreFactory;
+ final ComponentConfigurer bigSegments;
+ final ComponentConfigurer dataSource;
+ final ComponentConfigurer dataStore;
final boolean diagnosticOptOut;
- final EventProcessorFactory eventProcessorFactory;
- final HttpConfigurationFactory httpConfigFactory;
- final LoggingConfigurationFactory loggingConfigFactory;
+ final ComponentConfigurer events;
+ final ComponentConfigurer http;
+ final ComponentConfigurer logging;
final ServiceEndpoints serviceEndpoints;
final boolean offline;
final Duration startWait;
@@ -43,26 +42,20 @@ public final class LDConfig {
protected LDConfig(Builder builder) {
if (builder.offline) {
- this.dataSourceFactory = Components.externalUpdatesOnly();
- this.eventProcessorFactory = Components.noEvents();
+ this.dataSource = Components.externalUpdatesOnly();
+ this.events = Components.noEvents();
} else {
- this.dataSourceFactory = builder.dataSourceFactory == null ? Components.streamingDataSource() :
- builder.dataSourceFactory;
- this.eventProcessorFactory = builder.eventProcessorFactory == null ? Components.sendEvents() :
- builder.eventProcessorFactory;
+ this.dataSource = builder.dataSource == null ? Components.streamingDataSource() : builder.dataSource;
+ this.events = builder.events == null ? Components.sendEvents() : builder.events;
}
this.applicationInfo = (builder.applicationInfoBuilder == null ? Components.applicationInfo() :
builder.applicationInfoBuilder)
.createApplicationInfo();
- this.bigSegmentsConfigBuilder = builder.bigSegmentsConfigBuilder == null ?
- Components.bigSegments(null) : builder.bigSegmentsConfigBuilder;
- this.dataStoreFactory = builder.dataStoreFactory == null ? Components.inMemoryDataStore() :
- builder.dataStoreFactory;
+ this.bigSegments = builder.bigSegments == null ? Components.bigSegments(null) : builder.bigSegments;
+ this.dataStore = builder.dataStore == null ? Components.inMemoryDataStore() : builder.dataStore;
this.diagnosticOptOut = builder.diagnosticOptOut;
- this.httpConfigFactory = builder.httpConfigFactory == null ? Components.httpConfiguration() :
- builder.httpConfigFactory;
- this.loggingConfigFactory = builder.loggingConfigFactory == null ? Components.logging() :
- builder.loggingConfigFactory;
+ this.http = builder.http == null ? Components.httpConfiguration() : builder.http;
+ this.logging = builder.logging == null ? Components.logging() : builder.logging;
this.offline = builder.offline;
this.serviceEndpoints = (builder.serviceEndpointsBuilder == null ? Components.serviceEndpoints() :
builder.serviceEndpointsBuilder)
@@ -84,13 +77,13 @@ protected LDConfig(Builder builder) {
*/
public static class Builder {
private ApplicationInfoBuilder applicationInfoBuilder = null;
- private BigSegmentsConfigurationBuilder bigSegmentsConfigBuilder = null;
- private DataSourceFactory dataSourceFactory = null;
- private DataStoreFactory dataStoreFactory = null;
+ private ComponentConfigurer bigSegments = null;
+ private ComponentConfigurer dataSource = null;
+ private ComponentConfigurer dataStore = null;
private boolean diagnosticOptOut = false;
- private EventProcessorFactory eventProcessorFactory = null;
- private HttpConfigurationFactory httpConfigFactory = null;
- private LoggingConfigurationFactory loggingConfigFactory = null;
+ private ComponentConfigurer events = null;
+ private ComponentConfigurer http = null;
+ private ComponentConfigurer logging = null;
private ServiceEndpointsBuilder serviceEndpointsBuilder = null;
private boolean offline = false;
private Duration startWait = DEFAULT_START_WAIT;
@@ -132,8 +125,7 @@ public Builder applicationInfo(ApplicationInfoBuilder applicationInfoBuilder) {
* By default, there is no implementation and Big Segments cannot be evaluated. In this case,
* any flag evaluation that references a Big Segment will behave as if no users are included in
* any Big Segments, and the {@link EvaluationReason} associated with any such flag evaluation
- * will have a {@link EvaluationReason.BigSegmentsStatus} of
- * {@link EvaluationReason.BigSegmentsStatus#NOT_CONFIGURED}.
+ * will have a {@link BigSegmentsStatus} of {@link BigSegmentsStatus#NOT_CONFIGURED}.
*
*
* // This example uses the Redis integration
@@ -143,13 +135,13 @@ public Builder applicationInfo(ApplicationInfoBuilder applicationInfoBuilder) {
* .build();
*
*
- * @param bigSegmentsConfigBuilder a configuration builder object returned by
- * {@link Components#bigSegments(BigSegmentStoreFactory)}.
+ * @param bigSegmentsConfigurer the Big Segments configuration builder
* @return the builder
* @since 5.7.0
+ * @see Components#bigSegments(ComponentConfigurer)
*/
- public Builder bigSegments(BigSegmentsConfigurationBuilder bigSegmentsConfigBuilder) {
- this.bigSegmentsConfigBuilder = bigSegmentsConfigBuilder;
+ public Builder bigSegments(ComponentConfigurer bigSegmentsConfigurer) {
+ this.bigSegments = bigSegmentsConfigurer;
return this;
}
@@ -163,12 +155,12 @@ public Builder bigSegments(BigSegmentsConfigurationBuilder bigSegmentsConfigBuil
* {@link com.launchdarkly.sdk.server.integrations.FileData#dataSource()}. See those methods
* for details on how to configure them.
*
- * @param factory the factory object
- * @return the builder
+ * @param dataSourceConfigurer the data source configuration builder
+ * @return the main configuration builder
* @since 4.12.0
*/
- public Builder dataSource(DataSourceFactory factory) {
- this.dataSourceFactory = factory;
+ public Builder dataSource(ComponentConfigurer dataSourceConfigurer) {
+ this.dataSource = dataSourceConfigurer;
return this;
}
@@ -176,14 +168,14 @@ public Builder dataSource(DataSourceFactory factory) {
* Sets the implementation of the data store to be used for holding feature flags and
* related data received from LaunchDarkly, using a factory object. The default is
* {@link Components#inMemoryDataStore()}; for database integrations, use
- * {@link Components#persistentDataStore(com.launchdarkly.sdk.server.interfaces.PersistentDataStoreFactory)}.
+ * {@link Components#persistentDataStore(ComponentConfigurer)}.
*
- * @param factory the factory object
- * @return the builder
+ * @param dataStoreConfigurer the data store configuration builder
+ * @return the main configuration builder
* @since 4.12.0
*/
- public Builder dataStore(DataStoreFactory factory) {
- this.dataStoreFactory = factory;
+ public Builder dataStore(ComponentConfigurer dataStoreConfigurer) {
+ this.dataStore = dataStoreConfigurer;
return this;
}
@@ -210,30 +202,34 @@ public Builder diagnosticOptOut(boolean diagnosticOptOut) {
/**
* Sets the implementation of {@link EventProcessor} to be used for processing analytics events.
*
- * The default is {@link Components#sendEvents()}, but you may choose to use a custom implementation
- * (for instance, a test fixture), or disable events with {@link Components#noEvents()}.
+ * The default is {@link Components#sendEvents()} with no custom options. You may instead call
+ * {@link Components#sendEvents()} and then set custom options for event processing; or, disable
+ * events with {@link Components#noEvents()}; or, choose to use a custom implementation (for
+ * instance, a test fixture).
*
- * @param factory a builder/factory object for event configuration
- * @return the builder
+ * @param eventsConfigurer the events configuration builder
+ * @return the main configuration builder
* @since 4.12.0
+ * @see Components#sendEvents()
+ * @see Components#noEvents()
*/
- public Builder events(EventProcessorFactory factory) {
- this.eventProcessorFactory = factory;
+ public Builder events(ComponentConfigurer eventsConfigurer) {
+ this.events = eventsConfigurer;
return this;
}
/**
- * Sets the SDK's networking configuration, using a factory object. This object is normally a
- * configuration builder obtained from {@link Components#httpConfiguration()}, which has methods
- * for setting individual HTTP-related properties.
+ * Sets the SDK's networking configuration, using a configuration builder. This builder is
+ * obtained from {@link Components#httpConfiguration()}, and has methods for setting individual
+ * HTTP-related properties.
*
- * @param factory the factory object
- * @return the builder
+ * @param httpConfigurer the HTTP configuration builder
+ * @return the main configuration builder
* @since 4.13.0
* @see Components#httpConfiguration()
*/
- public Builder http(HttpConfigurationFactory factory) {
- this.httpConfigFactory = factory;
+ public Builder http(ComponentConfigurer httpConfigurer) {
+ this.http = httpConfigurer;
return this;
}
@@ -242,13 +238,13 @@ public Builder http(HttpConfigurationFactory factory) {
* configuration builder obtained from {@link Components#logging()}, which has methods
* for setting individual logging-related properties.
*
- * @param factory the factory object
- * @return the builder
+ * @param loggingConfigurer the logging configuration builder
+ * @return the main configuration builder
* @since 5.0.0
* @see Components#logging()
*/
- public Builder logging(LoggingConfigurationFactory factory) {
- this.loggingConfigFactory = factory;
+ public Builder logging(ComponentConfigurer loggingConfigurer) {
+ this.logging = loggingConfigurer;
return this;
}
@@ -261,7 +257,7 @@ public Builder logging(LoggingConfigurationFactory factory) {
*
* This is equivalent to calling {@code dataSource(Components.externalUpdatesOnly())} and
* {@code events(Components.noEvents())}. It overrides any other values you may have set for
- * {@link #dataSource(DataSourceFactory)} or {@link #events(EventProcessorFactory)}.
+ * {@link #dataSource(ComponentConfigurer)} or {@link #events(ComponentConfigurer)}.
*
* @param offline when set to true no calls to LaunchDarkly will be made
* @return the builder
diff --git a/src/main/java/com/launchdarkly/sdk/server/LoggingConfigurationImpl.java b/src/main/java/com/launchdarkly/sdk/server/LoggingConfigurationImpl.java
deleted file mode 100644
index 81b7f1904..000000000
--- a/src/main/java/com/launchdarkly/sdk/server/LoggingConfigurationImpl.java
+++ /dev/null
@@ -1,37 +0,0 @@
-package com.launchdarkly.sdk.server;
-
-import com.launchdarkly.logging.LDLogAdapter;
-import com.launchdarkly.sdk.server.interfaces.LoggingConfiguration;
-
-import java.time.Duration;
-
-final class LoggingConfigurationImpl implements LoggingConfiguration {
- private final String baseLoggerName;
- private final LDLogAdapter logAdapter;
- private final Duration logDataSourceOutageAsErrorAfter;
-
- LoggingConfigurationImpl(
- String baseLoggerName,
- LDLogAdapter logAdapter,
- Duration logDataSourceOutageAsErrorAfter
- ) {
- this.baseLoggerName = baseLoggerName;
- this.logAdapter = logAdapter;
- this.logDataSourceOutageAsErrorAfter = logDataSourceOutageAsErrorAfter;
- }
-
- @Override
- public String getBaseLoggerName() {
- return baseLoggerName;
- }
-
- @Override
- public LDLogAdapter getLogAdapter() {
- return logAdapter;
- }
-
- @Override
- public Duration getLogDataSourceOutageAsErrorAfter() {
- return logDataSourceOutageAsErrorAfter;
- }
-}
diff --git a/src/main/java/com/launchdarkly/sdk/server/PersistentDataStoreWrapper.java b/src/main/java/com/launchdarkly/sdk/server/PersistentDataStoreWrapper.java
index 836070293..b58adf354 100644
--- a/src/main/java/com/launchdarkly/sdk/server/PersistentDataStoreWrapper.java
+++ b/src/main/java/com/launchdarkly/sdk/server/PersistentDataStoreWrapper.java
@@ -11,15 +11,15 @@
import com.launchdarkly.logging.LDLogger;
import com.launchdarkly.logging.LogValues;
import com.launchdarkly.sdk.server.integrations.PersistentDataStoreBuilder;
-import com.launchdarkly.sdk.server.interfaces.DataStore;
import com.launchdarkly.sdk.server.interfaces.DataStoreStatusProvider.CacheStats;
-import com.launchdarkly.sdk.server.interfaces.DataStoreTypes.DataKind;
-import com.launchdarkly.sdk.server.interfaces.DataStoreTypes.FullDataSet;
-import com.launchdarkly.sdk.server.interfaces.DataStoreTypes.ItemDescriptor;
-import com.launchdarkly.sdk.server.interfaces.DataStoreTypes.KeyedItems;
-import com.launchdarkly.sdk.server.interfaces.DataStoreTypes.SerializedItemDescriptor;
-import com.launchdarkly.sdk.server.interfaces.DataStoreUpdates;
-import com.launchdarkly.sdk.server.interfaces.PersistentDataStore;
+import com.launchdarkly.sdk.server.subsystems.DataStore;
+import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.DataKind;
+import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.FullDataSet;
+import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.ItemDescriptor;
+import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.KeyedItems;
+import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.SerializedItemDescriptor;
+import com.launchdarkly.sdk.server.subsystems.DataStoreUpdateSink;
+import com.launchdarkly.sdk.server.subsystems.PersistentDataStore;
import java.io.IOException;
import java.time.Duration;
@@ -60,7 +60,7 @@ final class PersistentDataStoreWrapper implements DataStore {
Duration cacheTtl,
PersistentDataStoreBuilder.StaleValuesPolicy staleValuesPolicy,
boolean recordCacheStats,
- DataStoreUpdates dataStoreUpdates,
+ DataStoreUpdateSink dataStoreUpdates,
ScheduledExecutorService sharedExecutor,
LDLogger logger
) {
diff --git a/src/main/java/com/launchdarkly/sdk/server/PollingProcessor.java b/src/main/java/com/launchdarkly/sdk/server/PollingProcessor.java
index 435712878..99d63b552 100644
--- a/src/main/java/com/launchdarkly/sdk/server/PollingProcessor.java
+++ b/src/main/java/com/launchdarkly/sdk/server/PollingProcessor.java
@@ -2,14 +2,15 @@
import com.google.common.annotations.VisibleForTesting;
import com.launchdarkly.logging.LDLogger;
-import com.launchdarkly.sdk.server.interfaces.DataSource;
+import com.launchdarkly.sdk.internal.http.HttpErrors.HttpErrorException;
import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider.ErrorInfo;
import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider.ErrorKind;
import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider.State;
-import com.launchdarkly.sdk.server.interfaces.DataSourceUpdates;
-import com.launchdarkly.sdk.server.interfaces.DataStoreTypes.FullDataSet;
-import com.launchdarkly.sdk.server.interfaces.DataStoreTypes.ItemDescriptor;
-import com.launchdarkly.sdk.server.interfaces.SerializationException;
+import com.launchdarkly.sdk.server.subsystems.DataSource;
+import com.launchdarkly.sdk.server.subsystems.DataSourceUpdateSink;
+import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.FullDataSet;
+import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.ItemDescriptor;
+import com.launchdarkly.sdk.server.subsystems.SerializationException;
import java.io.IOException;
import java.time.Duration;
@@ -20,15 +21,15 @@
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
-import static com.launchdarkly.sdk.server.Util.checkIfErrorIsRecoverableAndLog;
-import static com.launchdarkly.sdk.server.Util.httpErrorDescription;
+import static com.launchdarkly.sdk.internal.http.HttpErrors.checkIfErrorIsRecoverableAndLog;
+import static com.launchdarkly.sdk.internal.http.HttpErrors.httpErrorDescription;
final class PollingProcessor implements DataSource {
private static final String ERROR_CONTEXT_MESSAGE = "on polling request";
private static final String WILL_RETRY_MESSAGE = "will retry at next scheduled poll interval";
@VisibleForTesting final FeatureRequestor requestor;
- private final DataSourceUpdates dataSourceUpdates;
+ private final DataSourceUpdateSink dataSourceUpdates;
private final ScheduledExecutorService scheduler;
@VisibleForTesting final Duration pollInterval;
private final AtomicBoolean initialized = new AtomicBoolean(false);
@@ -38,7 +39,7 @@ final class PollingProcessor implements DataSource {
PollingProcessor(
FeatureRequestor requestor,
- DataSourceUpdates dataSourceUpdates,
+ DataSourceUpdateSink dataSourceUpdates,
ScheduledExecutorService sharedExecutor,
Duration pollInterval,
LDLogger logger
diff --git a/src/main/java/com/launchdarkly/sdk/server/ServerSideDiagnosticEvents.java b/src/main/java/com/launchdarkly/sdk/server/ServerSideDiagnosticEvents.java
new file mode 100644
index 000000000..d01b555d3
--- /dev/null
+++ b/src/main/java/com/launchdarkly/sdk/server/ServerSideDiagnosticEvents.java
@@ -0,0 +1,97 @@
+package com.launchdarkly.sdk.server;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import com.launchdarkly.sdk.LDValue;
+import com.launchdarkly.sdk.ObjectBuilder;
+import com.launchdarkly.sdk.internal.events.DiagnosticConfigProperty;
+import com.launchdarkly.sdk.internal.events.DiagnosticStore;
+import com.launchdarkly.sdk.server.subsystems.ClientContext;
+import com.launchdarkly.sdk.server.subsystems.DiagnosticDescription;
+import com.launchdarkly.sdk.server.subsystems.HttpConfiguration;
+
+abstract class ServerSideDiagnosticEvents {
+ public static DiagnosticStore.SdkDiagnosticParams getSdkDiagnosticParams(
+ ClientContext clientContext,
+ LDConfig config
+ ) {
+ return new DiagnosticStore.SdkDiagnosticParams(
+ clientContext.getSdkKey(),
+ "java-server-sdk",
+ Version.SDK_VERSION,
+ "java",
+ makePlatformData(),
+ ImmutableMap.copyOf(clientContext.getHttp().getDefaultHeaders()),
+ makeConfigProperties(clientContext, config)
+ );
+ }
+
+ private static ImmutableList makeConfigProperties(ClientContext clientContext, LDConfig config) {
+ ImmutableList.Builder