diff --git a/.gitignore b/.gitignore index d327dbe8..d7b37d2f 100644 --- a/.gitignore +++ b/.gitignore @@ -13,4 +13,5 @@ mkmf.log *.gem .DS_Store -Gemfile.lock \ No newline at end of file +Gemfile.lock +.ruby-version diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 49c6df85..edaa9a64 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,20 +1,16 @@ -Contributing to the LaunchDarkly Server-side SDK for Ruby -================================================ +# Contributing to the LaunchDarkly Server-side SDK for Ruby LaunchDarkly has published an [SDK contributor's guide](https://docs.launchdarkly.com/sdk/concepts/contributors-guide) that provides a detailed explanation of how our SDKs work. See below for additional information on how to contribute to this SDK. -Submitting bug reports and feature requests ------------------- +## Submitting bug reports and feature requests The LaunchDarkly SDK team monitors the [issue tracker](https://github.com/launchdarkly/ruby-server-sdk/issues) in the SDK repository. Bug reports and feature requests specific to this SDK should be filed in this issue tracker. The SDK team will respond to all newly filed issues within two business days. -Submitting pull requests ------------------- +## Submitting pull requests We encourage pull requests and other contributions from the community. Before submitting pull requests, ensure that all temporary or unintended code is removed. Don't worry about adding reviewers to the pull request; the LaunchDarkly SDK team will add themselves. The SDK team will acknowledge all pull requests within two business days. -Build instructions ------------------- +## Build instructions ### Prerequisites @@ -35,3 +31,32 @@ bundle exec rspec spec ``` By default, the full unit test suite includes live tests of the integrations for Consul, DynamoDB, and Redis. Those tests expect you to have instances of all of those databases running locally. 
To skip them, set the environment variable `LD_SKIP_DATABASE_TESTS=1` before running the tests. + +### Building documentation + +Documentation is built automatically with YARD for each release. To build the documentation locally: + +``` +cd docs +make +``` + +The output will appear in `docs/build/html`. + +## Code organization + +The SDK's namespacing convention is as follows: + +* `LaunchDarkly`: This namespace contains the most commonly used classes and methods in the SDK, such as `LDClient` and `EvaluationDetail`. +* `LaunchDarkly::Integrations`: This namespace contains entry points for optional features that are related to how the SDK communicates with other systems, such as `Redis`. +* `LaunchDarkly::Interfaces`: This namespace contains types that do not do anything by themselves, but may need to be referenced if you are using optional features or implementing a custom component. + +A special case is the namespace `LaunchDarkly::Impl`, and any namespaces within it. Everything under `Impl` is considered a private implementation detail: all files there are excluded from the generated documentation, and are considered subject to change at any time and not supported for direct use by application developers. We do this because Ruby's scope/visibility system is somewhat limited compared to other languages: a method can be `private` or `protected` within a class, but there is no way to make it visible to other classes in the SDK yet invisible to code outside of the SDK, and there is similarly no way to hide a class. + +So, if there is a class whose existence is entirely an implementation detail, it should be in `Impl`. Similarly, classes that are _not_ in `Impl` must not expose any public members that are not meant to be part of the supported public API. 
This is important because of our guarantee of backward compatibility for all public APIs within a major version: we want to be able to change our implementation details to suit the needs of the code, without worrying about breaking a customer's code. Due to how the language works, we can't actually prevent an application developer from referencing those classes in their code, but this convention makes it clear that such use is discouraged and unsupported. + +## Documenting types and methods + +All classes and public methods outside of `LaunchDarkly::Impl` should have documentation comments. These are used to build the API documentation that is published at https://launchdarkly.github.io/ruby-server-sdk/ and https://www.rubydoc.info/gems/launchdarkly-server-sdk. The documentation generator is YARD; see https://yardoc.org/ for the comment format it uses. + +Please try to make the style and terminology in documentation comments consistent with other documentation comments in the SDK. Also, if a class or method is being added that has an equivalent in other SDKs, and if we have described it in a consistent way in those other SDKs, please reuse the text whenever possible (with adjustments for anything language-specific) rather than writing new text. diff --git a/lib/ldclient-rb/config.rb b/lib/ldclient-rb/config.rb index 95cda71e..3cfbf882 100644 --- a/lib/ldclient-rb/config.rb +++ b/lib/ldclient-rb/config.rb @@ -42,6 +42,7 @@ class Config # @option opts [String] :wrapper_name See {#wrapper_name}. # @option opts [String] :wrapper_version See {#wrapper_version}. # @option opts [#open] :socket_factory See {#socket_factory}. + # @option opts [BigSegmentsConfig] :big_segments See {#big_segments}. 
# def initialize(opts = {}) @base_uri = (opts[:base_uri] || Config.default_base_uri).chomp("/") @@ -73,6 +74,7 @@ def initialize(opts = {}) @wrapper_name = opts[:wrapper_name] @wrapper_version = opts[:wrapper_version] @socket_factory = opts[:socket_factory] + @big_segments = opts[:big_segments] || BigSegmentsConfig.new(store: nil) end # @@ -258,10 +260,21 @@ def offline? # object. # # @return [LaunchDarkly::Interfaces::DataSource|lambda] - # @see FileDataSource + # @see LaunchDarkly::Integrations::FileData + # @see LaunchDarkly::Integrations::TestData # attr_reader :data_source + # + # Configuration options related to Big Segments. + # + # Big Segments are a specific type of user segments. For more information, read the LaunchDarkly + # documentation: https://docs.launchdarkly.com/home/users/big-segments + # + # @return [BigSegmentsConfig] + # + attr_reader :big_segments + # @deprecated This is replaced by {#data_source}. attr_reader :update_processor @@ -484,4 +497,68 @@ def self.minimum_diagnostic_recording_interval 60 end end + + # + # Configuration options related to Big Segments. + # + # Big Segments are a specific type of user segments. For more information, read the LaunchDarkly + # documentation: https://docs.launchdarkly.com/home/users/big-segments + # + # If your application uses Big Segments, you will need to create a `BigSegmentsConfig` that at a + # minimum specifies what database integration to use, and then pass the `BigSegmentsConfig` + # object as the `big_segments` parameter when creating a {Config}. 
+ # + # @example Configuring Big Segments with Redis + # store = LaunchDarkly::Integrations::Redis::new_big_segments_store(redis_url: "redis://my-server") + # config = LaunchDarkly::Config.new(big_segments: + # LaunchDarkly::BigSegmentsConfig.new(store: store)) + # client = LaunchDarkly::LDClient.new(my_sdk_key, config) + # + class BigSegmentsConfig + DEFAULT_USER_CACHE_SIZE = 1000 + DEFAULT_USER_CACHE_TIME = 5 + DEFAULT_STATUS_POLL_INTERVAL = 5 + DEFAULT_STALE_AFTER = 2 * 60 + + # + # Constructor for setting Big Segments options. + # + # @param store [LaunchDarkly::Interfaces::BigSegmentStore] the data store implementation + # @param user_cache_size [Integer] See {#user_cache_size}. + # @param user_cache_time [Float] See {#user_cache_time}. + # @param status_poll_interval [Float] See {#status_poll_interval}. + # @param stale_after [Float] See {#stale_after}. + # + def initialize(store:, user_cache_size: nil, user_cache_time: nil, status_poll_interval: nil, stale_after: nil) + @store = store + @user_cache_size = user_cache_size.nil? ? DEFAULT_USER_CACHE_SIZE : user_cache_size + @user_cache_time = user_cache_time.nil? ? DEFAULT_USER_CACHE_TIME : user_cache_time + @status_poll_interval = status_poll_interval.nil? ? DEFAULT_STATUS_POLL_INTERVAL : status_poll_interval + @stale_after = stale_after.nil? ? DEFAULT_STALE_AFTER : stale_after + end + + # The implementation of {LaunchDarkly::Interfaces::BigSegmentStore} that will be used to + # query the Big Segments database. + # @return [LaunchDarkly::Interfaces::BigSegmentStore] + attr_reader :store + + # The maximum number of users whose Big Segment state will be cached by the SDK at any given time. + # @return [Integer] + attr_reader :user_cache_size + + # The maximum length of time (in seconds) that the Big Segment state for a user will be cached + # by the SDK. 
+ # @return [Float] + attr_reader :user_cache_time + + # The interval (in seconds) at which the SDK will poll the Big Segment store to make sure it is + # available and to determine how long ago it was updated. + # @return [Float] + attr_reader :status_poll_interval + + # The maximum length of time between updates of the Big Segments data before the data is + # considered out of date. + # @return [Float] + attr_reader :stale_after + end end diff --git a/lib/ldclient-rb/evaluation_detail.rb b/lib/ldclient-rb/evaluation_detail.rb index 4eae67bc..676da7a3 100644 --- a/lib/ldclient-rb/evaluation_detail.rb +++ b/lib/ldclient-rb/evaluation_detail.rb @@ -110,27 +110,42 @@ class EvaluationReason # Indicates the general category of the reason. Will always be one of the class constants such # as {#OFF}. + # @return [Symbol] attr_reader :kind # The index of the rule that was matched (0 for the first rule in the feature flag). If # {#kind} is not {#RULE_MATCH}, this will be `nil`. + # @return [Integer|nil] attr_reader :rule_index # A unique string identifier for the matched rule, which will not change if other rules are added # or deleted. If {#kind} is not {#RULE_MATCH}, this will be `nil`. + # @return [String] attr_reader :rule_id # A boolean or nil value representing if the rule or fallthrough has an experiment rollout. + # @return [Boolean|nil] attr_reader :in_experiment # The key of the prerequisite flag that did not return the desired variation. If {#kind} is not # {#PREREQUISITE_FAILED}, this will be `nil`. + # @return [String] attr_reader :prerequisite_key # A value indicating the general category of error. This should be one of the class constants such # as {#ERROR_FLAG_NOT_FOUND}. If {#kind} is not {#ERROR}, it will be `nil`. + # @return [Symbol] attr_reader :error_kind + # Describes the validity of Big Segment information, if and only if the flag evaluation required + # querying at least one Big Segment. Otherwise it returns `nil`. 
Possible values are defined by + # {BigSegmentsStatus}. + # + # Big Segments are a specific kind of user segments. For more information, read the LaunchDarkly + # documentation: https://docs.launchdarkly.com/home/users/big-segments + # @return [Symbol] + attr_reader :big_segments_status + # Returns an instance whose {#kind} is {#OFF}. # @return [EvaluationReason] def self.off @@ -196,11 +211,13 @@ def self.error(error_kind) def ==(other) if other.is_a? EvaluationReason @kind == other.kind && @rule_index == other.rule_index && @rule_id == other.rule_id && - @prerequisite_key == other.prerequisite_key && @error_kind == other.error_kind + @prerequisite_key == other.prerequisite_key && @error_kind == other.error_kind && + @big_segments_status == other.big_segments_status elsif other.is_a? Hash @kind.to_s == other[:kind] && @rule_index == other[:ruleIndex] && @rule_id == other[:ruleId] && @prerequisite_key == other[:prerequisiteKey] && - (other[:errorKind] == @error_kind.nil? ? nil : @error_kind.to_s) + (other[:errorKind] == @error_kind.nil? ? nil : @error_kind.to_s) && + (other[:bigSegmentsStatus] == @big_segments_status.nil? ? nil : @big_segments_status.to_s) end end @@ -242,7 +259,7 @@ def as_json(*) # parameter is unused, but may be passed if we're using the json # enabled for a flag and the application called variation_detail, or 2. experimentation is # enabled for an evaluation. We can't reuse these hashes because an application could call # as_json and then modify the result. - case @kind + ret = case @kind when :RULE_MATCH if @in_experiment { kind: @kind, ruleIndex: @rule_index, ruleId: @rule_id, inExperiment: @in_experiment } @@ -262,6 +279,10 @@ def as_json(*) # parameter is unused, but may be passed if we're using the json else { kind: @kind } end + if !@big_segments_status.nil? + ret[:bigSegmentsStatus] = @big_segments_status + end + ret end # Same as {#as_json}, but converts the JSON structure into a string. 
@@ -285,14 +306,24 @@ def [](key) @prerequisite_key when :errorKind @error_kind.nil? ? nil : @error_kind.to_s + when :bigSegmentsStatus + @big_segments_status.nil? ? nil : @big_segments_status.to_s else nil end end - private + def with_big_segments_status(big_segments_status) + return self if @big_segments_status == big_segments_status + EvaluationReason.new(@kind, @rule_index, @rule_id, @prerequisite_key, @error_kind, @in_experiment, big_segments_status) + end - def initialize(kind, rule_index, rule_id, prerequisite_key, error_kind, in_experiment=nil) + # + # Constructor that sets all properties. Applications should not normally use this constructor, + # but should use class methods like {#off} to avoid creating unnecessary instances. + # + def initialize(kind, rule_index, rule_id, prerequisite_key, error_kind, in_experiment=nil, + big_segments_status = nil) @kind = kind.to_sym @rule_index = rule_index @rule_id = rule_id @@ -301,11 +332,10 @@ def initialize(kind, rule_index, rule_id, prerequisite_key, error_kind, in_exper @prerequisite_key.freeze if !prerequisite_key.nil? @error_kind = error_kind @in_experiment = in_experiment + @big_segments_status = big_segments_status end - private_class_method :new - - def self.make_error(error_kind) + private_class_method def self.make_error(error_kind) new(:ERROR, nil, nil, nil, error_kind) end @@ -321,4 +351,33 @@ def self.make_error(error_kind) ERROR_EXCEPTION => make_error(ERROR_EXCEPTION) } end + + # + # Defines the possible values of {EvaluationReason#big_segments_status}. + # + module BigSegmentsStatus + # + # Indicates that the Big Segment query involved in the flag evaluation was successful, and + # that the segment state is considered up to date. + # + HEALTHY = :HEALTHY + + # + # Indicates that the Big Segment query involved in the flag evaluation was successful, but + # that the segment state may not be up to date. 
+ # + STALE = :STALE + + # + # Indicates that Big Segments could not be queried for the flag evaluation because the SDK + # configuration did not include a Big Segment store. + # + NOT_CONFIGURED = :NOT_CONFIGURED + + # + # Indicates that the Big Segment query involved in the flag evaluation failed, for instance + # due to a database error. + # + STORE_ERROR = :STORE_ERROR + end end diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index f58ddf7c..30440353 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -1,314 +1,23 @@ -require 'concurrent/atomics' -require 'json' -require 'yaml' -require 'pathname' +require "ldclient-rb/integrations/file_data" module LaunchDarkly - # To avoid pulling in 'listen' and its transitive dependencies for people who aren't using the - # file data source or who don't need auto-updating, we only enable auto-update if the 'listen' - # gem has been provided by the host app. - # @private - @@have_listen = false - begin - require 'listen' - @@have_listen = true - rescue LoadError - end - - # @private - def self.have_listen? - @@have_listen - end - - # - # Provides a way to use local files as a source of feature flag state. This allows using a - # predetermined feature flag state without an actual LaunchDarkly connection. - # - # Reading flags from a file is only intended for pre-production environments. Production - # environments should always be configured to receive flag updates from LaunchDarkly. - # - # To use this component, call {FileDataSource#factory}, and store its return value in the - # {Config#data_source} property of your LaunchDarkly client configuration. 
In the options - # to `factory`, set `paths` to the file path(s) of your data file(s): - # - # file_source = FileDataSource.factory(paths: [ myFilePath ]) - # config = LaunchDarkly::Config.new(data_source: file_source) - # - # This will cause the client not to connect to LaunchDarkly to get feature flags. The - # client may still make network connections to send analytics events, unless you have disabled - # this with {Config#send_events} or {Config#offline?}. - # - # Flag data files can be either JSON or YAML. They contain an object with three possible - # properties: - # - # - `flags`: Feature flag definitions. - # - `flagValues`: Simplified feature flags that contain only a value. - # - `segments`: User segment definitions. - # - # The format of the data in `flags` and `segments` is defined by the LaunchDarkly application - # and is subject to change. Rather than trying to construct these objects yourself, it is simpler - # to request existing flags directly from the LaunchDarkly server in JSON format, and use this - # output as the starting point for your file. In Linux you would do this: - # - # ``` - # curl -H "Authorization: YOUR_SDK_KEY" https://sdk.launchdarkly.com/sdk/latest-all - # ``` # - # The output will look something like this (but with many more properties): + # Deprecated entry point for the file data source feature. # - # { - # "flags": { - # "flag-key-1": { - # "key": "flag-key-1", - # "on": true, - # "variations": [ "a", "b" ] - # } - # }, - # "segments": { - # "segment-key-1": { - # "key": "segment-key-1", - # "includes": [ "user-key-1" ] - # } - # } - # } + # The new preferred usage is {LaunchDarkly::Integrations::FileData#data_source}. # - # Data in this format allows the SDK to exactly duplicate all the kinds of flag behavior supported - # by LaunchDarkly. However, in many cases you will not need this complexity, but will just want to - # set specific flag keys to specific values. 
For that, you can use a much simpler format: - # - # { - # "flagValues": { - # "my-string-flag-key": "value-1", - # "my-boolean-flag-key": true, - # "my-integer-flag-key": 3 - # } - # } - # - # Or, in YAML: - # - # flagValues: - # my-string-flag-key: "value-1" - # my-boolean-flag-key: true - # my-integer-flag-key: 1 - # - # It is also possible to specify both "flags" and "flagValues", if you want some flags - # to have simple values and others to have complex behavior. However, it is an error to use the - # same flag key or segment key more than once, either in a single file or across multiple files. - # - # If the data source encounters any error in any file-- malformed content, a missing file, or a - # duplicate key-- it will not load flags from any of the files. + # @deprecated This is replaced by {LaunchDarkly::Integrations::FileData}. # class FileDataSource # - # Returns a factory for the file data source component. - # - # @param options [Hash] the configuration options - # @option options [Array] :paths The paths of the source files for loading flag data. These - # may be absolute paths or relative to the current working directory. - # @option options [Boolean] :auto_update True if the data source should watch for changes to - # the source file(s) and reload flags whenever there is a change. Auto-updating will only - # work if all of the files you specified have valid directory paths at startup time. - # Note that the default implementation of this feature is based on polling the filesystem, - # which may not perform well. If you install the 'listen' gem (not included by default, to - # avoid adding unwanted dependencies to the SDK), its native file watching mechanism will be - # used instead. However, 'listen' will not be used in JRuby 9.1 due to a known instability. 
- # @option options [Float] :poll_interval The minimum interval, in seconds, between checks for - # file modifications - used only if auto_update is true, and if the native file-watching - # mechanism from 'listen' is not being used. The default value is 1 second. - # @return an object that can be stored in {Config#data_source} + # Deprecated entry point for the file data source feature. # - def self.factory(options={}) - return lambda { |sdk_key, config| FileDataSourceImpl.new(config.feature_store, config.logger, options) } - end - end - - # @private - class FileDataSourceImpl - def initialize(feature_store, logger, options={}) - @feature_store = feature_store - @logger = logger - @paths = options[:paths] || [] - if @paths.is_a? String - @paths = [ @paths ] - end - @auto_update = options[:auto_update] - if @auto_update && LaunchDarkly.have_listen? && !options[:force_polling] # force_polling is used only for tests - # We have seen unreliable behavior in the 'listen' gem in JRuby 9.1 (https://github.com/guard/listen/issues/449). - # Therefore, on that platform we'll fall back to file polling instead. - if defined?(JRUBY_VERSION) && JRUBY_VERSION.start_with?("9.1.") - @use_listen = false - else - @use_listen = true - end - end - @poll_interval = options[:poll_interval] || 1 - @initialized = Concurrent::AtomicBoolean.new(false) - @ready = Concurrent::Event.new - end - - def initialized? - @initialized.value - end - - def start - ready = Concurrent::Event.new - - # We will return immediately regardless of whether the file load succeeded or failed - - # the difference can be detected by checking "initialized?" - ready.set - - load_all - - if @auto_update - # If we're going to watch files, then the start event will be set the first time we get - # a successful load. - @listener = start_listener - end - - ready - end - - def stop - @listener.stop if !@listener.nil? 
- end - - private - - def load_all - all_data = { - FEATURES => {}, - SEGMENTS => {} - } - @paths.each do |path| - begin - load_file(path, all_data) - rescue => exn - Util.log_exception(@logger, "Unable to load flag data from \"#{path}\"", exn) - return - end - end - @feature_store.init(all_data) - @initialized.make_true - end - - def load_file(path, all_data) - parsed = parse_content(IO.read(path)) - (parsed[:flags] || {}).each do |key, flag| - add_item(all_data, FEATURES, flag) - end - (parsed[:flagValues] || {}).each do |key, value| - add_item(all_data, FEATURES, make_flag_with_value(key.to_s, value)) - end - (parsed[:segments] || {}).each do |key, segment| - add_item(all_data, SEGMENTS, segment) - end - end - - def parse_content(content) - # We can use the Ruby YAML parser for both YAML and JSON (JSON is a subset of YAML and while - # not all YAML parsers handle it correctly, we have verified that the Ruby one does, at least - # for all the samples of actual flag data that we've tested). - symbolize_all_keys(YAML.safe_load(content)) - end - - def symbolize_all_keys(value) - # This is necessary because YAML.load doesn't have an option for parsing keys as symbols, and - # the SDK expects all objects to be formatted that way. - if value.is_a?(Hash) - value.map{ |k, v| [k.to_sym, symbolize_all_keys(v)] }.to_h - elsif value.is_a?(Array) - value.map{ |v| symbolize_all_keys(v) } - else - value - end - end - - def add_item(all_data, kind, item) - items = all_data[kind] - raise ArgumentError, "Received unknown item kind #{kind} in add_data" if items.nil? # shouldn't be possible since we preinitialize the hash - key = item[:key].to_sym - if !items[key].nil? 
- raise ArgumentError, "#{kind[:namespace]} key \"#{item[:key]}\" was used more than once" - end - items[key] = item - end - - def make_flag_with_value(key, value) - { - key: key, - on: true, - fallthrough: { variation: 0 }, - variations: [ value ] - } - end - - def start_listener - resolved_paths = @paths.map { |p| Pathname.new(File.absolute_path(p)).realpath.to_s } - if @use_listen - start_listener_with_listen_gem(resolved_paths) - else - FileDataSourcePoller.new(resolved_paths, @poll_interval, self.method(:load_all), @logger) - end - end - - def start_listener_with_listen_gem(resolved_paths) - path_set = resolved_paths.to_set - dir_paths = resolved_paths.map{ |p| File.dirname(p) }.uniq - opts = { latency: @poll_interval } - l = Listen.to(*dir_paths, opts) do |modified, added, removed| - paths = modified + added + removed - if paths.any? { |p| path_set.include?(p) } - load_all - end - end - l.start - l - end - + # The new preferred usage is {LaunchDarkly::Integrations::FileData#data_source}. # - # Used internally by FileDataSource to track data file changes if the 'listen' gem is not available. + # @deprecated This is replaced by {LaunchDarkly::Integrations::FileData#data_source}. # - class FileDataSourcePoller - def initialize(resolved_paths, interval, reloader, logger) - @stopped = Concurrent::AtomicBoolean.new(false) - get_file_times = Proc.new do - ret = {} - resolved_paths.each do |path| - begin - ret[path] = File.mtime(path) - rescue Errno::ENOENT - ret[path] = nil - end - end - ret - end - last_times = get_file_times.call - @thread = Thread.new do - while true - sleep interval - break if @stopped.value - begin - new_times = get_file_times.call - changed = false - last_times.each do |path, old_time| - new_time = new_times[path] - if !new_time.nil? 
&& new_time != old_time - changed = true - break - end - end - reloader.call if changed - rescue => exn - Util.log_exception(logger, "Unexpected exception in FileDataSourcePoller", exn) - end - end - end - end - - def stop - @stopped.make_true - @thread.run # wakes it up if it's sleeping - end + def self.factory(options={}) + LaunchDarkly::Integrations::FileData.data_source(options) end end end diff --git a/lib/ldclient-rb/impl/big_segments.rb b/lib/ldclient-rb/impl/big_segments.rb new file mode 100644 index 00000000..c2d82cd8 --- /dev/null +++ b/lib/ldclient-rb/impl/big_segments.rb @@ -0,0 +1,117 @@ +require "ldclient-rb/config" +require "ldclient-rb/expiring_cache" +require "ldclient-rb/impl/repeating_task" +require "ldclient-rb/interfaces" +require "ldclient-rb/util" + +require "digest" + +module LaunchDarkly + module Impl + BigSegmentMembershipResult = Struct.new(:membership, :status) + + class BigSegmentStoreManager + # use this as a singleton whenever a membership query returns nil; it's safe to reuse it because + # we will never modify the membership properties after they're queried + EMPTY_MEMBERSHIP = {} + + def initialize(big_segments_config, logger) + @store = big_segments_config.store + @stale_after_millis = big_segments_config.stale_after * 1000 + @status_provider = BigSegmentStoreStatusProviderImpl.new(-> { get_status }) + @logger = logger + @last_status = nil + + if !@store.nil? + @cache = ExpiringCache.new(big_segments_config.user_cache_size, big_segments_config.user_cache_time) + @poll_worker = RepeatingTask.new(big_segments_config.status_poll_interval, 0, -> { poll_store_and_update_status }, logger) + @poll_worker.start + end + end + + attr_reader :status_provider + + def stop + @poll_worker.stop if !@poll_worker.nil? + @store.stop if !@store.nil? 
+ end + + def get_user_membership(user_key) + return nil if !@store + membership = @cache[user_key] + if !membership + begin + membership = @store.get_membership(BigSegmentStoreManager.hash_for_user_key(user_key)) + membership = EMPTY_MEMBERSHIP if membership.nil? + @cache[user_key] = membership + rescue => e + LaunchDarkly::Util.log_exception(@logger, "Big Segment store membership query returned error", e) + return BigSegmentMembershipResult.new(nil, BigSegmentsStatus::STORE_ERROR) + end + end + poll_store_and_update_status if !@last_status + if !@last_status.available + return BigSegmentMembershipResult.new(membership, BigSegmentsStatus::STORE_ERROR) + end + BigSegmentMembershipResult.new(membership, @last_status.stale ? BigSegmentsStatus::STALE : BigSegmentsStatus::HEALTHY) + end + + def get_status + @last_status || poll_store_and_update_status + end + + def poll_store_and_update_status + new_status = Interfaces::BigSegmentStoreStatus.new(false, false) # default to "unavailable" if we don't get a new status below + if !@store.nil? + begin + metadata = @store.get_metadata + new_status = Interfaces::BigSegmentStoreStatus.new(true, !metadata || is_stale(metadata.last_up_to_date)) + rescue => e + LaunchDarkly::Util.log_exception(@logger, "Big Segment store status query returned error", e) + end + end + @last_status = new_status + @status_provider.update_status(new_status) + + new_status + end + + def is_stale(timestamp) + !timestamp || ((Impl::Util.current_time_millis - timestamp) >= @stale_after_millis) + end + + def self.hash_for_user_key(user_key) + Digest::SHA256.base64digest(user_key) + end + end + + # + # Default implementation of the BigSegmentStoreStatusProvider interface. + # + # There isn't much to this because the real implementation is in BigSegmentStoreManager - we pass in a lambda + # that allows us to get the current status from that class. 
Also, the standard Observer methods such as + add_observer are provided for us because BigSegmentStoreStatusProvider mixes in Observer, so all we need to + do to make notifications happen is to call the Observer methods "changed" and "notify_observers". + # + class BigSegmentStoreStatusProviderImpl + include LaunchDarkly::Interfaces::BigSegmentStoreStatusProvider + + def initialize(status_fn) + @status_fn = status_fn + @last_status = nil + end + + def status + @status_fn.call + end + + def update_status(new_status) + if !@last_status || new_status != @last_status + @last_status = new_status + changed + notify_observers(new_status) + end + end + end + end +end diff --git a/lib/ldclient-rb/impl/evaluator.rb b/lib/ldclient-rb/impl/evaluator.rb index 00898cd9..9e10c8ef 100644 --- a/lib/ldclient-rb/impl/evaluator.rb +++ b/lib/ldclient-rb/impl/evaluator.rb @@ -16,16 +16,28 @@ class Evaluator # flag data - or nil if the flag is unknown or deleted # @param get_segment [Function] similar to `get_flag`, but is used to query a user segment. # @param logger [Logger] the client's logger - def initialize(get_flag, get_segment, logger) + def initialize(get_flag, get_segment, get_big_segments_membership, logger) @get_flag = get_flag @get_segment = get_segment + @get_big_segments_membership = get_big_segments_membership @logger = logger end - # Used internally to hold an evaluation result and the events that were generated from prerequisites. The - # `detail` property is an EvaluationDetail. The `events` property can be either an array of feature request - # events or nil. - EvalResult = Struct.new(:detail, :events) + # Used internally to hold an evaluation result and additional state that may be accumulated during an + # evaluation. 
It's simpler and a bit more efficient to represent these as mutable properties rather than + # trying to use a pure functional approach, and since we're not exposing this object to any application code + # or retaining it anywhere, we don't have to be quite as strict about immutability. + # + # The big_segments_status and big_segments_membership properties are not used by the caller; they are used + # during an evaluation to cache the result of any Big Segments query that we've done for this user, because + # we don't want to do multiple queries for the same user if multiple Big Segments are referenced in the same + # evaluation. + EvalResult = Struct.new( + :detail, # the EvaluationDetail representing the evaluation result + :events, # an array of evaluation events generated by prerequisites, or nil + :big_segments_status, + :big_segments_membership + ) # Helper function used internally to construct an EvaluationDetail for an error result. def self.error_result(errorKind, value = nil) @@ -42,30 +54,38 @@ def self.error_result(errorKind, value = nil) # evaluated; the caller is responsible for constructing the feature event for the top-level evaluation # @return [EvalResult] the evaluation result def evaluate(flag, user, event_factory) + result = EvalResult.new if user.nil? || user[:key].nil? - return EvalResult.new(Evaluator.error_result(EvaluationReason::ERROR_USER_NOT_SPECIFIED), []) + result.detail = Evaluator.error_result(EvaluationReason::ERROR_USER_NOT_SPECIFIED) + return result end - - # If the flag doesn't have any prerequisites (which most flags don't) then it cannot generate any feature - # request events for prerequisites and we can skip allocating an array. - if flag[:prerequisites] && !flag[:prerequisites].empty? - events = [] - else - events = nil + + detail = eval_internal(flag, user, result, event_factory) + if !result.big_segments_status.nil? 
+ # If big_segments_status is non-nil at the end of the evaluation, it means a query was done at + # some point and we will want to include the status in the evaluation reason. + detail = EvaluationDetail.new(detail.value, detail.variation_index, + detail.reason.with_big_segments_status(result.big_segments_status)) end + result.detail = detail + return result + end - detail = eval_internal(flag, user, events, event_factory) - return EvalResult.new(detail, events.nil? || events.empty? ? nil : events) + def self.make_big_segment_ref(segment) # method is visible for testing + # The format of Big Segment references is independent of what store implementation is being + # used; the store implementation receives only this string and does not know the details of + # the data model. The Relay Proxy will use the same format when writing to the store. + "#{segment[:key]}.g#{segment[:generation]}" end private - def eval_internal(flag, user, events, event_factory) + def eval_internal(flag, user, state, event_factory) if !flag[:on] return get_off_value(flag, EvaluationReason::off) end - prereq_failure_reason = check_prerequisites(flag, user, events, event_factory) + prereq_failure_reason = check_prerequisites(flag, user, state, event_factory) if !prereq_failure_reason.nil? return get_off_value(flag, prereq_failure_reason) end @@ -83,7 +103,7 @@ def eval_internal(flag, user, events, event_factory) rules = flag[:rules] || [] rules.each_index do |i| rule = rules[i] - if rule_match_user(rule, user) + if rule_match_user(rule, user, state) reason = rule[:_reason] # try to use cached reason for this rule reason = EvaluationReason::rule_match(i, rule[:id]) if reason.nil? 
return get_value_for_variation_or_rollout(flag, rule, user, reason) @@ -98,7 +118,7 @@ def eval_internal(flag, user, events, event_factory) return EvaluationDetail.new(nil, nil, EvaluationReason::fallthrough) end - def check_prerequisites(flag, user, events, event_factory) + def check_prerequisites(flag, user, state, event_factory) (flag[:prerequisites] || []).each do |prerequisite| prereq_ok = true prereq_key = prerequisite[:key] @@ -109,14 +129,15 @@ def check_prerequisites(flag, user, events, event_factory) prereq_ok = false else begin - prereq_res = eval_internal(prereq_flag, user, events, event_factory) + prereq_res = eval_internal(prereq_flag, user, state, event_factory) # Note that if the prerequisite flag is off, we don't consider it a match no matter what its # off variation was. But we still need to evaluate it in order to generate an event. if !prereq_flag[:on] || prereq_res.variation_index != prerequisite[:variation] prereq_ok = false end event = event_factory.new_eval_event(prereq_flag, user, prereq_res, nil, flag) - events.push(event) + state.events = [] if state.events.nil? + state.events.push(event) rescue => exn Util.log_exception(@logger, "Error evaluating prerequisite flag \"#{prereq_key}\" for flag \"#{flag[:key]}\"", exn) prereq_ok = false @@ -130,23 +151,23 @@ def check_prerequisites(flag, user, events, event_factory) nil end - def rule_match_user(rule, user) + def rule_match_user(rule, user, state) return false if !rule[:clauses] (rule[:clauses] || []).each do |clause| - return false if !clause_match_user(clause, user) + return false if !clause_match_user(clause, user, state) end return true end - def clause_match_user(clause, user) + def clause_match_user(clause, user, state) # In the case of a segment match operator, we check if the user is in any of the segments, # and possibly negate if clause[:op].to_sym == :segmentMatch result = (clause[:values] || []).any? { |v| segment = @get_segment.call(v) - !segment.nil? 
&& segment_match_user(segment, user) + !segment.nil? && segment_match_user(segment, user, state) } clause[:negate] ? !result : result else @@ -168,11 +189,42 @@ def clause_match_user_no_segments(clause, user) clause[:negate] ? !result : result end - def segment_match_user(segment, user) + def segment_match_user(segment, user, state) return false unless user[:key] + segment[:unbounded] ? big_segment_match_user(segment, user, state) : simple_segment_match_user(segment, user, true) + end - return true if segment[:included].include?(user[:key]) - return false if segment[:excluded].include?(user[:key]) + def big_segment_match_user(segment, user, state) + if !segment[:generation] + # Big segment queries can only be done if the generation is known. If it's unset, + # that probably means the data store was populated by an older SDK that doesn't know + # about the generation property and therefore dropped it from the JSON data. We'll treat + # that as a "not configured" condition. + state.big_segments_status = BigSegmentsStatus::NOT_CONFIGURED + return false + end + if !state.big_segments_status + result = @get_big_segments_membership.nil? ? nil : @get_big_segments_membership.call(user[:key]) + if result + state.big_segments_membership = result.membership + state.big_segments_status = result.status + else + state.big_segments_membership = nil + state.big_segments_status = BigSegmentsStatus::NOT_CONFIGURED + end + end + segment_ref = Evaluator.make_big_segment_ref(segment) + membership = state.big_segments_membership + included = membership.nil? ? nil : membership[segment_ref] + return included if !included.nil? 
+ simple_segment_match_user(segment, user, false) + end + + def simple_segment_match_user(segment, user, use_includes_and_excludes) + if use_includes_and_excludes + return true if segment[:included].include?(user[:key]) + return false if segment[:excluded].include?(user[:key]) + end (segment[:rules] || []).each do |r| return true if segment_rule_match_user(r, user, segment[:key], segment[:salt]) diff --git a/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb b/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb index 464eb5e4..4085e53d 100644 --- a/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb +++ b/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb @@ -4,10 +4,7 @@ module LaunchDarkly module Impl module Integrations module DynamoDB - # - # Internal implementation of the DynamoDB feature store, intended to be used with CachingStoreWrapper. - # - class DynamoDBFeatureStoreCore + class DynamoDBStoreImplBase begin require "aws-sdk-dynamodb" AWS_SDK_ENABLED = true @@ -19,29 +16,50 @@ class DynamoDBFeatureStoreCore AWS_SDK_ENABLED = false end end - + PARTITION_KEY = "namespace" SORT_KEY = "key" - VERSION_ATTRIBUTE = "version" - ITEM_JSON_ATTRIBUTE = "item" - def initialize(table_name, opts) if !AWS_SDK_ENABLED - raise RuntimeError.new("can't use DynamoDB feature store without the aws-sdk or aws-sdk-dynamodb gem") + raise RuntimeError.new("can't use #{description} without the aws-sdk or aws-sdk-dynamodb gem") end - + @table_name = table_name - @prefix = opts[:prefix] + @prefix = opts[:prefix] ? (opts[:prefix] + ":") : "" @logger = opts[:logger] || Config.default_logger - + if !opts[:existing_client].nil? 
@client = opts[:existing_client] else @client = Aws::DynamoDB::Client.new(opts[:dynamodb_opts] || {}) end + + @logger.info("${description}: using DynamoDB table \"#{table_name}\"") + end + + def stop + # AWS client doesn't seem to have a close method + end - @logger.info("DynamoDBFeatureStore: using DynamoDB table \"#{table_name}\"") + protected def description + "DynamoDB" + end + end + + # + # Internal implementation of the DynamoDB feature store, intended to be used with CachingStoreWrapper. + # + class DynamoDBFeatureStoreCore < DynamoDBStoreImplBase + VERSION_ATTRIBUTE = "version" + ITEM_JSON_ATTRIBUTE = "item" + + def initialize(table_name, opts) + super(table_name, opts) + end + + def description + "DynamoDBFeatureStore" end def init_internal(all_data) @@ -124,14 +142,10 @@ def initialized_internal? !resp.item.nil? && resp.item.length > 0 end - def stop - # AWS client doesn't seem to have a close method - end - private def prefixed_namespace(base_str) - (@prefix.nil? || @prefix == "") ? base_str : "#{@prefix}:#{base_str}" + @prefix + base_str end def namespace_for_kind(kind) @@ -208,6 +222,56 @@ def unmarshal_item(kind, item) end end + class DynamoDBBigSegmentStore < DynamoDBStoreImplBase + KEY_METADATA = 'big_segments_metadata'; + KEY_USER_DATA = 'big_segments_user'; + ATTR_SYNC_TIME = 'synchronizedOn'; + ATTR_INCLUDED = 'included'; + ATTR_EXCLUDED = 'excluded'; + + def initialize(table_name, opts) + super(table_name, opts) + end + + def description + "DynamoDBBigSegmentStore" + end + + def get_metadata + key = @prefix + KEY_METADATA + data = @client.get_item( + table_name: @table_name, + key: { + PARTITION_KEY => key, + SORT_KEY => key + } + ) + timestamp = data.item && data.item[ATTR_SYNC_TIME] ? 
+ data.item[ATTR_SYNC_TIME] : nil + LaunchDarkly::Interfaces::BigSegmentStoreMetadata.new(timestamp) + end + + def get_membership(user_hash) + data = @client.get_item( + table_name: @table_name, + key: { + PARTITION_KEY => @prefix + KEY_USER_DATA, + SORT_KEY => user_hash + }) + return nil if !data.item + excluded_refs = data.item[ATTR_EXCLUDED] || [] + included_refs = data.item[ATTR_INCLUDED] || [] + if excluded_refs.empty? && included_refs.empty? + nil + else + membership = {} + excluded_refs.each { |ref| membership[ref] = false } + included_refs.each { |ref| membership[ref] = true } + membership + end + end + end + class DynamoDBUtil # # Calls client.batch_write_item as many times as necessary to submit all of the given requests. diff --git a/lib/ldclient-rb/impl/integrations/file_data_source.rb b/lib/ldclient-rb/impl/integrations/file_data_source.rb new file mode 100644 index 00000000..d89e4e95 --- /dev/null +++ b/lib/ldclient-rb/impl/integrations/file_data_source.rb @@ -0,0 +1,212 @@ +require 'ldclient-rb/in_memory_store' +require 'ldclient-rb/util' + +require 'concurrent/atomics' +require 'json' +require 'yaml' +require 'pathname' + +module LaunchDarkly + module Impl + module Integrations + class FileDataSourceImpl + # To avoid pulling in 'listen' and its transitive dependencies for people who aren't using the + # file data source or who don't need auto-updating, we only enable auto-update if the 'listen' + # gem has been provided by the host app. + @@have_listen = false + begin + require 'listen' + @@have_listen = true + rescue LoadError + end + + def initialize(feature_store, logger, options={}) + @feature_store = feature_store + @logger = logger + @paths = options[:paths] || [] + if @paths.is_a? 
String + @paths = [ @paths ] + end + @auto_update = options[:auto_update] + if @auto_update && @@have_listen && !options[:force_polling] # force_polling is used only for tests + # We have seen unreliable behavior in the 'listen' gem in JRuby 9.1 (https://github.com/guard/listen/issues/449). + # Therefore, on that platform we'll fall back to file polling instead. + if defined?(JRUBY_VERSION) && JRUBY_VERSION.start_with?("9.1.") + @use_listen = false + else + @use_listen = true + end + end + @poll_interval = options[:poll_interval] || 1 + @initialized = Concurrent::AtomicBoolean.new(false) + @ready = Concurrent::Event.new + end + + def initialized? + @initialized.value + end + + def start + ready = Concurrent::Event.new + + # We will return immediately regardless of whether the file load succeeded or failed - + # the difference can be detected by checking "initialized?" + ready.set + + load_all + + if @auto_update + # If we're going to watch files, then the start event will be set the first time we get + # a successful load. + @listener = start_listener + end + + ready + end + + def stop + @listener.stop if !@listener.nil? 
+ end + + private + + def load_all + all_data = { + FEATURES => {}, + SEGMENTS => {} + } + @paths.each do |path| + begin + load_file(path, all_data) + rescue => exn + LaunchDarkly::Util.log_exception(@logger, "Unable to load flag data from \"#{path}\"", exn) + return + end + end + @feature_store.init(all_data) + @initialized.make_true + end + + def load_file(path, all_data) + parsed = parse_content(IO.read(path)) + (parsed[:flags] || {}).each do |key, flag| + add_item(all_data, FEATURES, flag) + end + (parsed[:flagValues] || {}).each do |key, value| + add_item(all_data, FEATURES, make_flag_with_value(key.to_s, value)) + end + (parsed[:segments] || {}).each do |key, segment| + add_item(all_data, SEGMENTS, segment) + end + end + + def parse_content(content) + # We can use the Ruby YAML parser for both YAML and JSON (JSON is a subset of YAML and while + # not all YAML parsers handle it correctly, we have verified that the Ruby one does, at least + # for all the samples of actual flag data that we've tested). + symbolize_all_keys(YAML.safe_load(content)) + end + + def symbolize_all_keys(value) + # This is necessary because YAML.load doesn't have an option for parsing keys as symbols, and + # the SDK expects all objects to be formatted that way. + if value.is_a?(Hash) + value.map{ |k, v| [k.to_sym, symbolize_all_keys(v)] }.to_h + elsif value.is_a?(Array) + value.map{ |v| symbolize_all_keys(v) } + else + value + end + end + + def add_item(all_data, kind, item) + items = all_data[kind] + raise ArgumentError, "Received unknown item kind #{kind} in add_data" if items.nil? # shouldn't be possible since we preinitialize the hash + key = item[:key].to_sym + if !items[key].nil? 
+ raise ArgumentError, "#{kind[:namespace]} key \"#{item[:key]}\" was used more than once" + end + items[key] = item + end + + def make_flag_with_value(key, value) + { + key: key, + on: true, + fallthrough: { variation: 0 }, + variations: [ value ] + } + end + + def start_listener + resolved_paths = @paths.map { |p| Pathname.new(File.absolute_path(p)).realpath.to_s } + if @use_listen + start_listener_with_listen_gem(resolved_paths) + else + FileDataSourcePoller.new(resolved_paths, @poll_interval, self.method(:load_all), @logger) + end + end + + def start_listener_with_listen_gem(resolved_paths) + path_set = resolved_paths.to_set + dir_paths = resolved_paths.map{ |p| File.dirname(p) }.uniq + opts = { latency: @poll_interval } + l = Listen.to(*dir_paths, opts) do |modified, added, removed| + paths = modified + added + removed + if paths.any? { |p| path_set.include?(p) } + load_all + end + end + l.start + l + end + + # + # Used internally by FileDataSource to track data file changes if the 'listen' gem is not available. + # + class FileDataSourcePoller + def initialize(resolved_paths, interval, reloader, logger) + @stopped = Concurrent::AtomicBoolean.new(false) + get_file_times = Proc.new do + ret = {} + resolved_paths.each do |path| + begin + ret[path] = File.mtime(path) + rescue Errno::ENOENT + ret[path] = nil + end + end + ret + end + last_times = get_file_times.call + @thread = Thread.new do + while true + sleep interval + break if @stopped.value + begin + new_times = get_file_times.call + changed = false + last_times.each do |path, old_time| + new_time = new_times[path] + if !new_time.nil? 
&& new_time != old_time + changed = true + break + end + end + reloader.call if changed + rescue => exn + LaunchDarkly::Util.log_exception(logger, "Unexpected exception in FileDataSourcePoller", exn) + end + end + end + end + + def stop + @stopped.make_true + @thread.run # wakes it up if it's sleeping + end + end + end + end + end +end diff --git a/lib/ldclient-rb/impl/integrations/redis_impl.rb b/lib/ldclient-rb/impl/integrations/redis_impl.rb index f948e54a..193a50da 100644 --- a/lib/ldclient-rb/impl/integrations/redis_impl.rb +++ b/lib/ldclient-rb/impl/integrations/redis_impl.rb @@ -5,10 +5,7 @@ module LaunchDarkly module Impl module Integrations module Redis - # - # Internal implementation of the Redis feature store, intended to be used with CachingStoreWrapper. - # - class RedisFeatureStoreCore + class RedisStoreImplBase begin require "redis" require "connection_pool" @@ -19,22 +16,14 @@ class RedisFeatureStoreCore def initialize(opts) if !REDIS_ENABLED - raise RuntimeError.new("can't use Redis feature store because one of these gems is missing: redis, connection_pool") + raise RuntimeError.new("can't use #{description} because one of these gems is missing: redis, connection_pool") end - @redis_opts = opts[:redis_opts] || Hash.new - if opts[:redis_url] - @redis_opts[:url] = opts[:redis_url] - end - if !@redis_opts.include?(:url) - @redis_opts[:url] = LaunchDarkly::Integrations::Redis::default_redis_url - end - max_connections = opts[:max_connections] || 16 - @pool = opts[:pool] || ConnectionPool.new(size: max_connections) do - ::Redis.new(@redis_opts) - end + @pool = create_redis_pool(opts) + # shutdown pool on close unless the client passed a custom pool and specified not to shutdown @pool_shutdown_on_close = (!opts[:pool] || opts.fetch(:pool_shutdown_on_close, true)) + @prefix = opts[:prefix] || LaunchDarkly::Integrations::Redis::default_prefix @logger = opts[:logger] || Config.default_logger @test_hook = opts[:test_hook] # used for unit tests, deliberately 
undocumented @@ -42,10 +31,53 @@ def initialize(opts) @stopped = Concurrent::AtomicBoolean.new(false) with_connection do |redis| - @logger.info("RedisFeatureStore: using Redis instance at #{redis.connection[:host]}:#{redis.connection[:port]} \ - and prefix: #{@prefix}") + @logger.info("#{description}: using Redis instance at #{redis.connection[:host]}:#{redis.connection[:port]} and prefix: #{@prefix}") + end + end + + def stop + if @stopped.make_true + return unless @pool_shutdown_on_close + @pool.shutdown { |redis| redis.close } + end + end + + protected def description + "Redis" + end + + protected def with_connection + @pool.with { |redis| yield(redis) } + end + + private def create_redis_pool(opts) + redis_opts = opts[:redis_opts] ? opts[:redis_opts].clone : Hash.new + if opts[:redis_url] + redis_opts[:url] = opts[:redis_url] + end + if !redis_opts.include?(:url) + redis_opts[:url] = LaunchDarkly::Integrations::Redis::default_redis_url + end + max_connections = opts[:max_connections] || 16 + return opts[:pool] || ConnectionPool.new(size: max_connections) do + ::Redis.new(redis_opts) end end + end + + # + # Internal implementation of the Redis feature store, intended to be used with CachingStoreWrapper. + # + class RedisFeatureStoreCore < RedisStoreImplBase + def initialize(opts) + super(opts) + + @test_hook = opts[:test_hook] # used for unit tests, deliberately undocumented + end + + def description + "RedisFeatureStore" + end def init_internal(all_data) count = 0 @@ -103,8 +135,7 @@ def upsert_internal(kind, new_item) else final_item = old_item action = new_item[:deleted] ? 
"delete" : "update" - @logger.warn { "RedisFeatureStore: attempted to #{action} #{key} version: #{old_item[:version]} \ - in '#{kind[:namespace]}' with a version that is the same or older: #{new_item[:version]}" } + @logger.warn { "RedisFeatureStore: attempted to #{action} #{key} version: #{old_item[:version]} in '#{kind[:namespace]}' with a version that is the same or older: #{new_item[:version]}" } end redis.unwatch end @@ -117,13 +148,6 @@ def initialized_internal? with_connection { |redis| redis.exists?(inited_key) } end - def stop - if @stopped.make_true - return unless @pool_shutdown_on_close - @pool.shutdown { |redis| redis.close } - end - end - private def before_update_transaction(base_key, key) @@ -142,14 +166,43 @@ def inited_key @prefix + ":$inited" end - def with_connection - @pool.with { |redis| yield(redis) } - end - def get_redis(redis, kind, key) Model.deserialize(kind, redis.hget(items_key(kind), key)) end end + + # + # Internal implementation of the Redis big segment store. + # + class RedisBigSegmentStore < RedisStoreImplBase + KEY_LAST_UP_TO_DATE = ':big_segments_synchronized_on' + KEY_USER_INCLUDE = ':big_segment_include:' + KEY_USER_EXCLUDE = ':big_segment_exclude:' + + def description + "RedisBigSegmentStore" + end + + def get_metadata + value = with_connection { |redis| redis.get(@prefix + KEY_LAST_UP_TO_DATE) } + Interfaces::BigSegmentStoreMetadata.new(value.nil? ? 
nil : value.to_i) + end + + def get_membership(user_hash) + with_connection do |redis| + included_refs = redis.smembers(@prefix + KEY_USER_INCLUDE + user_hash) + excluded_refs = redis.smembers(@prefix + KEY_USER_EXCLUDE + user_hash) + if !included_refs && !excluded_refs + nil + else + membership = {} + excluded_refs.each { |ref| membership[ref] = false } + included_refs.each { |ref| membership[ref] = true } + membership + end + end + end + end end end end diff --git a/lib/ldclient-rb/impl/integrations/test_data/test_data_source.rb b/lib/ldclient-rb/impl/integrations/test_data/test_data_source.rb new file mode 100644 index 00000000..a2799a7d --- /dev/null +++ b/lib/ldclient-rb/impl/integrations/test_data/test_data_source.rb @@ -0,0 +1,40 @@ +require 'concurrent/atomics' +require 'ldclient-rb/interfaces' + +module LaunchDarkly + module Impl + module Integrations + module TestData + # @private + class TestDataSource + include LaunchDarkly::Interfaces::DataSource + + def initialize(feature_store, test_data) + @feature_store = feature_store + @test_data = test_data + end + + def initialized? 
+ true + end + + def start + ready = Concurrent::Event.new + ready.set + init_data = @test_data.make_init_data + @feature_store.init(init_data) + ready + end + + def stop + @test_data.closed_instance(self) + end + + def upsert(kind, item) + @feature_store.upsert(kind, item) + end + end + end + end + end +end diff --git a/lib/ldclient-rb/impl/repeating_task.rb b/lib/ldclient-rb/impl/repeating_task.rb new file mode 100644 index 00000000..bb0255fe --- /dev/null +++ b/lib/ldclient-rb/impl/repeating_task.rb @@ -0,0 +1,47 @@ +require "ldclient-rb/util" + +require "concurrent/atomics" + +module LaunchDarkly + module Impl + class RepeatingTask + def initialize(interval, start_delay, task, logger) + @interval = interval + @start_delay = start_delay + @task = task + @logger = logger + @stopped = Concurrent::AtomicBoolean.new(false) + @worker = nil + end + + def start + @worker = Thread.new do + if @start_delay + sleep(@start_delay) + end + while !@stopped.value do + started_at = Time.now + begin + @task.call + rescue => e + LaunchDarkly::Util.log_exception(@logger, "Uncaught exception from repeating task", e) + end + delta = @interval - (Time.now - started_at) + if delta > 0 + sleep(delta) + end + end + end + end + + def stop + if @stopped.make_true + if @worker && @worker.alive? && @worker != Thread.current + @worker.run # causes the thread to wake up if it's currently in a sleep + @worker.join + end + end + end + end + end +end diff --git a/lib/ldclient-rb/impl/util.rb b/lib/ldclient-rb/impl/util.rb index d1197afe..5fe93a2b 100644 --- a/lib/ldclient-rb/impl/util.rb +++ b/lib/ldclient-rb/impl/util.rb @@ -1,7 +1,10 @@ - module LaunchDarkly module Impl module Util + def self.is_bool(aObject) + [true,false].include? 
aObject + end + def self.current_time_millis (Time.now.to_f * 1000).to_i end diff --git a/lib/ldclient-rb/integrations.rb b/lib/ldclient-rb/integrations.rb index 8c9f6249..2a2ac216 100644 --- a/lib/ldclient-rb/integrations.rb +++ b/lib/ldclient-rb/integrations.rb @@ -1,55 +1,6 @@ require "ldclient-rb/integrations/consul" require "ldclient-rb/integrations/dynamodb" +require "ldclient-rb/integrations/file_data" require "ldclient-rb/integrations/redis" +require "ldclient-rb/integrations/test_data" require "ldclient-rb/integrations/util/store_wrapper" - -module LaunchDarkly - # - # Tools for connecting the LaunchDarkly client to other software. - # - module Integrations - # - # Integration with [Consul](https://www.consul.io/). - # - # Note that in order to use this integration, you must first install the gem `diplomat`. - # - # @since 5.5.0 - # - module Consul - # code is in ldclient-rb/impl/integrations/consul_impl - end - - # - # Integration with [DynamoDB](https://aws.amazon.com/dynamodb/). - # - # Note that in order to use this integration, you must first install one of the AWS SDK gems: either - # `aws-sdk-dynamodb`, or the full `aws-sdk`. - # - # @since 5.5.0 - # - module DynamoDB - # code is in ldclient-rb/impl/integrations/dynamodb_impl - end - - # - # Integration with [Redis](https://redis.io/). - # - # Note that in order to use this integration, you must first install the `redis` and `connection-pool` - # gems. - # - # @since 5.5.0 - # - module Redis - # code is in ldclient-rb/impl/integrations/redis_impl - end - - # - # Support code that may be helpful in creating integrations. 
- # - # @since 5.5.0 - # - module Util - # code is in ldclient-rb/integrations/util/ - end - end -end diff --git a/lib/ldclient-rb/integrations/consul.rb b/lib/ldclient-rb/integrations/consul.rb index 4f32d5fd..020c31b4 100644 --- a/lib/ldclient-rb/integrations/consul.rb +++ b/lib/ldclient-rb/integrations/consul.rb @@ -3,6 +3,13 @@ module LaunchDarkly module Integrations + # + # Integration with [Consul](https://www.consul.io/). + # + # Note that in order to use this integration, you must first install the gem `diplomat`. + # + # @since 5.5.0 + # module Consul # # Default value for the `prefix` option for {new_feature_store}. diff --git a/lib/ldclient-rb/integrations/dynamodb.rb b/lib/ldclient-rb/integrations/dynamodb.rb index c3af07d5..229a64af 100644 --- a/lib/ldclient-rb/integrations/dynamodb.rb +++ b/lib/ldclient-rb/integrations/dynamodb.rb @@ -3,6 +3,14 @@ module LaunchDarkly module Integrations + # + # Integration with [DynamoDB](https://aws.amazon.com/dynamodb/). + # + # Note that in order to use this integration, you must first install one of the AWS SDK gems: either + # `aws-sdk-dynamodb`, or the full `aws-sdk`. + # + # @since 5.5.0 + # module DynamoDB # # Creates a DynamoDB-backed persistent feature store. For more details about how and why you can @@ -40,7 +48,44 @@ module DynamoDB # def self.new_feature_store(table_name, opts) core = LaunchDarkly::Impl::Integrations::DynamoDB::DynamoDBFeatureStoreCore.new(table_name, opts) - return LaunchDarkly::Integrations::Util::CachingStoreWrapper.new(core, opts) + LaunchDarkly::Integrations::Util::CachingStoreWrapper.new(core, opts) + end + + # + # Creates a DynamoDB-backed Big Segment store. + # + # Big Segments are a specific type of user segments. For more information, read the LaunchDarkly + # documentation: https://docs.launchdarkly.com/home/users/big-segments + # + # To use this method, you must first install one of the AWS SDK gems: either `aws-sdk-dynamodb`, or + # the full `aws-sdk`. 
Then, put the object returned by this method into the `store` property of your + # Big Segments configuration (see `Config`). + # + # @example Configuring Big Segments + # store = LaunchDarkly::Integrations::DynamoDB::new_big_segment_store("my-table-name") + # config = LaunchDarkly::Config.new(big_segments: LaunchDarkly::BigSegmentsConfig.new(store: store) + # client = LaunchDarkly::LDClient.new(my_sdk_key, config) + # + # Note that the specified table must already exist in DynamoDB. It must have a partition key called + # "namespace", and a sort key called "key" (both strings). The SDK does not create the table + # automatically because it has no way of knowing what additional properties (such as permissions + # and throughput) you would want it to have. + # + # By default, the DynamoDB client will try to get your AWS credentials and region name from + # environment variables and/or local configuration files, as described in the AWS SDK documentation. + # You can also specify any supported AWS SDK options in `dynamodb_opts`-- or, provide an + # already-configured DynamoDB client in `existing_client`. 
+ # + # @param opts [Hash] the configuration options (these are all the same as for `new_feature_store`, + # except that there are no caching parameters) + # @option opts [Hash] :dynamodb_opts options to pass to the DynamoDB client constructor (ignored if you specify `:existing_client`) + # @option opts [Object] :existing_client an already-constructed DynamoDB client for the feature store to use + # @option opts [String] :prefix namespace prefix to add to all keys used by LaunchDarkly + # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger` + # @return [LaunchDarkly::Interfaces::BigSegmentStore] a Big Segment store object + # + def self.new_big_segment_store(table_name, opts) + LaunchDarkly::Impl::Integrations::DynamoDB::DynamoDBBigSegmentStore.new(table_name, opts) end end end diff --git a/lib/ldclient-rb/integrations/file_data.rb b/lib/ldclient-rb/integrations/file_data.rb new file mode 100644 index 00000000..370d3aa6 --- /dev/null +++ b/lib/ldclient-rb/integrations/file_data.rb @@ -0,0 +1,108 @@ +require 'ldclient-rb/impl/integrations/file_data_source' + +module LaunchDarkly + module Integrations + # + # Provides a way to use local files as a source of feature flag state. This allows using a + # predetermined feature flag state without an actual LaunchDarkly connection. + # + # Reading flags from a file is only intended for pre-production environments. Production + # environments should always be configured to receive flag updates from LaunchDarkly. + # + # To use this component, call {FileData#data_source}, and store its return value in the + # {Config#data_source} property of your LaunchDarkly client configuration. 
In the options + # to `data_source`, set `paths` to the file path(s) of your data file(s): + # + # file_source = LaunchDarkly::Integrations::FileData.data_source(paths: [ myFilePath ]) + # config = LaunchDarkly::Config.new(data_source: file_source) + # + # This will cause the client not to connect to LaunchDarkly to get feature flags. The + # client may still make network connections to send analytics events, unless you have disabled + # this with {Config#send_events} or {Config#offline?}. + # + # Flag data files can be either JSON or YAML. They contain an object with three possible + # properties: + # + # - `flags`: Feature flag definitions. + # - `flagValues`: Simplified feature flags that contain only a value. + # - `segments`: User segment definitions. + # + # The format of the data in `flags` and `segments` is defined by the LaunchDarkly application + # and is subject to change. Rather than trying to construct these objects yourself, it is simpler + # to request existing flags directly from the LaunchDarkly server in JSON format, and use this + # output as the starting point for your file. In Linux you would do this: + # + # ``` + # curl -H "Authorization: YOUR_SDK_KEY" https://sdk.launchdarkly.com/sdk/latest-all + # ``` + # + # The output will look something like this (but with many more properties): + # + # { + # "flags": { + # "flag-key-1": { + # "key": "flag-key-1", + # "on": true, + # "variations": [ "a", "b" ] + # } + # }, + # "segments": { + # "segment-key-1": { + # "key": "segment-key-1", + # "includes": [ "user-key-1" ] + # } + # } + # } + # + # Data in this format allows the SDK to exactly duplicate all the kinds of flag behavior supported + # by LaunchDarkly. However, in many cases you will not need this complexity, but will just want to + # set specific flag keys to specific values. 
For that, you can use a much simpler format: + # + # { + # "flagValues": { + # "my-string-flag-key": "value-1", + # "my-boolean-flag-key": true, + # "my-integer-flag-key": 3 + # } + # } + # + # Or, in YAML: + # + # flagValues: + # my-string-flag-key: "value-1" + # my-boolean-flag-key: true + # my-integer-flag-key: 1 + # + # It is also possible to specify both "flags" and "flagValues", if you want some flags + # to have simple values and others to have complex behavior. However, it is an error to use the + # same flag key or segment key more than once, either in a single file or across multiple files. + # + # If the data source encounters any error in any file-- malformed content, a missing file, or a + # duplicate key-- it will not load flags from any of the files. + # + module FileData + # + # Returns a factory for the file data source component. + # + # @param options [Hash] the configuration options + # @option options [Array] :paths The paths of the source files for loading flag data. These + # may be absolute paths or relative to the current working directory. + # @option options [Boolean] :auto_update True if the data source should watch for changes to + # the source file(s) and reload flags whenever there is a change. Auto-updating will only + # work if all of the files you specified have valid directory paths at startup time. + # Note that the default implementation of this feature is based on polling the filesystem, + # which may not perform well. If you install the 'listen' gem (not included by default, to + # avoid adding unwanted dependencies to the SDK), its native file watching mechanism will be + # used instead. However, 'listen' will not be used in JRuby 9.1 due to a known instability. + # @option options [Float] :poll_interval The minimum interval, in seconds, between checks for + # file modifications - used only if auto_update is true, and if the native file-watching + # mechanism from 'listen' is not being used. The default value is 1 second. 
+ # @return an object that can be stored in {Config#data_source} + # + def self.data_source(options={}) + return lambda { |sdk_key, config| + Impl::Integrations::FileDataSourceImpl.new(config.feature_store, config.logger, options) } + end + end + end +end diff --git a/lib/ldclient-rb/integrations/redis.rb b/lib/ldclient-rb/integrations/redis.rb index 5792d554..6fed732d 100644 --- a/lib/ldclient-rb/integrations/redis.rb +++ b/lib/ldclient-rb/integrations/redis.rb @@ -2,6 +2,14 @@ module LaunchDarkly module Integrations + # + # Integration with [Redis](https://redis.io/). + # + # Note that in order to use this integration, you must first install the `redis` and `connection-pool` + # gems. + # + # @since 5.5.0 + # module Redis # # Default value for the `redis_url` option for {new_feature_store}. This points to an instance of @@ -53,6 +61,38 @@ def self.default_prefix def self.new_feature_store(opts) return RedisFeatureStore.new(opts) end + + # + # Creates a Redis-backed Big Segment store. + # + # Big Segments are a specific type of user segments. For more information, read the LaunchDarkly + # documentation: https://docs.launchdarkly.com/home/users/big-segments + # + # To use this method, you must first have the `redis` and `connection-pool` gems installed. Then, + # put the object returned by this method into the `store` property of your Big Segments configuration + # (see `Config`). 
+      #
+      # @example Configuring Big Segments
+      #   store = LaunchDarkly::Integrations::Redis::new_big_segment_store(redis_url: "redis://my-server")
+      #   config = LaunchDarkly::Config.new(big_segments: LaunchDarkly::BigSegmentsConfig.new(store: store))
+      #   client = LaunchDarkly::LDClient.new(my_sdk_key, config)
+      #
+      # @param opts [Hash] the configuration options (these are all the same as for `new_feature_store`,
+      #   except that there are no caching parameters)
+      # @option opts [String] :redis_url (default_redis_url) URL of the Redis instance (shortcut for omitting `redis_opts`)
+      # @option opts [Hash] :redis_opts options to pass to the Redis constructor (if you want to specify more than just `redis_url`)
+      # @option opts [String] :prefix (default_prefix) namespace prefix to add to all hash keys used by LaunchDarkly
+      # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger`
+      # @option opts [Integer] :max_connections size of the Redis connection pool
+      # @option opts [Object] :pool custom connection pool, if desired
+      # @option opts [Boolean] :pool_shutdown_on_close whether calling `close` should shutdown the custom connection pool;
+      #   this is true by default, and should be set to false only if you are managing the pool yourself and want its
+      #   lifecycle to be independent of the SDK client
+      # @return [LaunchDarkly::Interfaces::BigSegmentStore] a Big Segment store object
+      #
+      def self.new_big_segment_store(opts)
+        return LaunchDarkly::Impl::Integrations::Redis::RedisBigSegmentStore.new(opts)
+      end
     end
   end
 end
diff --git a/lib/ldclient-rb/integrations/test_data.rb b/lib/ldclient-rb/integrations/test_data.rb
new file mode 100644
index 00000000..8cbcc980
--- /dev/null
+++ b/lib/ldclient-rb/integrations/test_data.rb
@@ -0,0 +1,209 @@
+require 'ldclient-rb/impl/integrations/test_data/test_data_source'
+require 'ldclient-rb/integrations/test_data/flag_builder'
+
+require 'concurrent/atomics'
+
+module LaunchDarkly
+  module Integrations
+    #
+    # 
A mechanism for providing dynamically updatable feature flag state in a simplified form to an SDK + # client in test scenarios. + # + # Unlike {LaunchDarkly::Integrations::FileData}, this mechanism does not use any external resources. It + # provides only the data that the application has put into it using the {#update} method. + # + # @example + # td = LaunchDarkly::Integrations::TestData.data_source + # td.update(td.flag("flag-key-1").variation_for_all_users(true)) + # config = LaunchDarkly::Config.new(data_source: td) + # client = LaunchDarkly::LDClient.new('sdkKey', config) + # # flags can be updated at any time: + # td.update(td.flag("flag-key-2") + # .variation_for_user("some-user-key", true) + # .fallthrough_variation(false)) + # + # The above example uses a simple boolean flag, but more complex configurations are possible using + # the methods of the {FlagBuilder} that is returned by {#flag}. {FlagBuilder} + # supports many of the ways a flag can be configured on the LaunchDarkly dashboard, but does not + # currently support 1. rule operators other than "in" and "not in", or 2. percentage rollouts. + # + # If the same `TestData` instance is used to configure multiple `LDClient` instances, + # any changes made to the data will propagate to all of the `LDClient`s. + # + # @since 6.3.0 + # + class TestData + # Creates a new instance of the test data source. + # + # @return [TestData] a new configurable test data source + def self.data_source + self.new + end + + # @private + def initialize + @flag_builders = Hash.new + @current_flags = Hash.new + @current_segments = Hash.new + @instances = Array.new + @instances_lock = Concurrent::ReadWriteLock.new + @lock = Concurrent::ReadWriteLock.new + end + + # + # Called internally by the SDK to determine what arguments to pass to call + # You do not need to call this method. + # + # @private + def arity + 2 + end + + # + # Called internally by the SDK to associate this test data source with an {@code LDClient} instance. 
+ # You do not need to call this method. + # + # @private + def call(_, config) + impl = LaunchDarkly::Impl::Integrations::TestData::TestDataSource.new(config.feature_store, self) + @instances_lock.with_write_lock { @instances.push(impl) } + impl + end + + # + # Creates or copies a {FlagBuilder} for building a test flag configuration. + # + # If this flag key has already been defined in this `TestData` instance, then the builder + # starts with the same configuration that was last provided for this flag. + # + # Otherwise, it starts with a new default configuration in which the flag has `true` and + # `false` variations, is `true` for all users when targeting is turned on and + # `false` otherwise, and currently has targeting turned on. You can change any of those + # properties, and provide more complex behavior, using the {FlagBuilder} methods. + # + # Once you have set the desired configuration, pass the builder to {#update}. + # + # @param key [String] the flag key + # @return [FlagBuilder] a flag configuration builder + # + def flag(key) + existing_builder = @lock.with_read_lock { @flag_builders[key] } + if existing_builder.nil? then + FlagBuilder.new(key).boolean_flag + else + existing_builder.clone + end + end + + # + # Updates the test data with the specified flag configuration. + # + # This has the same effect as if a flag were added or modified on the LaunchDarkly dashboard. + # It immediately propagates the flag change to any `LDClient` instance(s) that you have + # already configured to use this `TestData`. If no `LDClient` has been started yet, + # it simply adds this flag to the test data which will be provided to any `LDClient` that + # you subsequently configure. + # + # Any subsequent changes to this {FlagBuilder} instance do not affect the test data, + # unless you call {#update} again. 
+ # + # @param flag_builder [FlagBuilder] a flag configuration builder + # @return [TestData] the TestData instance + # + def update(flag_builder) + new_flag = nil + @lock.with_write_lock do + @flag_builders[flag_builder.key] = flag_builder + version = 0 + flag_key = flag_builder.key.to_sym + if @current_flags[flag_key] then + version = @current_flags[flag_key][:version] + end + new_flag = flag_builder.build(version+1) + @current_flags[flag_key] = new_flag + end + update_item(FEATURES, new_flag) + self + end + + # + # Copies a full feature flag data model object into the test data. + # + # It immediately propagates the flag change to any `LDClient` instance(s) that you have already + # configured to use this `TestData`. If no `LDClient` has been started yet, it simply adds + # this flag to the test data which will be provided to any LDClient that you subsequently + # configure. + # + # Use this method if you need to use advanced flag configuration properties that are not supported by + # the simplified {FlagBuilder} API. Otherwise it is recommended to use the regular {flag}/{update} + # mechanism to avoid dependencies on details of the data model. + # + # You cannot make incremental changes with {flag}/{update} to a flag that has been added in this way; + # you can only replace it with an entirely new flag configuration. + # + # @param flag [Hash] the flag configuration + # @return [TestData] the TestData instance + # + def use_preconfigured_flag(flag) + use_preconfigured_item(FEATURES, flag, @current_flags) + end + + # + # Copies a full user segment data model object into the test data. + # + # It immediately propagates the change to any `LDClient` instance(s) that you have already + # configured to use this `TestData`. If no `LDClient` has been started yet, it simply adds + # this segment to the test data which will be provided to any LDClient that you subsequently + # configure. 
+ # + # This method is currently the only way to inject user segment data, since there is no builder + # API for segments. It is mainly intended for the SDK's own tests of user segment functionality, + # since application tests that need to produce a desired evaluation state could do so more easily + # by just setting flag values. + # + # @param segment [Hash] the segment configuration + # @return [TestData] the TestData instance + # + def use_preconfigured_segment(segment) + use_preconfigured_item(SEGMENTS, segment, @current_segments) + end + + private def use_preconfigured_item(kind, item, current) + key = item[:key].to_sym + @lock.with_write_lock do + old_item = current[key] + if !old_item.nil? then + item = item.clone + item[:version] = old_item[:version] + 1 + end + current[key] = item + end + update_item(kind, item) + self + end + + private def update_item(kind, item) + @instances_lock.with_read_lock do + @instances.each do | instance | + instance.upsert(kind, item) + end + end + end + + # @private + def make_init_data + @lock.with_read_lock do + { + FEATURES => @current_flags.clone, + SEGMENTS => @current_segments.clone + } + end + end + + # @private + def closed_instance(instance) + @instances_lock.with_write_lock { @instances.delete(instance) } + end + end + end +end diff --git a/lib/ldclient-rb/integrations/test_data/flag_builder.rb b/lib/ldclient-rb/integrations/test_data/flag_builder.rb new file mode 100644 index 00000000..79d6247b --- /dev/null +++ b/lib/ldclient-rb/integrations/test_data/flag_builder.rb @@ -0,0 +1,438 @@ +require 'ldclient-rb/util' + +module LaunchDarkly + module Integrations + class TestData + # + # A builder for feature flag configurations to be used with {TestData}. 
+ # + # @see TestData#flag + # @see TestData#update + # + class FlagBuilder + attr_reader :key + + # @private + def initialize(key) + @key = key + @on = true + @variations = [] + end + + # @private + def initialize_copy(other) + super(other) + @variations = @variations.clone + @rules = @rules.nil? ? nil : deep_copy_array(@rules) + @targets = @targets.nil? ? nil : deep_copy_hash(@targets) + end + + # + # Sets targeting to be on or off for this flag. + # + # The effect of this depends on the rest of the flag configuration, just as it does on the + # real LaunchDarkly dashboard. In the default configuration that you get from calling + # {TestData#flag} with a new flag key, the flag will return `false` + # whenever targeting is off, and `true` when targeting is on. + # + # @param on [Boolean] true if targeting should be on + # @return [FlagBuilder] the builder + # + def on(on) + @on = on + self + end + + # + # Specifies the fallthrough variation. The fallthrough is the value + # that is returned if targeting is on and the user was not matched by a more specific + # target or rule. + # + # If the flag was previously configured with other variations and the variation specified is a boolean, + # this also changes it to a boolean flag. + # + # @param variation [Boolean, Integer] true or false or the desired fallthrough variation index: + # 0 for the first, 1 for the second, etc. + # @return [FlagBuilder] the builder + # + def fallthrough_variation(variation) + if LaunchDarkly::Impl::Util.is_bool variation then + boolean_flag.fallthrough_variation(variation_for_boolean(variation)) + else + @fallthrough_variation = variation + self + end + end + + # + # Specifies the off variation for a flag. This is the variation that is returned + # whenever targeting is off. + # + # If the flag was previously configured with other variations and the variation specified is a boolean, + # this also changes it to a boolean flag. 
+        #
+        # @param variation [Boolean, Integer] true or false or the desired off variation index:
+        #   0 for the first, 1 for the second, etc.
+        # @return [FlagBuilder] the builder
+        #
+        def off_variation(variation)
+          if LaunchDarkly::Impl::Util.is_bool variation then
+            boolean_flag.off_variation(variation_for_boolean(variation))
+          else
+            @off_variation = variation
+            self
+          end
+        end
+
+        #
+        # Changes the allowable variation values for the flag.
+        #
+        # The value may be of any valid JSON type. For instance, a boolean flag
+        # normally has `true, false`; a string-valued flag might have
+        # `'red', 'green'`; etc.
+        #
+        # @example A single variation
+        #   td.flag('new-flag')
+        #     .variations(true)
+        #
+        # @example Multiple variations
+        #   td.flag('new-flag')
+        #     .variations('red', 'green', 'blue')
+        #
+        # @param variations [Array] the desired variations
+        # @return [FlagBuilder] the builder
+        #
+        def variations(*variations)
+          @variations = variations
+          self
+        end
+
+        #
+        # Sets the flag to always return the specified variation for all users.
+        #
+        # The variation is specified, Targeting is switched on, and any existing targets or rules are removed.
+        # The fallthrough variation is set to the specified value. The off variation is left unchanged.
+        #
+        # If the flag was previously configured with other variations and the variation specified is a boolean,
+        # this also changes it to a boolean flag.
+        #
+        # @param variation [Boolean, Integer] true or false or the desired variation index to return:
+        #   0 for the first, 1 for the second, etc.
+        # @return [FlagBuilder] the builder
+        #
+        def variation_for_all_users(variation)
+          if LaunchDarkly::Impl::Util.is_bool variation then
+            boolean_flag.variation_for_all_users(variation_for_boolean(variation))
+          else
+            on(true).clear_rules.clear_user_targets.fallthrough_variation(variation)
+          end
+        end
+
+        #
+        # Sets the flag to always return the specified variation value for all users.
+        #
+        # The value may be of any valid JSON type. 
This method changes the + # flag to have only a single variation, which is this value, and to return the same + # variation regardless of whether targeting is on or off. Any existing targets or rules + # are removed. + # + # @param value [Object] the desired value to be returned for all users + # @return [FlagBuilder] the builder + # + def value_for_all_users(value) + variations(value).variation_for_all_users(0) + end + + # + # Sets the flag to return the specified variation for a specific user key when targeting + # is on. + # + # This has no effect when targeting is turned off for the flag. + # + # If the flag was previously configured with other variations and the variation specified is a boolean, + # this also changes it to a boolean flag. + # + # @param user_key [String] a user key + # @param variation [Boolean, Integer] true or false or the desired variation index to return: + # 0 for the first, 1 for the second, etc. + # @return [FlagBuilder] the builder + # + def variation_for_user(user_key, variation) + if LaunchDarkly::Impl::Util.is_bool variation then + boolean_flag.variation_for_user(user_key, variation_for_boolean(variation)) + else + if @targets.nil? then + @targets = Hash.new + end + @variations.count.times do | i | + if i == variation then + if @targets[i].nil? then + @targets[i] = [user_key] + else + @targets[i].push(user_key) + end + elsif not @targets[i].nil? then + @targets[i].delete(user_key) + end + end + self + end + end + + # + # Starts defining a flag rule, using the "is one of" operator. 
+ # + # @example create a rule that returns `true` if the name is "Patsy" or "Edina" + # testData.flag("flag") + # .if_match(:name, 'Patsy', 'Edina') + # .then_return(true); + # + # @param attribute [Symbol] the user attribute to match against + # @param values [Array] values to compare to + # @return [FlagRuleBuilder] a flag rule builder + # + # @see FlagRuleBuilder#then_return + # @see FlagRuleBuilder#and_match + # @see FlagRuleBuilder#and_not_match + # + def if_match(attribute, *values) + FlagRuleBuilder.new(self).and_match(attribute, *values) + end + + # + # Starts defining a flag rule, using the "is not one of" operator. + # + # @example create a rule that returns `true` if the name is neither "Saffron" nor "Bubble" + # testData.flag("flag") + # .if_not_match(:name, 'Saffron', 'Bubble') + # .then_return(true) + # + # @param attribute [Symbol] the user attribute to match against + # @param values [Array] values to compare to + # @return [FlagRuleBuilder] a flag rule builder + # + # @see FlagRuleBuilder#then_return + # @see FlagRuleBuilder#and_match + # @see FlagRuleBuilder#and_not_match + # + def if_not_match(attribute, *values) + FlagRuleBuilder.new(self).and_not_match(attribute, *values) + end + + # + # Removes any existing user targets from the flag. + # This undoes the effect of methods like {#variation_for_user} + # + # @return [FlagBuilder] the same builder + # + def clear_user_targets + @targets = nil + self + end + + # + # Removes any existing rules from the flag. + # This undoes the effect of methods like {#if_match} + # + # @return [FlagBuilder] the same builder + # + def clear_rules + @rules = nil + self + end + + # @private + def add_rule(rule) + if @rules.nil? then + @rules = Array.new + end + @rules.push(rule) + self + end + + # + # A shortcut for setting the flag to use the standard boolean configuration. + # + # This is the default for all new flags created with {TestData#flag}. 
+ # The flag will have two variations, `true` and `false` (in that order); + # it will return `false` whenever targeting is off, and `true` when targeting is on + # if no other settings specify otherwise. + # + # @return [FlagBuilder] the builder + # + def boolean_flag + if is_boolean_flag then + self + else + variations(true, false) + .fallthrough_variation(TRUE_VARIATION_INDEX) + .off_variation(FALSE_VARIATION_INDEX) + end + end + + # @private + def build(version) + res = { key: @key, + version: version, + on: @on, + variations: @variations, + } + + unless @off_variation.nil? then + res[:offVariation] = @off_variation + end + + unless @fallthrough_variation.nil? then + res[:fallthrough] = { variation: @fallthrough_variation } + end + + unless @targets.nil? then + res[:targets] = @targets.collect do | variation, values | + { variation: variation, values: values } + end + end + + unless @rules.nil? then + res[:rules] = @rules.each_with_index.collect { | rule, i | rule.build(i) } + end + + res + end + + # + # A builder for feature flag rules to be used with {FlagBuilder}. + # + # In the LaunchDarkly model, a flag can have any number of rules, and a rule can have any number of + # clauses. A clause is an individual test such as "name is 'X'". A rule matches a user if all of the + # rule's clauses match the user. + # + # To start defining a rule, use one of the flag builder's matching methods such as + # {FlagBuilder#if_match}. This defines the first clause for the rule. + # Optionally, you may add more clauses with the rule builder's methods such as + # {#and_match} or {#and_not_match}. + # Finally, call {#then_return} to finish defining the rule. 
+        #
+        class FlagRuleBuilder
+          # @private
+          FlagRuleClause = Struct.new(:attribute, :op, :values, :negate, keyword_init: true)
+
+          # @private
+          def initialize(flag_builder)
+            @flag_builder = flag_builder
+            @clauses = Array.new
+          end
+
+          # @private
+          def initialize_copy(other)
+            super(other)
+            @clauses = @clauses.clone
+          end
+
+          #
+          # Adds another clause, using the "is one of" operator.
+          #
+          # @example create a rule that returns `true` if the name is "Patsy" and the country is "gb"
+          #   testData.flag("flag")
+          #     .if_match(:name, 'Patsy')
+          #     .and_match(:country, 'gb')
+          #     .then_return(true)
+          #
+          # @param attribute [Symbol] the user attribute to match against
+          # @param values [Array] values to compare to
+          # @return [FlagRuleBuilder] the rule builder
+          #
+          def and_match(attribute, *values)
+            @clauses.push(FlagRuleClause.new(
+              attribute: attribute,
+              op: 'in',
+              values: values,
+              negate: false
+            ))
+            self
+          end
+
+          #
+          # Adds another clause, using the "is not one of" operator.
+          #
+          # @example create a rule that returns `true` if the name is "Patsy" and the country is not "gb"
+          #   testData.flag("flag")
+          #     .if_match(:name, 'Patsy')
+          #     .and_not_match(:country, 'gb')
+          #     .then_return(true)
+          #
+          # @param attribute [Symbol] the user attribute to match against
+          # @param values [Array] values to compare to
+          # @return [FlagRuleBuilder] the rule builder
+          #
+          def and_not_match(attribute, *values)
+            @clauses.push(FlagRuleClause.new(
+              attribute: attribute,
+              op: 'in',
+              values: values,
+              negate: true
+            ))
+            self
+          end
+
+          #
+          # Finishes defining the rule, specifying the result as either a boolean
+          # or a variation index.
+          #
+          # If the flag was previously configured with other variations and the variation specified is a boolean,
+          # this also changes it to a boolean flag.
+          #
+          # @param variation [Boolean, Integer] true or false or the desired variation index:
+          #   0 for the first, 1 for the second, etc. 
+ # @return [FlagBuilder] the flag builder with this rule added + # + def then_return(variation) + if LaunchDarkly::Impl::Util.is_bool variation then + @variation = @flag_builder.variation_for_boolean(variation) + @flag_builder.boolean_flag.add_rule(self) + else + @variation = variation + @flag_builder.add_rule(self) + end + end + + # @private + def build(ri) + { + id: 'rule' + ri.to_s, + variation: @variation, + clauses: @clauses.collect(&:to_h) + } + end + end + + # @private + def variation_for_boolean(variation) + variation ? TRUE_VARIATION_INDEX : FALSE_VARIATION_INDEX + end + + private + + TRUE_VARIATION_INDEX = 0 + FALSE_VARIATION_INDEX = 1 + + def is_boolean_flag + @variations.size == 2 && + @variations[TRUE_VARIATION_INDEX] == true && + @variations[FALSE_VARIATION_INDEX] == false + end + + def deep_copy_hash(from) + to = Hash.new + from.each { |k, v| to[k] = v.clone } + to + end + + def deep_copy_array(from) + to = Array.new + from.each { |v| to.push(v.clone) } + to + end + end + end + end +end diff --git a/lib/ldclient-rb/integrations/util/store_wrapper.rb b/lib/ldclient-rb/integrations/util/store_wrapper.rb index 26318d67..c94ace94 100644 --- a/lib/ldclient-rb/integrations/util/store_wrapper.rb +++ b/lib/ldclient-rb/integrations/util/store_wrapper.rb @@ -4,6 +4,11 @@ module LaunchDarkly module Integrations + # + # Support code that may be helpful in creating integrations. + # + # @since 5.5.0 + # module Util # # CachingStoreWrapper is a partial implementation of the {LaunchDarkly::Interfaces::FeatureStore} diff --git a/lib/ldclient-rb/interfaces.rb b/lib/ldclient-rb/interfaces.rb index 9ea0932b..b62a90fb 100644 --- a/lib/ldclient-rb/interfaces.rb +++ b/lib/ldclient-rb/interfaces.rb @@ -1,3 +1,4 @@ +require "observer" module LaunchDarkly # @@ -120,7 +121,8 @@ def stop # # The client has its own standard implementation, which uses either a streaming connection or # polling depending on your configuration. 
Normally you will not need to use another one - # except for testing purposes. {FileDataSource} provides one such test fixture. + # except for testing purposes. Two such test fixtures are {LaunchDarkly::Integrations::FileData} + # and {LaunchDarkly::Integrations::TestData}. # module DataSource # @@ -149,5 +151,153 @@ def start def stop end end + + module BigSegmentStore + # + # Returns information about the overall state of the store. This method will be called only + # when the SDK needs the latest state, so it should not be cached. + # + # @return [BigSegmentStoreMetadata] + # + def get_metadata + end + + # + # Queries the store for a snapshot of the current segment state for a specific user. + # + # The user_hash is a base64-encoded string produced by hashing the user key as defined by + # the Big Segments specification; the store implementation does not need to know the details + # of how this is done, because it deals only with already-hashed keys, but the string can be + # assumed to only contain characters that are valid in base64. + # + # The return value should be either a Hash, or nil if the user is not referenced in any big + # segments. Each key in the Hash is a "segment reference", which is how segments are + # identified in Big Segment data. This string is not identical to the segment key-- the SDK + # will add other information. The store implementation should not be concerned with the + # format of the string. Each value in the Hash is true if the user is explicitly included in + # the segment, false if the user is explicitly excluded from the segment-- and is not also + # explicitly included (that is, if both an include and an exclude existed in the data, the + # include would take precedence). If the user's status in a particular segment is undefined, + # there should be no key or value for that segment. + # + # This Hash may be cached by the SDK, so it should not be modified after it is created. 
It + # is a snapshot of the segment membership state at one point in time. + # + # @param user_hash [String] + # @return [Hash] true/false values for Big Segments that reference this user + # + def get_membership(user_hash) + end + + # + # Performs any necessary cleanup to shut down the store when the client is being shut down. + # + # @return [void] + # + def stop + end + end + + # + # Values returned by {BigSegmentStore#get_metadata}. + # + class BigSegmentStoreMetadata + def initialize(last_up_to_date) + @last_up_to_date = last_up_to_date + end + + # The Unix epoch millisecond timestamp of the last update to the {BigSegmentStore}. It is + # nil if the store has never been updated. + # + # @return [Integer|nil] + attr_reader :last_up_to_date + end + + # + # Information about the status of a Big Segment store, provided by {BigSegmentStoreStatusProvider}. + # + # Big Segments are a specific type of user segments. For more information, read the LaunchDarkly + # documentation: https://docs.launchdarkly.com/home/users/big-segments + # + class BigSegmentStoreStatus + def initialize(available, stale) + @available = available + @stale = stale + end + + # True if the Big Segment store is able to respond to queries, so that the SDK can evaluate + # whether a user is in a segment or not. + # + # If this property is false, the store is not able to make queries (for instance, it may not have + # a valid database connection). In this case, the SDK will treat any reference to a Big Segment + # as if no users are included in that segment. Also, the {EvaluationReason} associated with + # with any flag evaluation that references a Big Segment when the store is not available will + # have a `big_segments_status` of `STORE_ERROR`. + # + # @return [Boolean] + attr_reader :available + + # True if the Big Segment store is available, but has not been updated within the amount of time + # specified by {BigSegmentsConfig#stale_after}. 
+      #
+      # This may indicate that the LaunchDarkly Relay Proxy, which populates the store, has stopped
+      # running or has become unable to receive fresh data from LaunchDarkly. Any feature flag
+      # evaluations that reference a Big Segment will be using the last known data, which may be out
+      # of date. Also, the {EvaluationReason} associated with those evaluations will have a
+      # `big_segments_status` of `STALE`.
+      #
+      # @return [Boolean]
+      attr_reader :stale
+
+      def ==(other)
+        self.available == other.available && self.stale == other.stale
+      end
+    end
+
+    #
+    # An interface for querying the status of a Big Segment store.
+    #
+    # The Big Segment store is the component that receives information about Big Segments, normally
+    # from a database populated by the LaunchDarkly Relay Proxy. Big Segments are a specific type
+    # of user segments. For more information, read the LaunchDarkly documentation:
+    # https://docs.launchdarkly.com/home/users/big-segments
+    #
+    # An implementation of this interface is returned by {LDClient#big_segment_store_status_provider}.
+    # Application code never needs to implement this interface.
+    #
+    # There are two ways to interact with the status. One is to simply get the current status; if its
+    # `available` property is true, then the SDK is able to evaluate user membership in Big Segments,
+    # and the `stale` property indicates whether the data might be out of date.
+    #
+    # The other way is to subscribe to status change notifications. Applications may wish to know if
+    # there is an outage in the Big Segment store, or if it has become stale (the Relay Proxy has
+    # stopped updating it with new data), since then flag evaluations that reference a Big Segment
+    # might return incorrect values. To allow finding out about status changes as soon as possible,
+    # `BigSegmentStoreStatusProvider` mixes in Ruby's
+    # [Observable](https://docs.ruby-lang.org/en/2.5.0/Observable.html) module to provide standard
+    # methods such as `add_observer`. 
Observers will be called with a new {BigSegmentStoreStatus} + # value whenever the status changes. + # + # @example Getting the current status + # status = client.big_segment_store_status_provider.status + # + # @example Subscribing to status notifications + # client.big_segment_store_status_provider.add_observer(self, :big_segments_status_changed) + # + # def big_segments_status_changed(new_status) + # puts "Big segment store status is now: #{new_status}" + # end + # + module BigSegmentStoreStatusProvider + include Observable + # + # Gets the current status of the store, if known. + # + # @return [BigSegmentStoreStatus] the status, or nil if the SDK has not yet queried the Big + # Segment store status + # + def status + end + end end end diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index ba2a7675..a8719773 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -1,3 +1,4 @@ +require "ldclient-rb/impl/big_segments" require "ldclient-rb/impl/diagnostic_events" require "ldclient-rb/impl/evaluator" require "ldclient-rb/impl/event_factory" @@ -57,10 +58,14 @@ def initialize(sdk_key, config = Config.default, wait_for_sec = 5) updated_config.instance_variable_set(:@feature_store, @store) @config = updated_config + @big_segment_store_manager = Impl::BigSegmentStoreManager.new(config.big_segments, @config.logger) + @big_segment_store_status_provider = @big_segment_store_manager.status_provider + get_flag = lambda { |key| @store.get(FEATURES, key) } get_segment = lambda { |key| @store.get(SEGMENTS, key) } - @evaluator = LaunchDarkly::Impl::Evaluator.new(get_flag, get_segment, @config.logger) - + get_big_segments_membership = lambda { |key| @big_segment_store_manager.get_user_membership(key) } + @evaluator = LaunchDarkly::Impl::Evaluator.new(get_flag, get_segment, get_big_segments_membership, @config.logger) + if !@config.offline? && @config.send_events && !@config.diagnostic_opt_out? 
diagnostic_accumulator = Impl::DiagnosticAccumulator.new(Impl::DiagnosticAccumulator.create_diagnostic_id(sdk_key)) else @@ -375,9 +380,18 @@ def close @config.logger.info { "[LDClient] Closing LaunchDarkly client..." } @data_source.stop @event_processor.stop + @big_segment_store_manager.stop @store.stop end + # + # Returns an interface for tracking the status of a Big Segment store. + # + # The {BigSegmentStoreStatusProvider} has methods for checking whether the Big Segment store + # is (as far as the SDK knows) currently operational and tracking changes in this status. + # + attr_reader :big_segment_store_status_provider + private def create_default_data_source(sdk_key, config, diagnostic_accumulator) diff --git a/lib/ldclient-rb/polling.rb b/lib/ldclient-rb/polling.rb index a9312413..d571f837 100644 --- a/lib/ldclient-rb/polling.rb +++ b/lib/ldclient-rb/polling.rb @@ -1,3 +1,5 @@ +require "ldclient-rb/impl/repeating_task" + require "concurrent/atomics" require "thread" @@ -9,8 +11,8 @@ def initialize(config, requestor) @requestor = requestor @initialized = Concurrent::AtomicBoolean.new(false) @started = Concurrent::AtomicBoolean.new(false) - @stopped = Concurrent::AtomicBoolean.new(false) @ready = Concurrent::Event.new + @task = Impl::RepeatingTask.new(@config.poll_interval, 0, -> { self.poll }, @config.logger) end def initialized? @@ -20,56 +22,35 @@ def initialized? def start return @ready unless @started.make_true @config.logger.info { "[LDClient] Initializing polling connection" } - create_worker + @task.start @ready end def stop - if @stopped.make_true - if @worker && @worker.alive? 
&& @worker != Thread.current - @worker.run # causes the thread to wake up if it's currently in a sleep - @worker.join - end - @config.logger.info { "[LDClient] Polling connection stopped" } - end + @task.stop + @config.logger.info { "[LDClient] Polling connection stopped" } end def poll - all_data = @requestor.request_all_data - if all_data - @config.feature_store.init(all_data) - if @initialized.make_true - @config.logger.info { "[LDClient] Polling connection initialized" } - @ready.set - end - end - end - - def create_worker - @worker = Thread.new do - @config.logger.debug { "[LDClient] Starting polling worker" } - while !@stopped.value do - started_at = Time.now - begin - poll - rescue UnexpectedResponseError => e - message = Util.http_error_message(e.status, "polling request", "will retry") - @config.logger.error { "[LDClient] #{message}" }; - if !Util.http_error_recoverable?(e.status) - @ready.set # if client was waiting on us, make it stop waiting - has no effect if already set - stop - end - rescue StandardError => exn - Util.log_exception(@config.logger, "Exception while polling", exn) - end - delta = @config.poll_interval - (Time.now - started_at) - if delta > 0 - sleep(delta) + begin + all_data = @requestor.request_all_data + if all_data + @config.feature_store.init(all_data) + if @initialized.make_true + @config.logger.info { "[LDClient] Polling connection initialized" } + @ready.set end end + rescue UnexpectedResponseError => e + message = Util.http_error_message(e.status, "polling request", "will retry") + @config.logger.error { "[LDClient] #{message}" }; + if !Util.http_error_recoverable?(e.status) + @ready.set # if client was waiting on us, make it stop waiting - has no effect if already set + stop + end + rescue StandardError => e + Util.log_exception(@config.logger, "Exception while polling", e) end end - - private :poll, :create_worker end end diff --git a/lib/ldclient-rb/util.rb b/lib/ldclient-rb/util.rb index cfd09d8d..7bd56959 100644 --- 
a/lib/ldclient-rb/util.rb +++ b/lib/ldclient-rb/util.rb @@ -18,7 +18,7 @@ def self.stringify_attrs(hash, attrs) end ret end - + def self.new_http_client(uri_s, config) http_client_options = {} if config.socket_factory diff --git a/spec/big_segment_store_spec_base.rb b/spec/big_segment_store_spec_base.rb new file mode 100644 index 00000000..29f344a1 --- /dev/null +++ b/spec/big_segment_store_spec_base.rb @@ -0,0 +1,112 @@ +require "spec_helper" + +# Reusable test logic for testing BigSegmentStore implementations. +# +# Usage: +# +# class MyStoreTester +# def initialize(options) +# @options = options +# end +# def create_big_segment_store +# MyBigSegmentStoreImplClass.new(@options) +# end +# def clear_data +# # clear any existing data from the database, taking @options[:prefix] into account +# end +# def set_big_segments_metadata(metadata) +# # write the metadata to the database, taking @options[:prefix] into account +# end +# def set_big_segments(user_hash, includes, excludes) +# # update the include and exclude lists for a user, taking @options[:prefix] into account +# end +# end +# +# describe "my big segment store" do +# include_examples "big_segment_store", MyStoreTester +# end + +shared_examples "big_segment_store" do |store_tester_class| + base_options = { logger: $null_logger } + + prefix_test_groups = [ + ["with default prefix", {}], + ["with specified prefix", { prefix: "testprefix" }] + ] + prefix_test_groups.each do |subgroup_description, prefix_options| + context(subgroup_description) do + # The following tests are done for each permutation of (default prefix/specified prefix) + + let(:store_tester) { store_tester_class.new(prefix_options.merge(base_options)) } + let(:fake_user_hash) { "userhash" } + + def with_empty_store + store_tester.clear_data + ensure_stop(store_tester.create_big_segment_store) do |store| + yield store + end + end + + context "get_metadata" do + it "valid value" do + expected_timestamp = 1234567890 + with_empty_store do |store| + 
store_tester.set_big_segments_metadata(LaunchDarkly::Interfaces::BigSegmentStoreMetadata.new(expected_timestamp)) + + actual = store.get_metadata + + expect(actual).not_to be nil + expect(actual.last_up_to_date).to eq(expected_timestamp) + end + end + + it "no value" do + with_empty_store do |store| + actual = store.get_metadata + + expect(actual).not_to be nil + expect(actual.last_up_to_date).to be nil + end + end + end + + context "get_membership" do + it "not found" do + with_empty_store do |store| + membership = store.get_membership(fake_user_hash) + membership = {} if membership.nil? + + expect(membership).to eq({}) + end + end + + it "includes only" do + with_empty_store do |store| + store_tester.set_big_segments(fake_user_hash, ["key1", "key2"], []) + + membership = store.get_membership(fake_user_hash) + expect(membership).to eq({ "key1" => true, "key2" => true }) + end + end + + it "excludes only" do + with_empty_store do |store| + store_tester.set_big_segments(fake_user_hash, [], ["key1", "key2"]) + + membership = store.get_membership(fake_user_hash) + expect(membership).to eq({ "key1" => false, "key2" => false }) + end + end + + it "includes and excludes" do + with_empty_store do |store| + store_tester.set_big_segments(fake_user_hash, ["key1", "key2"], ["key2", "key3"]) + + membership = store.get_membership(fake_user_hash) + expect(membership).to eq({ "key1" => true, "key2" => true, "key3" => false }) # include of key2 overrides exclude + end + end + end + end + end +end diff --git a/spec/evaluation_detail_spec.rb b/spec/evaluation_detail_spec.rb index 3d7418ed..7b1b6856 100644 --- a/spec/evaluation_detail_spec.rb +++ b/spec/evaluation_detail_spec.rb @@ -41,7 +41,10 @@ module LaunchDarkly [ EvaluationReason::prerequisite_failed("x"), EvaluationReason::PREREQUISITE_FAILED, { "kind" => "PREREQUISITE_FAILED", "prerequisiteKey" => "x" }, "PREREQUISITE_FAILED(x)" ], [ EvaluationReason::error(EvaluationReason::ERROR_FLAG_NOT_FOUND), EvaluationReason::ERROR, - { 
"kind" => "ERROR", "errorKind" => "FLAG_NOT_FOUND" }, "ERROR(FLAG_NOT_FOUND)" ] + { "kind" => "ERROR", "errorKind" => "FLAG_NOT_FOUND" }, "ERROR(FLAG_NOT_FOUND)" ], + [ EvaluationReason::fallthrough().with_big_segments_status(BigSegmentsStatus::HEALTHY), EvaluationReason::FALLTHROUGH, + { "kind" => "FALLTHROUGH", "bigSegmentsStatus" => "HEALTHY" }, "FALLTHROUGH", + [ EvaluationReason::fallthrough ] ], ] values.each_index do |i| params = values[i] @@ -108,6 +111,7 @@ module LaunchDarkly expect(EvaluationReason::rule_match(1, "x")[:ruleId]).to eq "x" expect(EvaluationReason::prerequisite_failed("x")[:prerequisiteKey]).to eq "x" expect(EvaluationReason::error(EvaluationReason::ERROR_FLAG_NOT_FOUND)[:errorKind]).to eq "FLAG_NOT_FOUND" + expect(EvaluationReason::fallthrough().with_big_segments_status(BigSegmentsStatus::HEALTHY)[:bigSegmentsStatus]).to eq "HEALTHY" end it "freezes string properties" do @@ -127,9 +131,5 @@ module LaunchDarkly expect { EvaluationReason::error(nil) }.to raise_error(ArgumentError) expect { EvaluationReason::error(9) }.to raise_error(ArgumentError) end - - it "does not allow direct access to constructor" do - expect { EvaluationReason.new(:off, nil, nil, nil, nil) }.to raise_error(NoMethodError) - end end end diff --git a/spec/feature_store_spec_base.rb b/spec/feature_store_spec_base.rb index 2d06f0ff..78fc8596 100644 --- a/spec/feature_store_spec_base.rb +++ b/spec/feature_store_spec_base.rb @@ -1,31 +1,56 @@ require "spec_helper" -shared_examples "feature_store" do |create_store_method, clear_data_method| - - # Rather than testing with feature flag or segment data, we'll use this fake data kind - # to make it clear that feature stores need to be able to handle arbitrary data. - let(:things_kind) { { namespace: "things" } } - - let(:key1) { "thing1" } - let(:thing1) { - { - key: key1, - name: "Thing 1", - version: 11, - deleted: false - } - } - let(:unused_key) { "no" } +# Reusable test logic for testing FeatureStore implementations. 
+# +# Usage: +# +# 1. For a persistent store (database integration) +# class MyStoreTester +# def initialize(options) +# @options = options # the test logic will pass in options like prefix and expiration +# end +# def create_feature_store +# MyFeatureStoreClass.new_feature_store(@options) +# end +# def clear_data +# # clear any existing data from the database, taking @options[:prefix] into account if any +# end +# end +# +# describe "my persistent feature store" do +# include_examples "persistent_feature_store", MyStoreTester +# end +# +# 2. For a non-persistent store (the in-memory implementation) +# class MyStoreTester +# def create_feature_store +# MyFeatureStoreClass.new_feature_store(@options) +# end +# end +# +# describe "my feature store" do +# include_examples "any_feature_store", MyStoreTester.new +# end - let(:create_store) { create_store_method } # just to avoid a scope issue - let(:clear_data) { clear_data_method } +# Rather than testing with feature flag or segment data, we'll use this fake data kind +# to make it clear that feature stores need to be able to handle arbitrary data. +$things_kind = { namespace: "things" } - def with_store(opts = {}) - s = create_store.call(opts) - begin - yield s - ensure - s.stop +$key1 = "$thing1" +$thing1 = { + key: $key1, + name: "Thing 1", + version: 11, + deleted: false +} +$unused_key = "no" + +shared_examples "any_feature_store" do |store_tester| + let(:store_tester) { store_tester } + + def with_store() + ensure_stop(store_tester.create_feature_store) do |store| + yield store end end @@ -34,7 +59,7 @@ def with_inited_store(things) things.each { |thing| things_hash[thing[:key].to_sym] = thing } with_store do |s| - s.init({ things_kind => things_hash }) + s.init({ $things_kind => things_hash }) yield s end end @@ -43,49 +68,9 @@ def new_version_plus(f, deltaVersion, attrs = {}) f.clone.merge({ version: f[:version] + deltaVersion }).merge(attrs) end - before(:each) do - clear_data.call if !clear_data.nil? 
- end - - # This block of tests is only run if the clear_data method is defined, meaning that this is a persistent store - # that operates on a database that can be shared with other store instances (as opposed to the in-memory store, - # which has its own private storage). - if !clear_data_method.nil? - it "is not initialized by default" do - with_store do |store| - expect(store.initialized?).to eq false - end - end - - it "can detect if another instance has initialized the store" do - with_store do |store1| - store1.init({}) - with_store do |store2| - expect(store2.initialized?).to eq true - end - end - end - - it "can read data written by another instance" do - with_store do |store1| - store1.init({ things_kind => { key1.to_sym => thing1 } }) - with_store do |store2| - expect(store2.get(things_kind, key1)).to eq thing1 - end - end - end - - it "is independent from other stores with different prefixes" do - with_store({ prefix: "a" }) do |store_a| - store_a.init({ things_kind => { key1.to_sym => thing1 } }) - with_store({ prefix: "b" }) do |store_b| - store_b.init({ things_kind => {} }) - end - with_store({ prefix: "b" }) do |store_b1| # this ensures we're not just reading cached data - expect(store_b1.get(things_kind, key1)).to be_nil - expect(store_a.get(things_kind, key1)).to eq thing1 - end - end + it "is not initialized by default" do + with_store do |store| + expect(store.initialized?).to eq false end end @@ -96,27 +81,27 @@ def new_version_plus(f, deltaVersion, attrs = {}) end it "can get existing item with symbol key" do - with_inited_store([ thing1 ]) do |store| - expect(store.get(things_kind, key1.to_sym)).to eq thing1 + with_inited_store([ $thing1 ]) do |store| + expect(store.get($things_kind, $key1.to_sym)).to eq $thing1 end end it "can get existing item with string key" do - with_inited_store([ thing1 ]) do |store| - expect(store.get(things_kind, key1.to_s)).to eq thing1 + with_inited_store([ $thing1 ]) do |store| + expect(store.get($things_kind, 
$key1.to_s)).to eq $thing1 end end it "gets nil for nonexisting item" do - with_inited_store([ thing1 ]) do |store| - expect(store.get(things_kind, unused_key)).to be_nil + with_inited_store([ $thing1 ]) do |store| + expect(store.get($things_kind, $unused_key)).to be_nil end end it "returns nil for deleted item" do - deleted_thing = thing1.clone.merge({ deleted: true }) + deleted_thing = $thing1.clone.merge({ deleted: true }) with_inited_store([ deleted_thing ]) do |store| - expect(store.get(things_kind, key1)).to be_nil + expect(store.get($things_kind, $key1)).to be_nil end end @@ -128,8 +113,8 @@ def new_version_plus(f, deltaVersion, attrs = {}) version: 22, deleted: false } - with_inited_store([ thing1, thing2 ]) do |store| - expect(store.all(things_kind)).to eq ({ key1.to_sym => thing1, key2.to_sym => thing2 }) + with_inited_store([ $thing1, thing2 ]) do |store| + expect(store.all($things_kind)).to eq ({ $key1.to_sym => $thing1, key2.to_sym => thing2 }) end end @@ -141,60 +126,60 @@ def new_version_plus(f, deltaVersion, attrs = {}) version: 22, deleted: true } - with_inited_store([ thing1, thing2 ]) do |store| - expect(store.all(things_kind)).to eq ({ key1.to_sym => thing1 }) + with_inited_store([ $thing1, thing2 ]) do |store| + expect(store.all($things_kind)).to eq ({ $key1.to_sym => $thing1 }) end end it "can add new item" do with_inited_store([]) do |store| - store.upsert(things_kind, thing1) - expect(store.get(things_kind, key1)).to eq thing1 + store.upsert($things_kind, $thing1) + expect(store.get($things_kind, $key1)).to eq $thing1 end end it "can update item with newer version" do - with_inited_store([ thing1 ]) do |store| - thing1_mod = new_version_plus(thing1, 1, { name: thing1[:name] + ' updated' }) - store.upsert(things_kind, thing1_mod) - expect(store.get(things_kind, key1)).to eq thing1_mod + with_inited_store([ $thing1 ]) do |store| + $thing1_mod = new_version_plus($thing1, 1, { name: $thing1[:name] + ' updated' }) + store.upsert($things_kind, 
$thing1_mod) + expect(store.get($things_kind, $key1)).to eq $thing1_mod end end it "cannot update item with same version" do - with_inited_store([ thing1 ]) do |store| - thing1_mod = thing1.clone.merge({ name: thing1[:name] + ' updated' }) - store.upsert(things_kind, thing1_mod) - expect(store.get(things_kind, key1)).to eq thing1 + with_inited_store([ $thing1 ]) do |store| + $thing1_mod = $thing1.clone.merge({ name: $thing1[:name] + ' updated' }) + store.upsert($things_kind, $thing1_mod) + expect(store.get($things_kind, $key1)).to eq $thing1 end end it "cannot update feature with older version" do - with_inited_store([ thing1 ]) do |store| - thing1_mod = new_version_plus(thing1, -1, { name: thing1[:name] + ' updated' }) - store.upsert(things_kind, thing1_mod) - expect(store.get(things_kind, key1)).to eq thing1 + with_inited_store([ $thing1 ]) do |store| + $thing1_mod = new_version_plus($thing1, -1, { name: $thing1[:name] + ' updated' }) + store.upsert($things_kind, $thing1_mod) + expect(store.get($things_kind, $key1)).to eq $thing1 end end it "can delete item with newer version" do - with_inited_store([ thing1 ]) do |store| - store.delete(things_kind, key1, thing1[:version] + 1) - expect(store.get(things_kind, key1)).to be_nil + with_inited_store([ $thing1 ]) do |store| + store.delete($things_kind, $key1, $thing1[:version] + 1) + expect(store.get($things_kind, $key1)).to be_nil end end it "cannot delete item with same version" do - with_inited_store([ thing1 ]) do |store| - store.delete(things_kind, key1, thing1[:version]) - expect(store.get(things_kind, key1)).to eq thing1 + with_inited_store([ $thing1 ]) do |store| + store.delete($things_kind, $key1, $thing1[:version]) + expect(store.get($things_kind, $key1)).to eq $thing1 end end it "cannot delete item with older version" do - with_inited_store([ thing1 ]) do |store| - store.delete(things_kind, key1, thing1[:version] - 1) - expect(store.get(things_kind, key1)).to eq thing1 + with_inited_store([ $thing1 ]) do 
|store| + store.delete($things_kind, $key1, $thing1[:version] - 1) + expect(store.get($things_kind, $key1)).to eq $thing1 end end @@ -211,3 +196,77 @@ def new_version_plus(f, deltaVersion, attrs = {}) end end end + +shared_examples "persistent_feature_store" do |store_tester_class| + base_options = { logger: $null_logger } + + # We'll loop through permutations of the following parameters. Note: in the future, the caching logic will + # be separated out and implemented at a higher level of the SDK, so we won't have to test it for individual + # persistent store implementations. Currently caching *is* implemented in a shared class (CachingStoreWrapper), + # but the individual store implementations are wrapping themselves in that class, so they can't be tested + # separately from it. + + caching_test_groups = [ + ["with caching", { expiration: 60 }], + ["without caching", { expiration: 0 }] + ] + prefix_test_groups = [ + ["with default prefix", {}], + ["with specified prefix", { prefix: "testprefix" }] + ] + + caching_test_groups.each do |test_group_description, caching_options| + context(test_group_description) do + + prefix_test_groups.each do |subgroup_description, prefix_options| + # The following tests are done for each permutation of (caching/no caching) and (default prefix/specified prefix) + context(subgroup_description) do + options = caching_options.merge(prefix_options).merge(base_options) + + store_tester = store_tester_class.new(base_options) + + before(:each) { store_tester.clear_data } + + include_examples "any_feature_store", store_tester + + it "can detect if another instance has initialized the store" do + ensure_stop(store_tester.create_feature_store) do |store1| + store1.init({}) + ensure_stop(store_tester.create_feature_store) do |store2| + expect(store2.initialized?).to eq true + end + end + end + + it "can read data written by another instance" do + ensure_stop(store_tester.create_feature_store) do |store1| + store1.init({ $things_kind => { 
$key1.to_sym => $thing1 } }) + ensure_stop(store_tester.create_feature_store) do |store2| + expect(store2.get($things_kind, $key1)).to eq $thing1 + end + end + end + end + end + + # The following tests are done for each permutation of (caching/no caching) + it "is independent from other stores with different prefixes" do + factory_a = store_tester_class.new({ prefix: "a" }.merge(caching_options).merge(base_options)) + factory_b = store_tester_class.new({ prefix: "b" }.merge(caching_options).merge(base_options)) + factory_a.clear_data + factory_b.clear_data + + ensure_stop(factory_a.create_feature_store) do |store_a| + store_a.init({ $things_kind => { $key1.to_sym => $thing1 } }) + ensure_stop(factory_b.create_feature_store) do |store_b1| + store_b1.init({ $things_kind => {} }) + end + ensure_stop(factory_b.create_feature_store) do |store_b2| # this ensures we're not just reading cached data + expect(store_b2.get($things_kind, $key1)).to be_nil + expect(store_a.get($things_kind, $key1)).to eq $thing1 + end + end + end + end + end +end diff --git a/spec/impl/big_segments_spec.rb b/spec/impl/big_segments_spec.rb new file mode 100644 index 00000000..89637653 --- /dev/null +++ b/spec/impl/big_segments_spec.rb @@ -0,0 +1,225 @@ +require "ldclient-rb/config" +require "ldclient-rb/impl/big_segments" + +require "concurrent/atomics" + +require "mock_components" +require "spec_helper" + +module LaunchDarkly + module Impl + describe BigSegmentStoreManager do + subject { BigSegmentStoreManager } + + let(:user_key) { 'userkey' } + let(:user_hash) { subject.hash_for_user_key(user_key) } + let(:null_logger) { double.as_null_object } + + def always_up_to_date + Interfaces::BigSegmentStoreMetadata.new(Util.current_time_millis) + end + + def always_stale + Interfaces::BigSegmentStoreMetadata.new(0) + end + + def with_manager(config) + manager = subject.new(config, null_logger) + begin + yield manager + ensure + manager.stop + end + end + + context "membership query" do + it "with 
uncached result and healthy status" do + expected_membership = { 'key1' => true, 'key2' => true } + store = double + expect(store).to receive(:get_metadata).at_least(:once).and_return(always_up_to_date) + expect(store).to receive(:get_membership).with(user_hash).and_return(expected_membership) + allow(store).to receive(:stop) + + with_manager(BigSegmentsConfig.new(store: store)) do |m| + expected_result = BigSegmentMembershipResult.new(expected_membership, BigSegmentsStatus::HEALTHY) + expect(m.get_user_membership(user_key)).to eq(expected_result) + end + end + + it "with cached result and healthy status" do + expected_membership = { 'key1' => true, 'key2' => true } + store = double + expect(store).to receive(:get_metadata).at_least(:once).and_return(always_up_to_date) + expect(store).to receive(:get_membership).with(user_hash).once.and_return(expected_membership) + # the ".once" on this mock expectation is what verifies that the cache is working; there should only be one query + allow(store).to receive(:stop) + + with_manager(BigSegmentsConfig.new(store: store)) do |m| + expected_result = BigSegmentMembershipResult.new(expected_membership, BigSegmentsStatus::HEALTHY) + expect(m.get_user_membership(user_key)).to eq(expected_result) + expect(m.get_user_membership(user_key)).to eq(expected_result) + end + end + + it "can cache a nil result" do + store = double + expect(store).to receive(:get_metadata).at_least(:once).and_return(always_up_to_date) + expect(store).to receive(:get_membership).with(user_hash).once.and_return(nil) + # the ".once" on this mock expectation is what verifies that the cache is working; there should only be one query + allow(store).to receive(:stop) + + with_manager(BigSegmentsConfig.new(store: store)) do |m| + expected_result = BigSegmentMembershipResult.new({}, BigSegmentsStatus::HEALTHY) + expect(m.get_user_membership(user_key)).to eq(expected_result) + expect(m.get_user_membership(user_key)).to eq(expected_result) + end + end + + it "cache 
can expire" do + expected_membership = { 'key1' => true, 'key2' => true } + store = double + expect(store).to receive(:get_metadata).at_least(:once).and_return(always_up_to_date) + expect(store).to receive(:get_membership).with(user_hash).twice.and_return(expected_membership) + # the ".twice" on this mock expectation is what verifies that the cached result expired + allow(store).to receive(:stop) + + with_manager(BigSegmentsConfig.new(store: store, user_cache_time: 0.01)) do |m| + expected_result = BigSegmentMembershipResult.new(expected_membership, BigSegmentsStatus::HEALTHY) + expect(m.get_user_membership(user_key)).to eq(expected_result) + sleep(0.1) + expect(m.get_user_membership(user_key)).to eq(expected_result) + end + end + + it "with stale status" do + expected_membership = { 'key1' => true, 'key2' => true } + store = double + expect(store).to receive(:get_metadata).at_least(:once).and_return(always_stale) + expect(store).to receive(:get_membership).with(user_hash).and_return(expected_membership) + allow(store).to receive(:stop) + + with_manager(BigSegmentsConfig.new(store: store)) do |m| + expected_result = BigSegmentMembershipResult.new(expected_membership, BigSegmentsStatus::STALE) + expect(m.get_user_membership(user_key)).to eq(expected_result) + end + end + + it "with stale status due to no store metadata" do + expected_membership = { 'key1' => true, 'key2' => true } + store = double + expect(store).to receive(:get_metadata).at_least(:once).and_return(nil) + expect(store).to receive(:get_membership).with(user_hash).and_return(expected_membership) + allow(store).to receive(:stop) + + with_manager(BigSegmentsConfig.new(store: store)) do |m| + expected_result = BigSegmentMembershipResult.new(expected_membership, BigSegmentsStatus::STALE) + expect(m.get_user_membership(user_key)).to eq(expected_result) + end + end + + it "least recent user is evicted from cache" do + user_key_1, user_key_2, user_key_3 = 'userkey1', 'userkey2', 'userkey3' + user_hash_1, 
user_hash_2, user_hash_3 = subject.hash_for_user_key(user_key_1), + subject.hash_for_user_key(user_key_2), subject.hash_for_user_key(user_key_3) + memberships = { + user_hash_1 => { 'seg1': true }, + user_hash_2 => { 'seg2': true }, + user_hash_3 => { 'seg3': true } + } + queried_users = [] + store = double + expect(store).to receive(:get_metadata).at_least(:once).and_return(always_up_to_date) + expect(store).to receive(:get_membership).exactly(4).times do |key| + queried_users << key + memberships[key] + end + allow(store).to receive(:stop) + + with_manager(BigSegmentsConfig.new(store: store, user_cache_size: 2)) do |m| + result1 = m.get_user_membership(user_key_1) + result2 = m.get_user_membership(user_key_2) + result3 = m.get_user_membership(user_key_3) + expect(result1).to eq(BigSegmentMembershipResult.new(memberships[user_hash_1], BigSegmentsStatus::HEALTHY)) + expect(result2).to eq(BigSegmentMembershipResult.new(memberships[user_hash_2], BigSegmentsStatus::HEALTHY)) + expect(result3).to eq(BigSegmentMembershipResult.new(memberships[user_hash_3], BigSegmentsStatus::HEALTHY)) + + expect(queried_users).to eq([user_hash_1, user_hash_2, user_hash_3]) + + # Since the capacity is only 2 and user_key_1 was the least recently used, that key should be + # evicted by the user_key_3 query. Now only user_key_2 and user_key_3 are in the cache, and + # querying them again should not cause a new query to the store. 
+ + result2a = m.get_user_membership(user_key_2) + result3a = m.get_user_membership(user_key_3) + expect(result2a).to eq(result2) + expect(result3a).to eq(result3) + + expect(queried_users).to eq([user_hash_1, user_hash_2, user_hash_3]) + + result1a = m.get_user_membership(user_key_1) + expect(result1a).to eq(result1) + + expect(queried_users).to eq([user_hash_1, user_hash_2, user_hash_3, user_hash_1]) + end + end + end + + context "status polling" do + it "detects store unavailability" do + store = double + should_fail = Concurrent::AtomicBoolean.new(false) + expect(store).to receive(:get_metadata).at_least(:once) do + throw "sorry" if should_fail.value + always_up_to_date + end + allow(store).to receive(:stop) + + statuses = Queue.new + with_manager(BigSegmentsConfig.new(store: store, status_poll_interval: 0.01)) do |m| + m.status_provider.add_observer(SimpleObserver.new(->(value) { statuses << value })) + + status1 = statuses.pop() + expect(status1.available).to be(true) + + should_fail.make_true + + status2 = statuses.pop() + expect(status2.available).to be(false) + + should_fail.make_false + + status3 = statuses.pop() + expect(status3.available).to be(true) + end + end + + it "detects stale status" do + store = double + should_be_stale = Concurrent::AtomicBoolean.new(false) + expect(store).to receive(:get_metadata).at_least(:once) do + should_be_stale.value ? 
always_stale : always_up_to_date + end + allow(store).to receive(:stop) + + statuses = Queue.new + with_manager(BigSegmentsConfig.new(store: store, status_poll_interval: 0.01)) do |m| + m.status_provider.add_observer(SimpleObserver.new(->(value) { statuses << value })) + + status1 = statuses.pop() + expect(status1.stale).to be(false) + + should_be_stale.make_true + + status2 = statuses.pop() + expect(status2.stale).to be(true) + + should_be_stale.make_false + + status3 = statuses.pop() + expect(status3.stale).to be(false) + end + end + end + end + end +end diff --git a/spec/impl/evaluator_big_segments_spec.rb b/spec/impl/evaluator_big_segments_spec.rb new file mode 100644 index 00000000..b8a9e2e4 --- /dev/null +++ b/spec/impl/evaluator_big_segments_spec.rb @@ -0,0 +1,160 @@ +require "ldclient-rb/impl/big_segments" + +require "spec_helper" +require "impl/evaluator_spec_base" + +module LaunchDarkly + module Impl + describe "Evaluator (big segments)", :evaluator_spec_base => true do + subject { Evaluator } + + it "segment is not matched if there is no way to query it" do + segment = { + key: 'test', + included: [ user[:key] ], # included should be ignored for a big segment + version: 1, + unbounded: true, + generation: 1 + } + e = EvaluatorBuilder.new(logger). + with_segment(segment). + build + flag = boolean_flag_with_clauses([make_segment_match_clause(segment)]) + result = e.evaluate(flag, user, factory) + expect(result.detail.value).to be false + expect(result.detail.reason.big_segments_status).to be(BigSegmentsStatus::NOT_CONFIGURED) + end + + it "segment with no generation is not matched" do + segment = { + key: 'test', + included: [ user[:key] ], # included should be ignored for a big segment + version: 1, + unbounded: true + } + e = EvaluatorBuilder.new(logger). + with_segment(segment). 
+ build + flag = boolean_flag_with_clauses([make_segment_match_clause(segment)]) + result = e.evaluate(flag, user, factory) + expect(result.detail.value).to be false + expect(result.detail.reason.big_segments_status).to be(BigSegmentsStatus::NOT_CONFIGURED) + end + + it "matched with include" do + segment = { + key: 'test', + version: 1, + unbounded: true, + generation: 2 + } + e = EvaluatorBuilder.new(logger). + with_segment(segment). + with_big_segment_for_user(user, segment, true). + build + flag = boolean_flag_with_clauses([make_segment_match_clause(segment)]) + result = e.evaluate(flag, user, factory) + expect(result.detail.value).to be true + expect(result.detail.reason.big_segments_status).to be(BigSegmentsStatus::HEALTHY) + end + + it "matched with rule" do + segment = { + key: 'test', + version: 1, + unbounded: true, + generation: 2, + rules: [ + { clauses: [ make_user_matching_clause(user) ] } + ] + } + e = EvaluatorBuilder.new(logger). + with_segment(segment). + with_big_segment_for_user(user, segment, nil). + build + flag = boolean_flag_with_clauses([make_segment_match_clause(segment)]) + result = e.evaluate(flag, user, factory) + expect(result.detail.value).to be true + expect(result.detail.reason.big_segments_status).to be(BigSegmentsStatus::HEALTHY) + end + + it "unmatched by exclude regardless of rule" do + segment = { + key: 'test', + version: 1, + unbounded: true, + generation: 2, + rules: [ + { clauses: [ make_user_matching_clause(user) ] } + ] + }; + e = EvaluatorBuilder.new(logger). + with_segment(segment). + with_big_segment_for_user(user, segment, false). 
+ build + flag = boolean_flag_with_clauses([make_segment_match_clause(segment)]) + result = e.evaluate(flag, user, factory) + expect(result.detail.value).to be false + expect(result.detail.reason.big_segments_status).to be(BigSegmentsStatus::HEALTHY) + end + + it "status is returned from provider" do + segment = { + key: 'test', + version: 1, + unbounded: true, + generation: 2 + } + e = EvaluatorBuilder.new(logger). + with_segment(segment). + with_big_segment_for_user(user, segment, true). + with_big_segments_status(BigSegmentsStatus::STALE). + build + flag = boolean_flag_with_clauses([make_segment_match_clause(segment)]) + result = e.evaluate(flag, user, factory) + expect(result.detail.value).to be true + expect(result.detail.reason.big_segments_status).to be(BigSegmentsStatus::STALE) + end + + it "queries state only once per user even if flag references multiple segments" do + segment1 = { + key: 'segmentkey1', + version: 1, + unbounded: true, + generation: 2 + } + segment2 = { + key: 'segmentkey2', + version: 1, + unbounded: true, + generation: 3 + } + flag = { + key: 'key', + on: true, + fallthrough: { variation: 0 }, + variations: [ false, true ], + rules: [ + { variation: 1, clauses: [ make_segment_match_clause(segment1) ]}, + { variation: 1, clauses: [ make_segment_match_clause(segment2) ]} + ] + } + + queries = [] + e = EvaluatorBuilder.new(logger). + with_segment(segment1).with_segment(segment2). + with_big_segment_for_user(user, segment2, true). + record_big_segments_queries(queries). + build + # The membership deliberately does not include segment1, because we want the first rule to be + # a non-match so that it will continue on and check segment2 as well. 
+ + result = e.evaluate(flag, user, factory) + expect(result.detail.value).to be true + expect(result.detail.reason.big_segments_status).to be(BigSegmentsStatus::HEALTHY) + + expect(queries).to eq([ user[:key] ]) + end + end + end +end diff --git a/spec/impl/evaluator_segment_spec.rb b/spec/impl/evaluator_segment_spec.rb index 64fb1bc7..5cd85148 100644 --- a/spec/impl/evaluator_segment_spec.rb +++ b/spec/impl/evaluator_segment_spec.rb @@ -9,7 +9,7 @@ module Impl def test_segment_match(segment) clause = make_segment_match_clause(segment) flag = boolean_flag_with_clauses([clause]) - e = Evaluator.new(get_nothing, get_things({ segment[:key] => segment }), logger) + e = EvaluatorBuilder.new(logger).with_segment(segment).build e.evaluate(flag, user, factory).detail.value end @@ -20,17 +20,13 @@ def test_segment_match(segment) version: 1, deleted: false } - get_segment = get_things({ 'segkey' => segment }) - e = subject.new(get_nothing, get_segment, logger) - user = { key: 'userkey' } - clause = { attribute: '', op: 'segmentMatch', values: ['segkey'] } - flag = boolean_flag_with_clauses([clause]) + e = EvaluatorBuilder.new(logger).with_segment(segment).build + flag = boolean_flag_with_clauses([make_segment_match_clause(segment)]) expect(e.evaluate(flag, user, factory).detail.value).to be true end it "falls through with no errors if referenced segment is not found" do - e = subject.new(get_nothing, get_things({ 'segkey' => nil }), logger) - user = { key: 'userkey' } + e = EvaluatorBuilder.new(logger).with_unknown_segment('segkey').build clause = { attribute: '', op: 'segmentMatch', values: ['segkey'] } flag = boolean_flag_with_clauses([clause]) expect(e.evaluate(flag, user, factory).detail.value).to be false diff --git a/spec/impl/evaluator_spec.rb b/spec/impl/evaluator_spec.rb index 543b524d..15766866 100644 --- a/spec/impl/evaluator_spec.rb +++ b/spec/impl/evaluator_spec.rb @@ -79,7 +79,7 @@ module Impl } user = { key: 'x' } detail = EvaluationDetail.new('b', 1, 
EvaluationReason::prerequisite_failed('badfeature')) - e = subject.new(get_things( 'badfeature' => nil ), get_nothing, logger) + e = EvaluatorBuilder.new(logger).with_unknown_flag('badfeature').build result = e.evaluate(flag, user, factory) expect(result.detail).to eq(detail) expect(result.events).to eq(nil) @@ -96,7 +96,7 @@ module Impl } Model.postprocess_item_after_deserializing!(FEATURES, flag) # now there's a cached reason user = { key: 'x' } - e = subject.new(get_things( 'badfeature' => nil ), get_nothing, logger) + e = EvaluatorBuilder.new(logger).with_unknown_flag('badfeature').build result1 = e.evaluate(flag, user, factory) expect(result1.detail.reason).to eq EvaluationReason::prerequisite_failed('badfeature') result2 = e.evaluate(flag, user, factory) @@ -126,8 +126,7 @@ module Impl events_should_be = [{ kind: 'feature', key: 'feature1', user: user, value: nil, default: nil, variation: nil, version: 2, prereqOf: 'feature0' }] - get_flag = get_things('feature1' => flag1, 'feature2' => nil) - e = subject.new(get_flag, get_nothing, logger) + e = EvaluatorBuilder.new(logger).with_flag(flag1).with_unknown_flag('feature2').build result = e.evaluate(flag, user, factory) expect(result.detail).to eq(detail) expect(result.events).to eq(events_should_be) @@ -157,8 +156,7 @@ module Impl events_should_be = [{ kind: 'feature', key: 'feature1', user: user, variation: 1, value: 'e', default: nil, version: 2, prereqOf: 'feature0' }] - get_flag = get_things({ 'feature1' => flag1 }) - e = subject.new(get_flag, get_nothing, logger) + e = EvaluatorBuilder.new(logger).with_flag(flag1).build result = e.evaluate(flag, user, factory) expect(result.detail).to eq(detail) expect(result.events).to eq(events_should_be) @@ -186,8 +184,7 @@ module Impl events_should_be = [{ kind: 'feature', key: 'feature1', user: user, variation: 0, value: 'd', default: nil, version: 2, prereqOf: 'feature0' }] - get_flag = get_things({ 'feature1' => flag1 }) - e = subject.new(get_flag, get_nothing, 
logger) + e = EvaluatorBuilder.new(logger).with_flag(flag1).build result = e.evaluate(flag, user, factory) expect(result.detail).to eq(detail) expect(result.events).to eq(events_should_be) @@ -215,8 +212,7 @@ module Impl events_should_be = [{ kind: 'feature', key: 'feature1', user: user, variation: 1, value: 'e', default: nil, version: 2, prereqOf: 'feature0' }] - get_flag = get_things({ 'feature1' => flag1 }) - e = subject.new(get_flag, get_nothing, logger) + e = EvaluatorBuilder.new(logger).with_flag(flag1).build result = e.evaluate(flag, user, factory) expect(result.detail).to eq(detail) expect(result.events).to eq(events_should_be) diff --git a/spec/impl/evaluator_spec_base.rb b/spec/impl/evaluator_spec_base.rb index fa8b86c3..da8662ac 100644 --- a/spec/impl/evaluator_spec_base.rb +++ b/spec/impl/evaluator_spec_base.rb @@ -1,7 +1,79 @@ +require "ldclient-rb/impl/big_segments" + require "spec_helper" module LaunchDarkly module Impl + class EvaluatorBuilder + def initialize(logger) + @flags = {} + @segments = {} + @big_segment_memberships = {} + @big_segments_status = BigSegmentsStatus::HEALTHY + @big_segments_queries = [] + @logger = logger + end + + def with_flag(flag) + @flags[flag[:key]] = flag + self + end + + def with_unknown_flag(key) + @flags[key] = nil + self + end + + def with_segment(segment) + @segments[segment[:key]] = segment + self + end + + def with_unknown_segment(key) + @segments[key] = nil + self + end + + def with_big_segment_for_user(user, segment, included) + user_key = user[:key] + @big_segment_memberships[user_key] = {} if !@big_segment_memberships.has_key?(user_key) + @big_segment_memberships[user_key][Evaluator.make_big_segment_ref(segment)] = included + self + end + + def with_big_segments_status(status) + @big_segments_status = status + self + end + + def record_big_segments_queries(destination) + @big_segments_queries = destination + self + end + + def build + Evaluator.new(method(:get_flag), method(:get_segment), + 
@big_segment_memberships.empty? ? nil : method(:get_big_segments), + @logger) + end + + private def get_flag(key) + raise "should not have requested flag #{key}" if !@flags.has_key?(key) + @flags[key] + end + + private def get_segment(key) + raise "should not have requested segment #{key}" if !@segments.has_key?(key) + @segments[key] + end + + private def get_big_segments(user_key) + raise "should not have requested big segments for #{user_key}" if !@big_segment_memberships.has_key?(user_key) + @big_segments_queries << user_key + BigSegmentMembershipResult.new(@big_segment_memberships[user_key], @big_segments_status) + end + end + module EvaluatorSpecBase def factory EventFactory.new(false) @@ -19,19 +91,8 @@ def logger ::Logger.new($stdout, level: ::Logger::FATAL) end - def get_nothing - lambda { |key| raise "should not have requested #{key}" } - end - - def get_things(map) - lambda { |key| - raise "should not have requested #{key}" if !map.has_key?(key) - map[key] - } - end - def basic_evaluator - subject.new(get_nothing, get_nothing, logger) + EvaluatorBuilder.new(logger).build end def boolean_flag_with_rules(rules) @@ -42,7 +103,7 @@ def boolean_flag_with_clauses(clauses) boolean_flag_with_rules([{ id: 'ruleid', clauses: clauses, variation: 1 }]) end - def make_user_matching_clause(user, attr) + def make_user_matching_clause(user, attr = :key) { attribute: attr.to_s, op: :in, diff --git a/spec/impl/repeating_task_spec.rb b/spec/impl/repeating_task_spec.rb new file mode 100644 index 00000000..ba780d78 --- /dev/null +++ b/spec/impl/repeating_task_spec.rb @@ -0,0 +1,78 @@ +require "ldclient-rb/impl/repeating_task" + +require "concurrent/atomics" + +require "spec_helper" + +module LaunchDarkly + module Impl + describe RepeatingTask do + def null_logger + double().as_null_object + end + + it "does not start when created" do + signal = Concurrent::Event.new + task = RepeatingTask.new(0.01, 0, -> { signal.set }, null_logger) + begin + expect(signal.wait(0.1)).to be 
false + ensure + task.stop + end + end + + it "executes until stopped" do + queue = Queue.new + task = RepeatingTask.new(0.1, 0, -> { queue << Time.now }, null_logger) + begin + last = nil + task.start + 3.times do + time = queue.pop + if !last.nil? + expect(time.to_f - last.to_f).to be >=(0.05) + end + last = time + end + ensure + task.stop + stopped_time = Time.now + end + no_more_items = false + 2.times do + begin + time = queue.pop(true) + expect(time.to_f).to be <=(stopped_time.to_f) + rescue ThreadError + no_more_items = true + break + end + end + expect(no_more_items).to be true + end + + it "can be stopped from within the task" do + counter = 0 + stopped = Concurrent::Event.new + task = RepeatingTask.new(0.01, 0, + -> { + counter += 1 + if counter >= 2 + task.stop + stopped.set + end + }, + null_logger) + begin + task.start + expect(stopped.wait(0.1)).to be true + expect(counter).to be 2 + sleep(0.1) + expect(counter).to be 2 + ensure + task.stop + end + end + end + end +end diff --git a/spec/in_memory_feature_store_spec.rb b/spec/in_memory_feature_store_spec.rb index c403fc69..1d56078f 100644 --- a/spec/in_memory_feature_store_spec.rb +++ b/spec/in_memory_feature_store_spec.rb @@ -1,12 +1,14 @@ require "feature_store_spec_base" require "spec_helper" -def create_in_memory_store(opts = {}) - LaunchDarkly::InMemoryFeatureStore.new +class InMemoryStoreTester + def create_feature_store + LaunchDarkly::InMemoryFeatureStore.new + end end describe LaunchDarkly::InMemoryFeatureStore do subject { LaunchDarkly::InMemoryFeatureStore } - - include_examples "feature_store", method(:create_in_memory_store) + + include_examples "any_feature_store", InMemoryStoreTester.new end diff --git a/spec/integrations/consul_feature_store_spec.rb b/spec/integrations/consul_feature_store_spec.rb index bad1e736..e73858fa 100644 --- a/spec/integrations/consul_feature_store_spec.rb +++ b/spec/integrations/consul_feature_store_spec.rb @@ -2,39 +2,34 @@ require "diplomat" require 
"spec_helper" - -$my_prefix = 'testprefix' +# These tests will all fail if there isn't a local Consul instance running. +# They can be disabled with LD_SKIP_DATABASE_TESTS=1 $consul_base_opts = { prefix: $my_prefix, logger: $null_log } -def create_consul_store(opts = {}) - LaunchDarkly::Integrations::Consul::new_feature_store( - $consul_base_opts.merge(opts).merge({ expiration: 60 })) -end +class ConsulStoreTester + def initialize(options) + @options = options + @actual_prefix = @options[:prefix] || LaunchDarkly::Integrations::Consul.default_prefix + end -def create_consul_store_uncached(opts = {}) - LaunchDarkly::Integrations::Consul::new_feature_store( - $consul_base_opts.merge(opts).merge({ expiration: 0 })) -end + def clear_data + Diplomat::Kv.delete(@actual_prefix + '/', recurse: true) + end -def clear_all_data - Diplomat::Kv.delete($my_prefix + '/', recurse: true) + def create_feature_store + LaunchDarkly::Integrations::Consul.new_feature_store(@options) + end end describe "Consul feature store" do break if ENV['LD_SKIP_DATABASE_TESTS'] == '1' - # These tests will all fail if there isn't a local Consul instance running. - - context "with local cache" do - include_examples "feature_store", method(:create_consul_store), method(:clear_all_data) - end - - context "without local cache" do - include_examples "feature_store", method(:create_consul_store_uncached), method(:clear_all_data) - end + include_examples "persistent_feature_store", ConsulStoreTester end + +# There isn't a Big Segments integration for Consul. 
diff --git a/spec/integrations/dynamodb_feature_store_spec.rb b/spec/integrations/dynamodb_feature_store_spec.rb deleted file mode 100644 index 3b95edc8..00000000 --- a/spec/integrations/dynamodb_feature_store_spec.rb +++ /dev/null @@ -1,103 +0,0 @@ -require "feature_store_spec_base" -require "aws-sdk-dynamodb" -require "spec_helper" - - -$table_name = 'LD_DYNAMODB_TEST_TABLE' -$endpoint = 'http://localhost:8000' -$my_prefix = 'testprefix' - -$dynamodb_opts = { - credentials: Aws::Credentials.new("key", "secret"), - region: "us-east-1", - endpoint: $endpoint -} - -$ddb_base_opts = { - dynamodb_opts: $dynamodb_opts, - prefix: $my_prefix, - logger: $null_log -} - -def create_dynamodb_store(opts = {}) - LaunchDarkly::Integrations::DynamoDB::new_feature_store($table_name, - $ddb_base_opts.merge(opts).merge({ expiration: 60 })) -end - -def create_dynamodb_store_uncached(opts = {}) - LaunchDarkly::Integrations::DynamoDB::new_feature_store($table_name, - $ddb_base_opts.merge(opts).merge({ expiration: 0 })) -end - -def clear_all_data - client = create_test_client - items_to_delete = [] - req = { - table_name: $table_name, - projection_expression: '#namespace, #key', - expression_attribute_names: { - '#namespace' => 'namespace', - '#key' => 'key' - } - } - while true - resp = client.scan(req) - items_to_delete = items_to_delete + resp.items - break if resp.last_evaluated_key.nil? 
|| resp.last_evaluated_key.length == 0 - req.exclusive_start_key = resp.last_evaluated_key - end - requests = items_to_delete.map do |item| - { delete_request: { key: item } } - end - LaunchDarkly::Impl::Integrations::DynamoDB::DynamoDBUtil.batch_write_requests(client, $table_name, requests) -end - -def create_table_if_necessary - client = create_test_client - begin - client.describe_table({ table_name: $table_name }) - return # no error, table exists - rescue Aws::DynamoDB::Errors::ResourceNotFoundException - # fall through to code below - we'll create the table - end - - req = { - table_name: $table_name, - key_schema: [ - { attribute_name: "namespace", key_type: "HASH" }, - { attribute_name: "key", key_type: "RANGE" } - ], - attribute_definitions: [ - { attribute_name: "namespace", attribute_type: "S" }, - { attribute_name: "key", attribute_type: "S" } - ], - provisioned_throughput: { - read_capacity_units: 1, - write_capacity_units: 1 - } - } - client.create_table(req) - - # When DynamoDB creates a table, it may not be ready to use immediately -end - -def create_test_client - Aws::DynamoDB::Client.new($dynamodb_opts) -end - - -describe "DynamoDB feature store" do - break if ENV['LD_SKIP_DATABASE_TESTS'] == '1' - - # These tests will all fail if there isn't a local DynamoDB instance running. 
- - create_table_if_necessary - - context "with local cache" do - include_examples "feature_store", method(:create_dynamodb_store), method(:clear_all_data) - end - - context "without local cache" do - include_examples "feature_store", method(:create_dynamodb_store_uncached), method(:clear_all_data) - end -end diff --git a/spec/integrations/dynamodb_stores_spec.rb b/spec/integrations/dynamodb_stores_spec.rb new file mode 100644 index 00000000..8f7c5c07 --- /dev/null +++ b/spec/integrations/dynamodb_stores_spec.rb @@ -0,0 +1,150 @@ +require "big_segment_store_spec_base" +require "feature_store_spec_base" +require "aws-sdk-dynamodb" +require "spec_helper" + +# These tests will all fail if there isn't a local DynamoDB instance running. +# They can be disabled with LD_SKIP_DATABASE_TESTS=1 + +$DynamoDBBigSegmentStore = LaunchDarkly::Impl::Integrations::DynamoDB::DynamoDBBigSegmentStore + +class DynamoDBStoreTester + TABLE_NAME = 'LD_DYNAMODB_TEST_TABLE' + DYNAMODB_OPTS = { + credentials: Aws::Credentials.new("key", "secret"), + region: "us-east-1", + endpoint: "http://localhost:8000" + } + FEATURE_STORE_BASE_OPTS = { + dynamodb_opts: DYNAMODB_OPTS, + prefix: 'testprefix', + logger: $null_log + } + + def initialize(options = {}) + @options = options.clone + @options[:dynamodb_opts] = DYNAMODB_OPTS + @actual_prefix = options[:prefix] ? 
"#{options[:prefix]}:" : "" + end + + def self.create_test_client + Aws::DynamoDB::Client.new(DYNAMODB_OPTS) + end + + def self.create_table_if_necessary + client = create_test_client + begin + client.describe_table({ table_name: TABLE_NAME }) + return # no error, table exists + rescue Aws::DynamoDB::Errors::ResourceNotFoundException + # fall through to code below - we'll create the table + end + + req = { + table_name: TABLE_NAME, + key_schema: [ + { attribute_name: "namespace", key_type: "HASH" }, + { attribute_name: "key", key_type: "RANGE" } + ], + attribute_definitions: [ + { attribute_name: "namespace", attribute_type: "S" }, + { attribute_name: "key", attribute_type: "S" } + ], + provisioned_throughput: { + read_capacity_units: 1, + write_capacity_units: 1 + } + } + client.create_table(req) + + # When DynamoDB creates a table, it may not be ready to use immediately + end + + def clear_data + client = self.class.create_test_client + items_to_delete = [] + req = { + table_name: TABLE_NAME, + projection_expression: '#namespace, #key', + expression_attribute_names: { + '#namespace' => 'namespace', + '#key' => 'key' + } + } + while true + resp = client.scan(req) + resp.items.each do |item| + if !@actual_prefix || item["namespace"].start_with?(@actual_prefix) + items_to_delete.push(item) + end + end + break if resp.last_evaluated_key.nil? 
|| resp.last_evaluated_key.length == 0 + req.exclusive_start_key = resp.last_evaluated_key + end + requests = items_to_delete.map do |item| + { delete_request: { key: item } } + end + LaunchDarkly::Impl::Integrations::DynamoDB::DynamoDBUtil.batch_write_requests(client, TABLE_NAME, requests) + end + + def create_feature_store + LaunchDarkly::Integrations::DynamoDB::new_feature_store(TABLE_NAME, @options) + end + + def create_big_segment_store + LaunchDarkly::Integrations::DynamoDB::new_big_segment_store(TABLE_NAME, @options) + end + + def set_big_segments_metadata(metadata) + client = self.class.create_test_client + key = @actual_prefix + $DynamoDBBigSegmentStore::KEY_METADATA + client.put_item( + table_name: TABLE_NAME, + item: { + "namespace" => key, + "key" => key, + $DynamoDBBigSegmentStore::ATTR_SYNC_TIME => metadata.last_up_to_date + } + ) + end + + def set_big_segments(user_hash, includes, excludes) + client = self.class.create_test_client + sets = { + $DynamoDBBigSegmentStore::ATTR_INCLUDED => Set.new(includes), + $DynamoDBBigSegmentStore::ATTR_EXCLUDED => Set.new(excludes) + } + sets.each do |attr_name, values| + if !values.empty? 
+ client.update_item( + table_name: TABLE_NAME, + key: { + "namespace" => @actual_prefix + $DynamoDBBigSegmentStore::KEY_USER_DATA, + "key" => user_hash + }, + update_expression: "ADD #{attr_name} :value", + expression_attribute_values: { + ":value" => values + } + ) + end + end + end +end + + +describe "DynamoDB feature store" do + break if ENV['LD_SKIP_DATABASE_TESTS'] == '1' + + DynamoDBStoreTester.create_table_if_necessary + + include_examples "persistent_feature_store", DynamoDBStoreTester +end + +describe "DynamoDB big segment store" do + break if ENV['LD_SKIP_DATABASE_TESTS'] == '1' + + DynamoDBStoreTester.create_table_if_necessary + + include_examples "big_segment_store", DynamoDBStoreTester +end diff --git a/spec/file_data_source_spec.rb b/spec/integrations/file_data_source_spec.rb similarity index 96% rename from spec/file_data_source_spec.rb rename to spec/integrations/file_data_source_spec.rb index 212d057b..ce756fb6 100644 --- a/spec/file_data_source_spec.rb +++ b/spec/integrations/file_data_source_spec.rb @@ -9,7 +9,7 @@ def []=(key, value) end end -describe LaunchDarkly::FileDataSource do +describe LaunchDarkly::Integrations::FileData do let(:full_flag_1_key) { "flag1" } let(:full_flag_1_value) { "on" } let(:flag_value_1_key) { "flag2" } @@ -114,7 +114,7 @@ def make_temp_file(content) end def with_data_source(options) - factory = LaunchDarkly::FileDataSource.factory(options) + factory = LaunchDarkly::Integrations::FileData.data_source(options) ds = factory.call('', @config) begin yield ds @@ -246,7 +246,7 @@ def test_auto_reload(options) it "evaluates simplified flag with client as expected" do file = make_temp_file(all_properties_json) - factory = LaunchDarkly::FileDataSource.factory({ paths: file.path }) + factory = LaunchDarkly::Integrations::FileData.data_source({ paths: file.path }) config = LaunchDarkly::Config.new(send_events: false, data_source: factory) client = LaunchDarkly::LDClient.new('sdkKey', config) @@ -260,7 +260,7 @@ def 
test_auto_reload(options) it "evaluates full flag with client as expected" do file = make_temp_file(all_properties_json) - factory = LaunchDarkly::FileDataSource.factory({ paths: file.path }) + factory = LaunchDarkly::Integrations::FileData.data_source({ paths: file.path }) config = LaunchDarkly::Config.new(send_events: false, data_source: factory) client = LaunchDarkly::LDClient.new('sdkKey', config) diff --git a/spec/integrations/redis_stores_spec.rb b/spec/integrations/redis_stores_spec.rb new file mode 100644 index 00000000..4f26cbb0 --- /dev/null +++ b/spec/integrations/redis_stores_spec.rb @@ -0,0 +1,152 @@ +require "ldclient-rb/impl/integrations/redis_impl" + +require "big_segment_store_spec_base" +require "feature_store_spec_base" +require "spec_helper" + +require "redis" + +# These tests will all fail if there isn't a local Redis instance running. +# They can be disabled with LD_SKIP_DATABASE_TESTS=1 + +$RedisBigSegmentStore = LaunchDarkly::Impl::Integrations::Redis::RedisBigSegmentStore + +def with_redis_test_client + ensure_close(Redis.new({ url: "redis://localhost:6379" })) do |client| + yield client + end +end + + +class RedisStoreTester + def initialize(options) + @options = options + @actual_prefix = @options[:prefix] ||LaunchDarkly::Integrations::Redis.default_prefix + end + + def clear_data + with_redis_test_client do |client| + keys = client.keys("#{@actual_prefix}:*") + keys.each { |key| client.del(key) } + end + end + + def create_feature_store + LaunchDarkly::Integrations::Redis::new_feature_store(@options) + end + + def create_big_segment_store + LaunchDarkly::Integrations::Redis.new_big_segment_store(@options) + end + + def set_big_segments_metadata(metadata) + with_redis_test_client do |client| + client.set(@actual_prefix + $RedisBigSegmentStore::KEY_LAST_UP_TO_DATE, + metadata.last_up_to_date.nil? ? 
"" : metadata.last_up_to_date.to_s) + end + end + + def set_big_segments(user_hash, includes, excludes) + with_redis_test_client do |client| + includes.each do |ref| + client.sadd(@actual_prefix + $RedisBigSegmentStore::KEY_USER_INCLUDE + user_hash, ref) + end + excludes.each do |ref| + client.sadd(@actual_prefix + $RedisBigSegmentStore::KEY_USER_EXCLUDE + user_hash, ref) + end + end + end +end + + +describe "Redis feature store" do + break if ENV['LD_SKIP_DATABASE_TESTS'] == '1' + + include_examples "persistent_feature_store", RedisStoreTester + + def make_concurrent_modifier_test_hook(other_client, flag, start_version, end_version) + test_hook = Object.new + version_counter = start_version + expect(test_hook).to receive(:before_update_transaction) { |base_key, key| + if version_counter <= end_version + new_flag = flag.clone + new_flag[:version] = version_counter + other_client.hset(base_key, key, new_flag.to_json) + version_counter = version_counter + 1 + end + }.at_least(:once) + test_hook + end + + tester = RedisStoreTester.new({ logger: $null_logger }) + + it "handles upsert race condition against external client with lower version" do + with_redis_test_client do |other_client| + flag = { key: "foo", version: 1 } + test_hook = make_concurrent_modifier_test_hook(other_client, flag, 2, 4) + tester = RedisStoreTester.new({ test_hook: test_hook, logger: $null_logger }) + + ensure_stop(tester.create_feature_store) do |store| + store.init(LaunchDarkly::FEATURES => { flag[:key] => flag }) + + my_ver = { key: "foo", version: 10 } + store.upsert(LaunchDarkly::FEATURES, my_ver) + result = store.get(LaunchDarkly::FEATURES, flag[:key]) + expect(result[:version]).to eq 10 + end + end + end + + it "handles upsert race condition against external client with higher version" do + with_redis_test_client do |other_client| + flag = { key: "foo", version: 1 } + test_hook = make_concurrent_modifier_test_hook(other_client, flag, 3, 3) + tester = RedisStoreTester.new({ test_hook: 
test_hook, logger: $null_logger }) + + ensure_stop(tester.create_feature_store) do |store| + store.init(LaunchDarkly::FEATURES => { flag[:key] => flag }) + + my_ver = { key: "foo", version: 2 } + store.upsert(LaunchDarkly::FEATURES, my_ver) + result = store.get(LaunchDarkly::FEATURES, flag[:key]) + expect(result[:version]).to eq 3 + end + end + end + + it "shuts down a custom Redis pool by default" do + unowned_pool = ConnectionPool.new(size: 1, timeout: 1) { Redis.new({ url: "redis://localhost:6379" }) } + tester = RedisStoreTester.new({ pool: unowned_pool, logger: $null_logger }) + store = tester.create_feature_store + + begin + store.init(LaunchDarkly::FEATURES => { }) + store.stop + + expect { unowned_pool.with {} }.to raise_error(ConnectionPool::PoolShuttingDownError) + ensure + unowned_pool.shutdown { |conn| conn.close } + end + end + + it "doesn't shut down a custom Redis pool if pool_shutdown_on_close = false" do + unowned_pool = ConnectionPool.new(size: 1, timeout: 1) { Redis.new({ url: "redis://localhost:6379" }) } + tester = RedisStoreTester.new({ pool: unowned_pool, pool_shutdown_on_close: false, logger: $null_logger }) + store = tester.create_feature_store + + begin + store.init(LaunchDarkly::FEATURES => { }) + store.stop + + expect { unowned_pool.with {} }.not_to raise_error + ensure + unowned_pool.shutdown { |conn| conn.close } + end + end +end + +describe "Redis big segment store" do + break if ENV['LD_SKIP_DATABASE_TESTS'] == '1' + + include_examples "big_segment_store", RedisStoreTester +end diff --git a/spec/integrations/test_data_spec.rb b/spec/integrations/test_data_spec.rb new file mode 100644 index 00000000..75418bd3 --- /dev/null +++ b/spec/integrations/test_data_spec.rb @@ -0,0 +1,241 @@ +require "ldclient-rb" + +module LaunchDarkly + module Integrations + describe 'TestData' do + it 'is a valid datasource' do + td = Integrations::TestData.data_source + config = Config.new(send_events: false, data_source: td) + client = 
LDClient.new('sdkKey', config) + expect(config.feature_store.all(FEATURES)).to eql({}) + client.close + end + + it 'initializes the feature store with existing flags' do + td = Integrations::TestData.data_source + td.update(td.flag('flag')) + config = Config.new(send_events: false, data_source: td) + client = LDClient.new('sdkKey', config) + expect(config.feature_store.get(FEATURES, 'flag')).to eql({ + key: 'flag', + variations: [true, false], + fallthrough: { variation: 0 }, + offVariation: 1, + on: true, + version: 1 + }) + client.close + end + + it 'updates the feature store with new flags' do + td = Integrations::TestData.data_source + td.update(td.flag('flag')) + config = Config.new(send_events: false, data_source: td) + client = LDClient.new('sdkKey', config) + config2 = Config.new(send_events: false, data_source: td) + client2 = LDClient.new('sdkKey', config2) + + expect(config.feature_store.get(FEATURES, 'flag')).to eql({ + key: 'flag', + variations: [true, false], + fallthrough: { variation: 0 }, + offVariation: 1, + on: true, + version: 1 + }) + expect(config2.feature_store.get(FEATURES, 'flag')).to eql({ + key: 'flag', + variations: [true, false], + fallthrough: { variation: 0 }, + offVariation: 1, + on: true, + version: 1 + }) + + td.update(td.flag('flag').variation_for_all_users(false)) + + expect(config.feature_store.get(FEATURES, 'flag')).to eql({ + key: 'flag', + variations: [true, false], + fallthrough: { variation: 1 }, + offVariation: 1, + on: true, + version: 2 + }) + expect(config2.feature_store.get(FEATURES, 'flag')).to eql({ + key: 'flag', + variations: [true, false], + fallthrough: { variation: 1 }, + offVariation: 1, + on: true, + version: 2 + }) + + client.close + client2.close + end + + it 'can include preconfigured items' do + td = Integrations::TestData.data_source + td.use_preconfigured_flag({ key: 'my-flag', version: 1000, on: true }) + td.use_preconfigured_segment({ key: 'my-segment', version: 2000 }) + + config = 
Config.new(send_events: false, data_source: td) + client = LDClient.new('sdkKey', config) + + expect(config.feature_store.get(FEATURES, 'my-flag')).to eql({ + key: 'my-flag', version: 1000, on: true + }) + expect(config.feature_store.get(SEGMENTS, 'my-segment')).to eql({ + key: 'my-segment', version: 2000 + }) + + td.use_preconfigured_flag({ key: 'my-flag', on: false }) + + expect(config.feature_store.get(FEATURES, 'my-flag')).to eql({ + key: 'my-flag', version: 1001, on: false + }) + + td.use_preconfigured_segment({ key: 'my-segment', included: [ 'x' ] }) + + expect(config.feature_store.get(SEGMENTS, 'my-segment')).to eql({ + key: 'my-segment', version: 2001, included: [ 'x' ] + }) + + client.close + end + + it 'TestData.flag defaults to a boolean flag' do + td = TestData.new + f = td.flag('flag').build(0) + expect(f[:variations]).to eq([true, false]) + expect(f[:fallthrough][:variation]).to eq(0) + expect(f[:offVariation]).to eq(1) + end + + it 'TestData.flag returns a copy of the existing flag if it exists' do + td = TestData.new + td.update(td.flag('flag').variation_for_all_users(true)) + expect(td.flag('flag').build(0)[:fallthrough][:variation]).to eq(0) + + #modify the flag but dont call update + td.flag('flag').variation_for_all_users(false).build(0) + + expect(td.flag('flag').build(0)[:fallthrough][:variation]).to eq(0) + end + + describe 'FlagBuilder' do + + it 'defaults to targeting on and sets the flag key' do + f = TestData::FlagBuilder.new('flag').build(1) + expect(f[:key]).to eq('flag') + expect(f[:version]).to eq(1) + expect(f[:on]).to eq(true) + expect(f[:variations]).to be_empty + end + + it 'can set targeting off' do + f = TestData::FlagBuilder.new('flag').on(false).build(1) + expect(f[:on]).to eq(false) + end + + it 'can set fallthrough variation' do + f = TestData::FlagBuilder.new('flag').fallthrough_variation(0).build(1) + expect(f[:fallthrough][:variation]).to eq(0) + end + + it 'can set variation for when targeting is off' do + f = 
TestData::FlagBuilder.new('flag').off_variation(0).build(1) + expect(f[:offVariation]).to eq(0) + end + + it 'can set a list of variations' do + f = TestData::FlagBuilder.new('flag').variations(true, false).build(1) + expect(f[:variations]).to eq([true, false]) + end + + it 'has the boolean_flag shortcut method' do + f = TestData::FlagBuilder.new('flag').boolean_flag.build(1) + expect(f[:variations]).to eq([true, false]) + expect(f[:fallthrough][:variation]).to eq(0) + expect(f[:offVariation]).to eq(1) + end + + it 'can handle boolean or index variation' do + f = TestData::FlagBuilder.new('flag').off_variation(true).build(1) + expect(f[:variations]).to eq([true, false]) + expect(f[:offVariation]).to eq(0) + + f2 = TestData::FlagBuilder.new('flag').fallthrough_variation(true).build(1) + expect(f2[:variations]).to eq([true, false]) + expect(f2[:offVariation]).to eq(1) + end + + it 'can set variation for all users' do + f = TestData::FlagBuilder.new('flag').variation_for_all_users(true).build(1) + expect(f[:rules]).to be_nil + expect(f[:targets]).to be_nil + expect(f[:fallthrough][:variation]).to be(0) + end + + it 'clears existing rules when setting variation for all users' do + f = TestData::FlagBuilder.new('flag') + .if_match('name', 'ben') + .then_return(false) + .variation_for_user('ben', false) + .variation_for_all_users(true).build(1) + expect(f.keys).to_not include(:rules) + expect(f.keys).to_not include(:targets) + expect(f[:fallthrough][:variation]).to be(0) + end + + it 'can set a variation for a specific user' do + f = TestData::FlagBuilder.new('flag') + .variation_for_user('ben', false) + f2 = f.clone.variation_for_user('ben', true) + expect(f.build(0)[:targets]).to eql([ { variation: 1, values: ['ben'] } ]) + expect(f2.build(1)[:targets]).to_not include({ variation: 1, values: ['ben'] }) + expect(f2.build(1)[:targets]).to include({ variation: 0, values: ['ben'] }) + end + + it 'can make an immutable copy of its self' do + fb = 
TestData::FlagBuilder.new('flag').variation_for_all_users(true) + expect(fb.build(0)).to eql(fb.clone.build(0)) + + fcopy = fb.clone.variation_for_all_users(false).build(0) + f = fb.build(0) + + expect(f[:key]).to eql(fcopy[:key]) + expect(f[:variations]).to eql(fcopy[:variations]) + expect(f[:fallthrough][:variation]).to be(0) + expect(fcopy[:fallthrough][:variation]).to be(1) + end + + it 'can build rules based on attributes' do + f = TestData::FlagBuilder.new('flag') + .if_match('name', 'ben') + .and_not_match('country', 'fr') + .then_return(true) + .build(1) + expect(f[:rules]).to eql([{ + id: "rule0", + variation: 0, + clauses: [{ + attribute: 'name', + op: 'in', + values: ['ben'], + negate: false, + }, + { + attribute: 'country', + op: 'in', + values: ['fr'], + negate: true, + } + ] + }]) + end + end + end + end +end diff --git a/spec/ldclient_end_to_end_spec.rb b/spec/ldclient_end_to_end_spec.rb index 6366a6b7..19c6c241 100644 --- a/spec/ldclient_end_to_end_spec.rb +++ b/spec/ldclient_end_to_end_spec.rb @@ -1,11 +1,8 @@ require "http_util" +require "mock_components" require "spec_helper" -SDK_KEY = "sdk-key" - -USER = { key: 'userkey' } - ALWAYS_TRUE_FLAG = { key: 'flagkey', version: 1, on: false, offVariation: 1, variations: [ false, true ] } DATA_WITH_ALWAYS_TRUE_FLAG = { flags: { ALWAYS_TRUE_FLAG[:key ].to_sym => ALWAYS_TRUE_FLAG }, @@ -13,15 +10,6 @@ } PUT_EVENT_WITH_ALWAYS_TRUE_FLAG = "event: put\ndata:{\"data\":#{DATA_WITH_ALWAYS_TRUE_FLAG.to_json}}\n\n'" -def with_client(config) - client = LaunchDarkly::LDClient.new(SDK_KEY, config) - begin - yield client - ensure - client.close - end -end - module LaunchDarkly # Note that we can't do end-to-end tests in streaming mode until we have a test server that can do streaming # responses, which is difficult in WEBrick. 
@@ -31,15 +19,9 @@ module LaunchDarkly with_server do |poll_server| poll_server.setup_ok_response("/sdk/latest-all", DATA_WITH_ALWAYS_TRUE_FLAG.to_json, "application/json") - config = Config.new( - stream: false, - base_uri: poll_server.base_uri.to_s, - send_events: false, - logger: NullLogger.new - ) - with_client(config) do |client| + with_client(test_config(stream: false, data_source: nil, base_uri: poll_server.base_uri.to_s)) do |client| expect(client.initialized?).to be true - expect(client.variation(ALWAYS_TRUE_FLAG[:key], USER, false)).to be true + expect(client.variation(ALWAYS_TRUE_FLAG[:key], basic_user, false)).to be true end end end @@ -48,74 +30,57 @@ module LaunchDarkly with_server do |poll_server| poll_server.setup_status_response("/sdk/latest-all", 401) - config = Config.new( - stream: false, - base_uri: poll_server.base_uri.to_s, - send_events: false, - logger: NullLogger.new - ) - with_client(config) do |client| + with_client(test_config(stream: false, data_source: nil, base_uri: poll_server.base_uri.to_s)) do |client| expect(client.initialized?).to be false - expect(client.variation(ALWAYS_TRUE_FLAG[:key], USER, false)).to be false + expect(client.variation(ALWAYS_TRUE_FLAG[:key], basic_user, false)).to be false end end end it "sends event without diagnostics" do - with_server do |poll_server| - with_server do |events_server| - events_server.setup_ok_response("/bulk", "") - poll_server.setup_ok_response("/sdk/latest-all", '{"flags":{},"segments":{}}', "application/json") - - config = Config.new( - stream: false, - base_uri: poll_server.base_uri.to_s, - events_uri: events_server.base_uri.to_s, - diagnostic_opt_out: true, - logger: NullLogger.new - ) - with_client(config) do |client| - client.identify(USER) - client.flush + with_server do |events_server| + events_server.setup_ok_response("/bulk", "") + + config = test_config( + send_events: true, + events_uri: events_server.base_uri.to_s, + diagnostic_opt_out: true + ) + with_client(config) do 
|client| + client.identify(basic_user) + client.flush - req, body = events_server.await_request_with_body - expect(req.header['authorization']).to eq [ SDK_KEY ] - expect(req.header['connection']).to eq [ "Keep-Alive" ] - data = JSON.parse(body) - expect(data.length).to eq 1 - expect(data[0]["kind"]).to eq "identify" - end + req, body = events_server.await_request_with_body + expect(req.header['authorization']).to eq [ sdk_key ] + expect(req.header['connection']).to eq [ "Keep-Alive" ] + data = JSON.parse(body) + expect(data.length).to eq 1 + expect(data[0]["kind"]).to eq "identify" end end end it "sends diagnostic event" do - with_server do |poll_server| - with_server do |events_server| - events_server.setup_ok_response("/bulk", "") - events_server.setup_ok_response("/diagnostic", "") - poll_server.setup_ok_response("/sdk/latest-all", '{"flags":{},"segments":{}}', "application/json") - - config = Config.new( - stream: false, - base_uri: poll_server.base_uri.to_s, - events_uri: events_server.base_uri.to_s, - logger: NullLogger.new - ) - with_client(config) do |client| - user = { key: 'userkey' } - client.identify(user) - client.flush + with_server do |events_server| + events_server.setup_ok_response("/bulk", "") + events_server.setup_ok_response("/diagnostic", "") + + config = test_config( + send_events: true, + events_uri: events_server.base_uri.to_s + ) + with_client(config) do |client| + client.identify(basic_user) + client.flush - req0, body0 = events_server.await_request_with_body - req1, body1 = events_server.await_request_with_body - req = req0.path == "/diagnostic" ? req0 : req1 - body = req0.path == "/diagnostic" ? 
body0 : body1 - expect(req.header['authorization']).to eq [ SDK_KEY ] - expect(req.header['connection']).to eq [ "Keep-Alive" ] - data = JSON.parse(body) - expect(data["kind"]).to eq "diagnostic-init" - end + req0, body0 = events_server.await_request_with_body + req1, body1 = events_server.await_request_with_body + req = req0.path == "/diagnostic" ? req0 : req1 + body = req0.path == "/diagnostic" ? body0 : body1 + expect(req.header['authorization']).to eq [ sdk_key ] + expect(req.header['connection']).to eq [ "Keep-Alive" ] + data = JSON.parse(body) + expect(data["kind"]).to eq "diagnostic-init" end end end @@ -126,23 +91,24 @@ module LaunchDarkly events_server.setup_ok_response("/bulk", "") poll_server.setup_ok_response("/sdk/latest-all", '{"flags":{},"segments":{}}', "application/json") - config = Config.new( + config = test_config( stream: false, + data_source: nil, + send_events: true, base_uri: "http://fake-polling-server", events_uri: "http://fake-events-server", diagnostic_opt_out: true, - logger: NullLogger.new, socket_factory: SocketFactoryFromHash.new({ "fake-polling-server" => poll_server.port, "fake-events-server" => events_server.port }) ) with_client(config) do |client| - client.identify(USER) + client.identify(basic_user) client.flush req, body = events_server.await_request_with_body - expect(req.header['authorization']).to eq [ SDK_KEY ] + expect(req.header['authorization']).to eq [ sdk_key ] expect(req.header['connection']).to eq [ "Keep-Alive" ] data = JSON.parse(body) expect(data.length).to eq 1 diff --git a/spec/ldclient_evaluation_spec.rb b/spec/ldclient_evaluation_spec.rb new file mode 100644 index 00000000..c63cb882 --- /dev/null +++ b/spec/ldclient_evaluation_spec.rb @@ -0,0 +1,306 @@ +require "ldclient-rb" + +require "mock_components" +require "model_builders" +require "spec_helper" + +module LaunchDarkly + describe "LDClient evaluation tests" do + context "variation" do + it "returns the default value if the client is offline" do + 
with_client(test_config(offline: true)) do |offline_client| + result = offline_client.variation("doesntmatter", basic_user, "default") + expect(result).to eq "default" + end + end + + it "returns the default value for an unknown feature" do + with_client(test_config) do |client| + expect(client.variation("badkey", basic_user, "default")).to eq "default" + end + end + + it "returns the value for an existing feature" do + td = Integrations::TestData.data_source + td.update(td.flag("flagkey").variations("value").variation_for_all_users(0)) + + with_client(test_config(data_source: td)) do |client| + expect(client.variation("flagkey", basic_user, "default")).to eq "value" + end + end + + it "returns the default value if a feature evaluates to nil" do + td = Integrations::TestData.data_source + td.use_preconfigured_flag({ # TestData normally won't construct a flag with offVariation: nil + key: "flagkey", + on: false, + offVariation: nil + }) + + with_client(test_config(data_source: td)) do |client| + expect(client.variation("flagkey", basic_user, "default")).to eq "default" + end + end + + it "can evaluate a flag that references a segment" do + td = Integrations::TestData.data_source + segment = SegmentBuilder.new("segmentkey").included(basic_user[:key]).build + td.use_preconfigured_segment(segment) + td.use_preconfigured_flag( + FlagBuilder.new("flagkey").on(true).variations(true, false).rule( + RuleBuilder.new.variation(0).clause(Clauses.match_segment(segment)) + ).build) + + with_client(test_config(data_source: td)) do |client| + expect(client.variation("flagkey", basic_user, false)).to be true + end + end + + it "can evaluate a flag that references a big segment" do + td = Integrations::TestData.data_source + segment = SegmentBuilder.new("segmentkey").unbounded(true).generation(1).build + td.use_preconfigured_segment(segment) + td.use_preconfigured_flag( + FlagBuilder.new("flagkey").on(true).variations(true, false).rule( + 
RuleBuilder.new.variation(0).clause(Clauses.match_segment(segment)) + ).build) + + segstore = MockBigSegmentStore.new + segstore.setup_segment_for_user(basic_user[:key], segment, true) + big_seg_config = BigSegmentsConfig.new(store: segstore) + + with_client(test_config(data_source: td, big_segments: big_seg_config)) do |client| + expect(client.variation("flagkey", basic_user, false)).to be true + end + end + end + + context "variation_detail" do + feature_with_value = { key: "key", on: false, offVariation: 0, variations: ["value"], version: 100, + trackEvents: true, debugEventsUntilDate: 1000 } + + it "returns the default value if the client is offline" do + with_client(test_config(offline: true)) do |offline_client| + result = offline_client.variation_detail("doesntmatter", basic_user, "default") + expected = EvaluationDetail.new("default", nil, EvaluationReason::error(EvaluationReason::ERROR_CLIENT_NOT_READY)) + expect(result).to eq expected + end + end + + it "returns the default value for an unknown feature" do + with_client(test_config) do |client| + result = client.variation_detail("badkey", basic_user, "default") + expected = EvaluationDetail.new("default", nil, EvaluationReason::error(EvaluationReason::ERROR_FLAG_NOT_FOUND)) + expect(result).to eq expected + end + end + + it "returns a value for an existing feature" do + td = Integrations::TestData.data_source + td.update(td.flag("flagkey").variations("value").on(false).off_variation(0)) + + with_client(test_config(data_source: td)) do |client| + result = client.variation_detail("flagkey", basic_user, "default") + expected = EvaluationDetail.new("value", 0, EvaluationReason::off) + expect(result).to eq expected + end + end + + it "returns the default value if a feature evaluates to nil" do + td = Integrations::TestData.data_source + td.use_preconfigured_flag({ # TestData normally won't construct a flag with offVariation: nil + key: "flagkey", + on: false, + offVariation: nil + }) + + 
with_client(test_config(data_source: td)) do |client| + result = client.variation_detail("flagkey", basic_user, "default") + expected = EvaluationDetail.new("default", nil, EvaluationReason::off) + expect(result).to eq expected + expect(result.default_value?).to be true + end + end + + it "includes big segment status in reason when evaluating a flag that references a big segment" do + td = Integrations::TestData.data_source + segment = SegmentBuilder.new("segmentkey").unbounded(true).generation(1).build + td.use_preconfigured_segment(segment) + td.use_preconfigured_flag( + FlagBuilder.new("flagkey").on(true).variations(true, false).rule( + RuleBuilder.new.variation(0).clause(Clauses.match_segment(segment)) + ).build) + + segstore = MockBigSegmentStore.new + segstore.setup_segment_for_user(basic_user[:key], segment, true) + segstore.setup_metadata(Time.now) + big_seg_config = BigSegmentsConfig.new(store: segstore) + + with_client(test_config(data_source: td, big_segments: big_seg_config)) do |client| + result = client.variation_detail("flagkey", basic_user, false) + expect(result.value).to be true + expect(result.reason.big_segments_status).to eq(BigSegmentsStatus::HEALTHY) + end + end + end + + describe "all_flags" do + let(:flag1) { { key: "key1", offVariation: 0, variations: [ 'value1' ] } } + let(:flag2) { { key: "key2", offVariation: 0, variations: [ 'value2' ] } } + let(:test_data) { + td = Integrations::TestData.data_source + td.use_preconfigured_flag(flag1) + td.use_preconfigured_flag(flag2) + td + } + + it "returns flag values" do + with_client(test_config(data_source: test_data)) do |client| + result = client.all_flags({ key: 'userkey' }) + expect(result).to eq({ 'key1' => 'value1', 'key2' => 'value2' }) + end + end + + it "returns empty map for nil user" do + with_client(test_config(data_source: test_data)) do |client| + result = client.all_flags(nil) + expect(result).to eq({}) + end + end + + it "returns empty map for nil user key" do + 
with_client(test_config(data_source: test_data)) do |client| + result = client.all_flags({}) + expect(result).to eq({}) + end + end + + it "returns empty map if offline" do + with_client(test_config(data_source: test_data, offline: true)) do |offline_client| + result = offline_client.all_flags(nil) + expect(result).to eq({}) + end + end + end + + context "all_flags_state" do + let(:flag1) { { key: "key1", version: 100, offVariation: 0, variations: [ 'value1' ], trackEvents: false } } + let(:flag2) { { key: "key2", version: 200, offVariation: 1, variations: [ 'x', 'value2' ], trackEvents: true, debugEventsUntilDate: 1000 } } + let(:test_data) { + td = Integrations::TestData.data_source + td.use_preconfigured_flag(flag1) + td.use_preconfigured_flag(flag2) + td + } + + it "returns flags state" do + + with_client(test_config(data_source: test_data)) do |client| + state = client.all_flags_state({ key: 'userkey' }) + expect(state.valid?).to be true + + values = state.values_map + expect(values).to eq({ 'key1' => 'value1', 'key2' => 'value2' }) + + result = state.as_json + expect(result).to eq({ + 'key1' => 'value1', + 'key2' => 'value2', + '$flagsState' => { + 'key1' => { + :variation => 0, + :version => 100 + }, + 'key2' => { + :variation => 1, + :version => 200, + :trackEvents => true, + :debugEventsUntilDate => 1000 + } + }, + '$valid' => true + }) + end + end + + it "can be filtered for only client-side flags" do + td = Integrations::TestData.data_source + td.use_preconfigured_flag({ key: "server-side-1", offVariation: 0, variations: [ 'a' ], clientSide: false }) + td.use_preconfigured_flag({ key: "server-side-2", offVariation: 0, variations: [ 'b' ], clientSide: false }) + td.use_preconfigured_flag({ key: "client-side-1", offVariation: 0, variations: [ 'value1' ], clientSide: true }) + td.use_preconfigured_flag({ key: "client-side-2", offVariation: 0, variations: [ 'value2' ], clientSide: true }) + + with_client(test_config(data_source: td)) do |client| + state = 
client.all_flags_state({ key: 'userkey' }, client_side_only: true) + expect(state.valid?).to be true + + values = state.values_map + expect(values).to eq({ 'client-side-1' => 'value1', 'client-side-2' => 'value2' }) + end + end + + it "can omit details for untracked flags" do + future_time = (Time.now.to_f * 1000).to_i + 100000 + td = Integrations::TestData.data_source + td.use_preconfigured_flag({ key: "key1", version: 100, offVariation: 0, variations: [ 'value1' ], trackEvents: false }) + td.use_preconfigured_flag({ key: "key2", version: 200, offVariation: 1, variations: [ 'x', 'value2' ], trackEvents: true }) + td.use_preconfigured_flag({ key: "key3", version: 300, offVariation: 1, variations: [ 'x', 'value3' ], debugEventsUntilDate: future_time }) + + with_client(test_config(data_source: td)) do |client| + state = client.all_flags_state({ key: 'userkey' }, { details_only_for_tracked_flags: true }) + expect(state.valid?).to be true + + values = state.values_map + expect(values).to eq({ 'key1' => 'value1', 'key2' => 'value2', 'key3' => 'value3' }) + + result = state.as_json + expect(result).to eq({ + 'key1' => 'value1', + 'key2' => 'value2', + 'key3' => 'value3', + '$flagsState' => { + 'key1' => { + :variation => 0 + }, + 'key2' => { + :variation => 1, + :version => 200, + :trackEvents => true + }, + 'key3' => { + :variation => 1, + :version => 300, + :debugEventsUntilDate => future_time + } + }, + '$valid' => true + }) + end + end + + it "returns empty state for nil user" do + with_client(test_config(data_source: test_data)) do |client| + state = client.all_flags_state(nil) + expect(state.valid?).to be false + expect(state.values_map).to eq({}) + end + end + + it "returns empty state for nil user key" do + with_client(test_config(data_source: test_data)) do |client| + state = client.all_flags_state({}) + expect(state.valid?).to be false + expect(state.values_map).to eq({}) + end + end + + it "returns empty state if offline" do + 
with_client(test_config(data_source: test_data, offline: true)) do |offline_client| + state = offline_client.all_flags_state({ key: 'userkey' }) + expect(state.valid?).to be false + expect(state.values_map).to eq({}) + end + end + end + end +end diff --git a/spec/ldclient_events_spec.rb b/spec/ldclient_events_spec.rb new file mode 100644 index 00000000..86eaa77d --- /dev/null +++ b/spec/ldclient_events_spec.rb @@ -0,0 +1,300 @@ +require "ldclient-rb" + +require "mock_components" +require "model_builders" +require "spec_helper" + +module LaunchDarkly + describe "LDClient events tests" do + def event_processor(client) + client.instance_variable_get(:@event_processor) + end + + it 'uses NullEventProcessor if send_events is false' do + with_client(test_config(send_events: false)) do |client| + expect(event_processor(client)).to be_a(LaunchDarkly::NullEventProcessor) + end + end + + context "evaluation events - variation" do + it "unknown flag" do + with_client(test_config) do |client| + expect(event_processor(client)).to receive(:add_event).with(hash_including( + kind: "feature", key: "badkey", user: basic_user, value: "default", default: "default" + )) + client.variation("badkey", basic_user, "default") + end + end + + it "known flag" do + td = Integrations::TestData.data_source + td.update(td.flag("flagkey").variations("value").variation_for_all_users(0)) + + with_client(test_config(data_source: td)) do |client| + expect(event_processor(client)).to receive(:add_event).with(hash_including( + kind: "feature", + key: "flagkey", + version: 1, + user: basic_user, + variation: 0, + value: "value", + default: "default" + )) + client.variation("flagkey", basic_user, "default") + end + end + + it "does not send event, and logs error, if user is nil" do + td = Integrations::TestData.data_source + td.update(td.flag("flagkey").variations("value").variation_for_all_users(0)) + + logger = double().as_null_object + + with_client(test_config(data_source: td, logger: logger)) do 
|client| + expect(event_processor(client)).not_to receive(:add_event) + expect(logger).to receive(:error) + client.variation("flagkey", nil, "default") + end + end + + it "does not send event, and logs warning, if user key is nil" do + td = Integrations::TestData.data_source + td.update(td.flag("flagkey").variations("value").variation_for_all_users(0)) + + logger = double().as_null_object + keyless_user = { key: nil } + + with_client(test_config(data_source: td, logger: logger)) do |client| + expect(event_processor(client)).not_to receive(:add_event) + expect(logger).to receive(:warn) + client.variation("flagkey", keyless_user, "default") + end + end + + it "sets trackEvents and reason if trackEvents is set for matched rule" do + td = Integrations::TestData.data_source + td.use_preconfigured_flag( + FlagBuilder.new("flagkey").version(100).on(true).variations("value"). + rule(RuleBuilder.new.variation(0).id("id").track_events(true). + clause(Clauses.match_user(basic_user))). + build + ) + + with_client(test_config(data_source: td)) do |client| + expect(event_processor(client)).to receive(:add_event).with(hash_including( + kind: "feature", + key: "flagkey", + version: 100, + user: basic_user, + variation: 0, + value: "value", + default: "default", + trackEvents: true, + reason: LaunchDarkly::EvaluationReason::rule_match(0, 'id') + )) + client.variation("flagkey", basic_user, "default") + end + end + + it "sets trackEvents and reason if trackEventsFallthrough is set and we fell through" do + td = Integrations::TestData.data_source + td.use_preconfigured_flag( + FlagBuilder.new("flagkey").version(100).on(true).variations("value").fallthrough_variation(0). 
+ track_events_fallthrough(true).build + ) + + with_client(test_config(data_source: td)) do |client| + expect(event_processor(client)).to receive(:add_event).with(hash_including( + kind: "feature", + key: "flagkey", + version: 100, + user: basic_user, + variation: 0, + value: "value", + default: "default", + trackEvents: true, + reason: LaunchDarkly::EvaluationReason::fallthrough + )) + client.variation("flagkey", basic_user, "default") + end + end + end + + context "evaluation events - variation_detail" do + it "unknown flag" do + with_client(test_config) do |client| + expect(event_processor(client)).to receive(:add_event).with(hash_including( + kind: "feature", key: "badkey", user: basic_user, value: "default", default: "default", + reason: LaunchDarkly::EvaluationReason::error(LaunchDarkly::EvaluationReason::ERROR_FLAG_NOT_FOUND) + )) + client.variation_detail("badkey", basic_user, "default") + end + end + + it "known flag" do + td = Integrations::TestData.data_source + td.update(td.flag("flagkey").variations("value").on(false).off_variation(0)) + + with_client(test_config(data_source: td)) do |client| + expect(event_processor(client)).to receive(:add_event).with(hash_including( + kind: "feature", + key: "flagkey", + version: 1, + user: basic_user, + variation: 0, + value: "value", + default: "default", + reason: LaunchDarkly::EvaluationReason::off + )) + client.variation_detail("flagkey", basic_user, "default") + end + end + + it "does not send event, and logs error, if user is nil" do + td = Integrations::TestData.data_source + td.update(td.flag("flagkey").variations("value").on(false).off_variation(0)) + + logger = double().as_null_object + + with_client(test_config(data_source: td, logger: logger)) do |client| + expect(event_processor(client)).not_to receive(:add_event) + expect(logger).to receive(:error) + client.variation_detail("flagkey", nil, "default") + end + end + + it "does not send event, and logs warning, if user key is nil" do + td = 
Integrations::TestData.data_source + td.update(td.flag("flagkey").variations("value").on(false).off_variation(0)) + + logger = double().as_null_object + + with_client(test_config(data_source: td, logger: logger)) do |client| + expect(event_processor(client)).not_to receive(:add_event) + expect(logger).to receive(:warn) + client.variation_detail("flagkey", { key: nil }, "default") + end + end + end + + context "identify" do + it "queues up an identify event" do + with_client(test_config) do |client| + expect(event_processor(client)).to receive(:add_event).with(hash_including( + kind: "identify", key: basic_user[:key], user: basic_user)) + client.identify(basic_user) + end + end + + it "does not send event, and logs warning, if user is nil" do + logger = double().as_null_object + + with_client(test_config(logger: logger)) do |client| + expect(event_processor(client)).not_to receive(:add_event) + expect(logger).to receive(:warn) + client.identify(nil) + end + end + + it "does not send event, and logs warning, if user key is nil" do + logger = double().as_null_object + + with_client(test_config(logger: logger)) do |client| + expect(event_processor(client)).not_to receive(:add_event) + expect(logger).to receive(:warn) + client.identify({ key: nil }) + end + end + end + + context "track" do + it "queues up a custom event" do + with_client(test_config) do |client| + expect(event_processor(client)).to receive(:add_event).with(hash_including( + kind: "custom", key: "custom_event_name", user: basic_user, data: 42)) + client.track("custom_event_name", basic_user, 42) + end + end + + it "can include a metric value" do + with_client(test_config) do |client| + expect(event_processor(client)).to receive(:add_event).with(hash_including( + kind: "custom", key: "custom_event_name", user: basic_user, metricValue: 1.5)) + client.track("custom_event_name", basic_user, nil, 1.5) + end + end + + it "includes contextKind with anonymous user" do + anon_user = { key: 'user-key', anonymous: 
true } + + with_client(test_config) do |client| + expect(event_processor(client)).to receive(:add_event).with(hash_including( + kind: "custom", key: "custom_event_name", user: anon_user, metricValue: 2.2, contextKind: "anonymousUser")) + client.track("custom_event_name", anon_user, nil, 2.2) + end + end + + it "sanitizes the user in the event" do + numeric_key_user = { key: 33 } + sanitized_user = { key: "33" } + + with_client(test_config) do |client| + expect(event_processor(client)).to receive(:add_event).with(hash_including(user: sanitized_user)) + client.track("custom_event_name", numeric_key_user, nil) + end + end + + it "does not send event, and logs a warning, if user is nil" do + logger = double().as_null_object + + with_client(test_config(logger: logger)) do |client| + expect(event_processor(client)).not_to receive(:add_event) + expect(logger).to receive(:warn) + client.track("custom_event_name", nil, nil) + end + end + + it "does not send event, and logs warning, if user key is nil" do + logger = double().as_null_object + + with_client(test_config(logger: logger)) do |client| + expect(event_processor(client)).not_to receive(:add_event) + expect(logger).to receive(:warn) + client.track("custom_event_name", { key: nil }, nil) + end + end + end + + context "alias" do + it "queues up an alias event" do + anon_user = { key: "user-key", anonymous: true } + + with_client(test_config) do |client| + expect(event_processor(client)).to receive(:add_event).with(hash_including( + kind: "alias", key: basic_user[:key], contextKind: "user", previousKey: anon_user[:key], previousContextKind: "anonymousUser")) + client.alias(basic_user, anon_user) + end + end + + it "does not send event, and logs warning, if user is nil" do + logger = double().as_null_object + + with_client(test_config(logger: logger)) do |client| + expect(event_processor(client)).not_to receive(:add_event) + expect(logger).to receive(:warn) + client.alias(nil, nil) + end + end + + it "does not send event, 
and logs warning, if user key is nil" do + logger = double().as_null_object + + with_client(test_config(logger: logger)) do |client| + expect(event_processor(client)).not_to receive(:add_event) + expect(logger).to receive(:warn) + client.alias({ key: nil }, { key: nil }) + end + end + end + end +end diff --git a/spec/ldclient_listeners_spec.rb b/spec/ldclient_listeners_spec.rb new file mode 100644 index 00000000..8628f75b --- /dev/null +++ b/spec/ldclient_listeners_spec.rb @@ -0,0 +1,42 @@ +require "mock_components" +require "spec_helper" + +module LaunchDarkly + describe "LDClient event listeners/observers" do + context "big_segment_store_status_provider" do + it "returns unavailable status when not configured" do + with_client(test_config) do |client| + status = client.big_segment_store_status_provider.status + expect(status.available).to be(false) + expect(status.stale).to be(false) + end + end + + it "sends status updates" do + store = MockBigSegmentStore.new + store.setup_metadata(Time.now) + big_segments_config = BigSegmentsConfig.new( + store: store, + status_poll_interval: 0.01 + ) + with_client(test_config(big_segments: big_segments_config)) do |client| + status1 = client.big_segment_store_status_provider.status + expect(status1.available).to be(true) + expect(status1.stale).to be(false) + + statuses = Queue.new + observer = SimpleObserver.adding_to_queue(statuses) + client.big_segment_store_status_provider.add_observer(observer) + + store.setup_metadata_error(StandardError.new("sorry")) + + status2 = statuses.pop() + expect(status2.available).to be(false) + expect(status2.stale).to be(false) + + expect(client.big_segment_store_status_provider.status).to eq(status2) + end + end + end + end +end diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index 8e2ef650..ef689deb 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -1,632 +1,105 @@ +require "mock_components" require "spec_helper" +module LaunchDarkly + describe LDClient do + 
subject { LDClient } -describe LaunchDarkly::LDClient do - subject { LaunchDarkly::LDClient } - let(:offline_config) { LaunchDarkly::Config.new({offline: true}) } - let(:offline_client) do - subject.new("secret", offline_config) - end - let(:null_data) { LaunchDarkly::NullUpdateProcessor.new } - let(:logger) { double().as_null_object } - let(:config) { LaunchDarkly::Config.new({ send_events: false, data_source: null_data, logger: logger }) } - let(:client) do - subject.new("secret", config) - end - let(:feature) do - data = File.read(File.join("spec", "fixtures", "feature.json")) - JSON.parse(data, symbolize_names: true) - end - let(:user) do - { - key: "user@test.com", - custom: { - groups: [ "microsoft", "google" ] - } - } - end - let(:user_anonymous) do - { - key: "anonymous@test.com", - anonymous: true - } - end - let(:numeric_key_user) do - { - key: 33, - custom: { - groups: [ "microsoft", "google" ] - } - } - end - let(:sanitized_numeric_key_user) do - { - key: "33", - custom: { - groups: [ "microsoft", "google" ] - } - } - end - let(:user_without_key) do - { name: "Keyless Joe" } - end - - def event_processor - client.instance_variable_get(:@event_processor) - end - - describe "constructor requirement of non-nil sdk key" do - it "is not enforced when offline" do - subject.new(nil, offline_config) - end - - it "is not enforced if use_ldd is true and send_events is false" do - subject.new(nil, LaunchDarkly::Config.new({ use_ldd: true, send_events: false })) - end - - it "is not enforced if using file data and send_events is false" do - source = LaunchDarkly::FileDataSource.factory({}) - subject.new(nil, LaunchDarkly::Config.new({ data_source: source, send_events: false })) - end - - it "is enforced in streaming mode even if send_events is false" do - expect { - subject.new(nil, LaunchDarkly::Config.new({ send_events: false })) - }.to raise_error(ArgumentError) - end - - it "is enforced in polling mode even if send_events is false" do - expect { - 
subject.new(nil, LaunchDarkly::Config.new({ stream: false, send_events: false })) - }.to raise_error(ArgumentError) - end - - it "is enforced if use_ldd is true and send_events is true" do - expect { - subject.new(nil, LaunchDarkly::Config.new({ use_ldd: true })) - }.to raise_error(ArgumentError) - end - - it "is enforced if using file data and send_events is true" do - source = LaunchDarkly::FileDataSource.factory({}) - expect { - subject.new(nil, LaunchDarkly::Config.new({ data_source: source })) - }.to raise_error(ArgumentError) - end - end - - describe '#variation' do - feature_with_value = { key: "key", on: false, offVariation: 0, variations: ["value"], version: 100, - trackEvents: true, debugEventsUntilDate: 1000 } - - it "returns the default value if the client is offline" do - result = offline_client.variation("doesntmatter", user, "default") - expect(result).to eq "default" - end - - it "returns the default value for an unknown feature" do - expect(client.variation("badkey", user, "default")).to eq "default" - end - - it "queues a feature request event for an unknown feature" do - expect(event_processor).to receive(:add_event).with(hash_including( - kind: "feature", key: "badkey", user: user, value: "default", default: "default" - )) - client.variation("badkey", user, "default") - end - - it "returns the value for an existing feature" do - config.feature_store.init({ LaunchDarkly::FEATURES => {} }) - config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) - expect(client.variation("key", user, "default")).to eq "value" - end - - it "returns the default value if a feature evaluates to nil" do - empty_feature = { key: "key", on: false, offVariation: nil } - config.feature_store.init({ LaunchDarkly::FEATURES => {} }) - config.feature_store.upsert(LaunchDarkly::FEATURES, empty_feature) - expect(client.variation("key", user, "default")).to eq "default" - end - - it "queues a feature request event for an existing feature" do - 
config.feature_store.init({ LaunchDarkly::FEATURES => {} }) - config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) - expect(event_processor).to receive(:add_event).with(hash_including( - kind: "feature", - key: "key", - version: 100, - user: user, - variation: 0, - value: "value", - default: "default", - trackEvents: true, - debugEventsUntilDate: 1000 - )) - client.variation("key", user, "default") - end - - it "does not send an event if user is nil" do - config.feature_store.init({ LaunchDarkly::FEATURES => {} }) - config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) - expect(event_processor).not_to receive(:add_event) - expect(logger).to receive(:error) - client.variation("key", nil, "default") - end - - it "queues a feature event for an existing feature when user is anonymous" do - config.feature_store.init({ LaunchDarkly::FEATURES => {} }) - config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) - expect(event_processor).to receive(:add_event).with(hash_including( - kind: "feature", - key: "key", - version: 100, - contextKind: "anonymousUser", - user: user_anonymous, - variation: 0, - value: "value", - default: "default", - trackEvents: true, - debugEventsUntilDate: 1000 - )) - client.variation("key", user_anonymous, "default") - end - - it "does not queue a feature event for an existing feature when user key is nil" do - config.feature_store.init({ LaunchDarkly::FEATURES => {} }) - config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) - bad_user = { name: "Bob" } - expect(event_processor).not_to receive(:add_event) - expect(logger).to receive(:warn) - client.variation("key", bad_user, "default") - end - - it "sets trackEvents and reason if trackEvents is set for matched rule" do - flag = { - key: 'flag', - on: true, - variations: [ 'value' ], - version: 100, - rules: [ - clauses: [ - { attribute: 'key', op: 'in', values: [ user[:key] ] } - ], - variation: 0, - id: 'id', - trackEvents: 
true - ] - } - config.feature_store.init({ LaunchDarkly::FEATURES => {} }) - config.feature_store.upsert(LaunchDarkly::FEATURES, flag) - expect(event_processor).to receive(:add_event).with(hash_including( - kind: 'feature', - key: 'flag', - version: 100, - user: user, - value: 'value', - default: 'default', - trackEvents: true, - reason: LaunchDarkly::EvaluationReason::rule_match(0, 'id') - )) - client.variation('flag', user, 'default') - end - - it "sets trackEvents and reason if trackEventsFallthrough is set and we fell through" do - flag = { - key: 'flag', - on: true, - variations: [ 'value' ], - fallthrough: { variation: 0 }, - version: 100, - rules: [], - trackEventsFallthrough: true - } - config.feature_store.init({ LaunchDarkly::FEATURES => {} }) - config.feature_store.upsert(LaunchDarkly::FEATURES, flag) - expect(event_processor).to receive(:add_event).with(hash_including( - kind: 'feature', - key: 'flag', - version: 100, - user: user, - value: 'value', - default: 'default', - trackEvents: true, - reason: LaunchDarkly::EvaluationReason::fallthrough - )) - client.variation('flag', user, 'default') - end - end - - describe '#variation_detail' do - feature_with_value = { key: "key", on: false, offVariation: 0, variations: ["value"], version: 100, - trackEvents: true, debugEventsUntilDate: 1000 } - - it "returns the default value if the client is offline" do - result = offline_client.variation_detail("doesntmatter", user, "default") - expected = LaunchDarkly::EvaluationDetail.new("default", nil, - LaunchDarkly::EvaluationReason::error(LaunchDarkly::EvaluationReason::ERROR_CLIENT_NOT_READY)) - expect(result).to eq expected - end - - it "returns the default value for an unknown feature" do - result = client.variation_detail("badkey", user, "default") - expected = LaunchDarkly::EvaluationDetail.new("default", nil, - LaunchDarkly::EvaluationReason::error(LaunchDarkly::EvaluationReason::ERROR_FLAG_NOT_FOUND)) - expect(result).to eq expected - end - - it "queues a 
feature request event for an unknown feature" do - expect(event_processor).to receive(:add_event).with(hash_including( - kind: "feature", key: "badkey", user: user, value: "default", default: "default", - reason: LaunchDarkly::EvaluationReason::error(LaunchDarkly::EvaluationReason::ERROR_FLAG_NOT_FOUND) - )) - client.variation_detail("badkey", user, "default") - end - - it "returns a value for an existing feature" do - config.feature_store.init({ LaunchDarkly::FEATURES => {} }) - config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) - result = client.variation_detail("key", user, "default") - expected = LaunchDarkly::EvaluationDetail.new("value", 0, LaunchDarkly::EvaluationReason::off) - expect(result).to eq expected - end - - it "returns the default value if a feature evaluates to nil" do - empty_feature = { key: "key", on: false, offVariation: nil } - config.feature_store.init({ LaunchDarkly::FEATURES => {} }) - config.feature_store.upsert(LaunchDarkly::FEATURES, empty_feature) - result = client.variation_detail("key", user, "default") - expected = LaunchDarkly::EvaluationDetail.new("default", nil, LaunchDarkly::EvaluationReason::off) - expect(result).to eq expected - expect(result.default_value?).to be true - end - - it "queues a feature request event for an existing feature" do - config.feature_store.init({ LaunchDarkly::FEATURES => {} }) - config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) - expect(event_processor).to receive(:add_event).with(hash_including( - kind: "feature", - key: "key", - version: 100, - user: user, - variation: 0, - value: "value", - default: "default", - trackEvents: true, - debugEventsUntilDate: 1000, - reason: LaunchDarkly::EvaluationReason::off - )) - client.variation_detail("key", user, "default") - end - - it "does not send an event if user is nil" do - config.feature_store.init({ LaunchDarkly::FEATURES => {} }) - config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) - 
expect(event_processor).not_to receive(:add_event) - expect(logger).to receive(:error) - client.variation_detail("key", nil, "default") - end - end - - describe '#all_flags' do - let(:flag1) { { key: "key1", offVariation: 0, variations: [ 'value1' ] } } - let(:flag2) { { key: "key2", offVariation: 0, variations: [ 'value2' ] } } - - it "returns flag values" do - config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) - - result = client.all_flags({ key: 'userkey' }) - expect(result).to eq({ 'key1' => 'value1', 'key2' => 'value2' }) - end - - it "returns empty map for nil user" do - config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) - - result = client.all_flags(nil) - expect(result).to eq({}) - end - - it "returns empty map for nil user key" do - config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) - - result = client.all_flags({}) - expect(result).to eq({}) - end - - it "returns empty map if offline" do - offline_config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) - - result = offline_client.all_flags(nil) - expect(result).to eq({}) - end - end - - describe '#all_flags_state' do - let(:flag1) { { key: "key1", version: 100, offVariation: 0, variations: [ 'value1' ], trackEvents: false } } - let(:flag2) { { key: "key2", version: 200, offVariation: 1, variations: [ 'x', 'value2' ], trackEvents: true, debugEventsUntilDate: 1000 } } - - it "returns flags state" do - config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) - - state = client.all_flags_state({ key: 'userkey' }) - expect(state.valid?).to be true - - values = state.values_map - expect(values).to eq({ 'key1' => 'value1', 'key2' => 'value2' }) - - result = state.as_json - expect(result).to eq({ - 'key1' => 'value1', - 'key2' => 'value2', - '$flagsState' => { - 'key1' => { - :variation => 0, - :version => 100 - 
}, - 'key2' => { - :variation => 1, - :version => 200, - :trackEvents => true, - :debugEventsUntilDate => 1000 - } - }, - '$valid' => true - }) - end - - it "can be filtered for only client-side flags" do - flag1 = { key: "server-side-1", offVariation: 0, variations: [ 'a' ], clientSide: false } - flag2 = { key: "server-side-2", offVariation: 0, variations: [ 'b' ], clientSide: false } - flag3 = { key: "client-side-1", offVariation: 0, variations: [ 'value1' ], clientSide: true } - flag4 = { key: "client-side-2", offVariation: 0, variations: [ 'value2' ], clientSide: true } - config.feature_store.init({ LaunchDarkly::FEATURES => { - flag1[:key] => flag1, flag2[:key] => flag2, flag3[:key] => flag3, flag4[:key] => flag4 - }}) - - state = client.all_flags_state({ key: 'userkey' }, client_side_only: true) - expect(state.valid?).to be true - - values = state.values_map - expect(values).to eq({ 'client-side-1' => 'value1', 'client-side-2' => 'value2' }) - end - - it "can omit details for untracked flags" do - future_time = (Time.now.to_f * 1000).to_i + 100000 - flag1 = { key: "key1", version: 100, offVariation: 0, variations: [ 'value1' ], trackEvents: false } - flag2 = { key: "key2", version: 200, offVariation: 1, variations: [ 'x', 'value2' ], trackEvents: true } - flag3 = { key: "key3", version: 300, offVariation: 1, variations: [ 'x', 'value3' ], debugEventsUntilDate: future_time } - - config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2, 'key3' => flag3 } }) - - state = client.all_flags_state({ key: 'userkey' }, { details_only_for_tracked_flags: true }) - expect(state.valid?).to be true - - values = state.values_map - expect(values).to eq({ 'key1' => 'value1', 'key2' => 'value2', 'key3' => 'value3' }) - - result = state.as_json - expect(result).to eq({ - 'key1' => 'value1', - 'key2' => 'value2', - 'key3' => 'value3', - '$flagsState' => { - 'key1' => { - :variation => 0 - }, - 'key2' => { - :variation => 1, - :version => 200, - 
:trackEvents => true - }, - 'key3' => { - :variation => 1, - :version => 300, - :debugEventsUntilDate => future_time - } - }, - '$valid' => true - }) - end - - it "returns empty state for nil user" do - config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) - - state = client.all_flags_state(nil) - expect(state.valid?).to be false - expect(state.values_map).to eq({}) - end - - it "returns empty state for nil user key" do - config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) - - state = client.all_flags_state({}) - expect(state.valid?).to be false - expect(state.values_map).to eq({}) - end - - it "returns empty state if offline" do - offline_config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) - - state = offline_client.all_flags_state({ key: 'userkey' }) - expect(state.valid?).to be false - expect(state.values_map).to eq({}) - end - end - - describe '#secure_mode_hash' do - it "will return the expected value for a known message and secret" do - result = client.secure_mode_hash({key: :Message}) - expect(result).to eq "aa747c502a898200f9e4fa21bac68136f886a0e27aec70ba06daf2e2a5cb5597" - end - end - - describe '#track' do - it "queues up an custom event" do - expect(event_processor).to receive(:add_event).with(hash_including(kind: "custom", key: "custom_event_name", user: user, data: 42)) - client.track("custom_event_name", user, 42) - end - - it "can include a metric value" do - expect(event_processor).to receive(:add_event).with(hash_including( - kind: "custom", key: "custom_event_name", user: user, metricValue: 1.5)) - client.track("custom_event_name", user, nil, 1.5) - end - - it "includes contextKind with anonymous user" do - expect(event_processor).to receive(:add_event).with(hash_including( - kind: "custom", key: "custom_event_name", user: user_anonymous, metricValue: 2.2, contextKind: "anonymousUser")) - client.track("custom_event_name", 
user_anonymous, nil, 2.2) - end - - it "sanitizes the user in the event" do - expect(event_processor).to receive(:add_event).with(hash_including(user: sanitized_numeric_key_user)) - client.track("custom_event_name", numeric_key_user, nil) - end - - it "does not send an event, and logs a warning, if user is nil" do - expect(event_processor).not_to receive(:add_event) - expect(logger).to receive(:warn) - client.track("custom_event_name", nil, nil) - end - - it "does not send an event, and logs a warning, if user key is nil" do - expect(event_processor).not_to receive(:add_event) - expect(logger).to receive(:warn) - client.track("custom_event_name", user_without_key, nil) - end - end - - describe '#alias' do - it "queues up an alias event" do - expect(event_processor).to receive(:add_event).with(hash_including( - kind: "alias", key: user[:key], contextKind: "user", previousKey: user_anonymous[:key], previousContextKind: "anonymousUser")) - client.alias(user, user_anonymous) - end - - it "does not send an event, and logs a warning, if user is nil" do - expect(event_processor).not_to receive(:add_event) - expect(logger).to receive(:warn) - client.alias(nil, nil) - end - - it "does not send an event, and logs a warning, if user key is nil" do - expect(event_processor).not_to receive(:add_event) - expect(logger).to receive(:warn) - client.alias(user_without_key, user_without_key) - end - end - - describe '#identify' do - it "queues up an identify event" do - expect(event_processor).to receive(:add_event).with(hash_including(kind: "identify", key: user[:key], user: user)) - client.identify(user) - end - - it "does not send an event, and logs a warning, if user is nil" do - expect(event_processor).not_to receive(:add_event) - expect(logger).to receive(:warn) - client.identify(nil) - end - - it "does not send an event, and logs a warning, if user key is nil" do - expect(event_processor).not_to receive(:add_event) - expect(logger).to receive(:warn) - 
client.identify(user_without_key) - end - end - - describe 'with send_events: false' do - let(:config) { LaunchDarkly::Config.new({offline: true, send_events: false, data_source: null_data}) } - let(:client) { subject.new("secret", config) } - - it "uses a NullEventProcessor" do - ep = client.instance_variable_get(:@event_processor) - expect(ep).to be_a(LaunchDarkly::NullEventProcessor) - end - end - - describe 'with send_events: true' do - let(:config_with_events) { LaunchDarkly::Config.new({offline: false, send_events: true, diagnostic_opt_out: true, data_source: null_data}) } - let(:client_with_events) { subject.new("secret", config_with_events) } - - it "does not use a NullEventProcessor" do - ep = client_with_events.instance_variable_get(:@event_processor) - expect(ep).not_to be_a(LaunchDarkly::NullEventProcessor) - end - end - - describe "feature store data ordering" do - let(:dependency_ordering_test_data) { - { - LaunchDarkly::FEATURES => { - a: { key: "a", prerequisites: [ { key: "b" }, { key: "c" } ] }, - b: { key: "b", prerequisites: [ { key: "c" }, { key: "e" } ] }, - c: { key: "c" }, - d: { key: "d" }, - e: { key: "e" }, - f: { key: "f" } - }, - LaunchDarkly::SEGMENTS => { - o: { key: "o" } - } - } - } + context "constructor requirement of non-nil sdk key" do + it "is not enforced when offline" do + subject.new(nil, Config.new(offline: true)) + end - class FakeFeatureStore - attr_reader :received_data + it "is not enforced if use_ldd is true and send_events is false" do + subject.new(nil, Config.new({ use_ldd: true, send_events: false })) + end - def init(all_data) - @received_data = all_data + it "is not enforced if using file data and send_events is false" do + source = LaunchDarkly::Integrations::FileData.data_source({}) + subject.new(nil, Config.new({ data_source: source, send_events: false })) end - end - class FakeUpdateProcessor - def initialize(store, data) - @store = store - @data = data + it "is enforced in streaming mode even if send_events 
is false" do + expect { + subject.new(nil, Config.new({ send_events: false })) + }.to raise_error(ArgumentError) end - def start - @store.init(@data) - ev = Concurrent::Event.new - ev.set - ev + it "is enforced in polling mode even if send_events is false" do + expect { + subject.new(nil, Config.new({ stream: false, send_events: false })) + }.to raise_error(ArgumentError) end - def stop + it "is enforced if use_ldd is true and send_events is true" do + expect { + subject.new(nil, Config.new({ use_ldd: true })) + }.to raise_error(ArgumentError) end - def initialized? - true + it "is enforced if using file data and send_events is true" do + source = LaunchDarkly::Integrations::FileData.data_source({}) + expect { + subject.new(nil, Config.new({ data_source: source })) + }.to raise_error(ArgumentError) end end - it "passes data set to feature store in correct order on init" do - store = FakeFeatureStore.new - data_source_factory = lambda { |sdk_key, config| FakeUpdateProcessor.new(config.feature_store, - dependency_ordering_test_data) } - config = LaunchDarkly::Config.new(send_events: false, feature_store: store, data_source: data_source_factory) - client = subject.new("secret", config) + context "secure_mode_hash" do + it "will return the expected value for a known message and secret" do + ensure_close(subject.new("secret", test_config)) do |client| + result = client.secure_mode_hash({key: :Message}) + expect(result).to eq "aa747c502a898200f9e4fa21bac68136f886a0e27aec70ba06daf2e2a5cb5597" + end + end + end - data = store.received_data - expect(data).not_to be_nil - expect(data.count).to eq(2) - - # Segments should always come first - expect(data.keys[0]).to be(LaunchDarkly::SEGMENTS) - expect(data.values[0].count).to eq(dependency_ordering_test_data[LaunchDarkly::SEGMENTS].count) + context "feature store data ordering" do + let(:dependency_ordering_test_data) { + { + FEATURES => { + a: { key: "a", prerequisites: [ { key: "b" }, { key: "c" } ] }, + b: { key: "b", 
prerequisites: [ { key: "c" }, { key: "e" } ] }, + c: { key: "c" }, + d: { key: "d" }, + e: { key: "e" }, + f: { key: "f" } + }, + SEGMENTS => { + o: { key: "o" } + } + } + } - # Features should be ordered so that a flag always appears after its prerequisites, if any - expect(data.keys[1]).to be(LaunchDarkly::FEATURES) - flags_map = data.values[1] - flags_list = flags_map.values - expect(flags_list.count).to eq(dependency_ordering_test_data[LaunchDarkly::FEATURES].count) - flags_list.each_with_index do |item, item_index| - (item[:prerequisites] || []).each do |prereq| - prereq = flags_map[prereq[:key].to_sym] - prereq_index = flags_list.index(prereq) - if prereq_index > item_index - all_keys = (flags_list.map { |f| f[:key] }).join(", ") - raise "#{item[:key]} depends on #{prereq[:key]}, but #{item[:key]} was listed first; keys in order are [#{all_keys}]" + it "passes data set to feature store in correct order on init" do + store = CapturingFeatureStore.new + td = Integrations::TestData.data_source + dependency_ordering_test_data[FEATURES].each { |key, flag| td.use_preconfigured_flag(flag) } + dependency_ordering_test_data[SEGMENTS].each { |key, segment| td.use_preconfigured_segment(segment) } + + with_client(test_config(feature_store: store, data_source: td)) do |client| + data = store.received_data + expect(data).not_to be_nil + expect(data.count).to eq(2) + + # Segments should always come first + expect(data.keys[0]).to be(SEGMENTS) + expect(data.values[0].count).to eq(dependency_ordering_test_data[SEGMENTS].count) + + # Features should be ordered so that a flag always appears after its prerequisites, if any + expect(data.keys[1]).to be(FEATURES) + flags_map = data.values[1] + flags_list = flags_map.values + expect(flags_list.count).to eq(dependency_ordering_test_data[FEATURES].count) + flags_list.each_with_index do |item, item_index| + (item[:prerequisites] || []).each do |prereq| + prereq = flags_map[prereq[:key].to_sym] + prereq_index = 
flags_list.index(prereq) + if prereq_index > item_index + all_keys = (flags_list.map { |f| f[:key] }).join(", ") + raise "#{item[:key]} depends on #{prereq[:key]}, but #{item[:key]} was listed first; keys in order are [#{all_keys}]" + end + end end end end diff --git a/spec/mock_components.rb b/spec/mock_components.rb new file mode 100644 index 00000000..07dd851a --- /dev/null +++ b/spec/mock_components.rb @@ -0,0 +1,105 @@ +require "spec_helper" + +require "ldclient-rb/impl/big_segments" +require "ldclient-rb/impl/evaluator" +require "ldclient-rb/interfaces" + +def sdk_key + "sdk-key" +end + +def null_data + LaunchDarkly::NullUpdateProcessor.new +end + +def null_logger + double().as_null_object +end + +def base_config + { + data_source: null_data, + send_events: false, + logger: null_logger + } +end + +def test_config(add_props = {}) + LaunchDarkly::Config.new(base_config.merge(add_props)) +end + +def with_client(config) + ensure_close(LaunchDarkly::LDClient.new(sdk_key, config)) do |client| + yield client + end +end + +def basic_user + { "key": "user-key" } +end + +module LaunchDarkly + class CapturingFeatureStore + attr_reader :received_data + + def init(all_data) + @received_data = all_data + end + + def stop + end + end + + class MockBigSegmentStore + def initialize + @metadata = nil + @metadata_error = nil + @memberships = {} + end + + def get_metadata + raise @metadata_error if !@metadata_error.nil? 
+ @metadata + end + + def get_membership(user_hash) + @memberships[user_hash] + end + + def stop + end + + def setup_metadata(last_up_to_date) + @metadata = Interfaces::BigSegmentStoreMetadata.new(last_up_to_date.to_f * 1000) + end + + def setup_metadata_error(ex) + @metadata_error = ex + end + + def setup_membership(user_key, membership) + user_hash = Impl::BigSegmentStoreManager.hash_for_user_key(user_key) + @memberships[user_hash] = membership + end + + def setup_segment_for_user(user_key, segment, included) + user_hash = Impl::BigSegmentStoreManager.hash_for_user_key(user_key) + @memberships[user_hash] ||= {} + @memberships[user_hash][Impl::Evaluator.make_big_segment_ref(segment)] = included + end + end + + class SimpleObserver + def initialize(fn) + @fn = fn + end + + def update(value) + @fn.call(value) + end + + def self.adding_to_queue(q) + new(->(value) { q << value }) + end + end +end diff --git a/spec/model_builders.rb b/spec/model_builders.rb new file mode 100644 index 00000000..a7c0bd6e --- /dev/null +++ b/spec/model_builders.rb @@ -0,0 +1,154 @@ + +class FlagBuilder + def initialize(key) + @flag = { + key: key, + version: 1, + variations: [ false ], + rules: [] + } + end + + def build + @flag.clone + end + + def version(value) + @flag[:version] = value + self + end + + def variations(*values) + @flag[:variations] = values + self + end + + def on(value) + @flag[:on] = value + self + end + + def rule(r) + @flag[:rules].append(r.build) + self + end + + def off_with_value(value) + @flag[:variations] = [ value ] + @flag[:offVariation] = 0 + @flag[:on] = false + self + end + + def off_variation(value) + @flag[:offVariation] = value + self + end + + def fallthrough_variation(value) + @flag[:fallthrough] = { variation: value } + self + end + + def track_events(value) + @flag[:trackEvents] = value + self + end + + def track_events_fallthrough(value) + @flag[:trackEventsFallthrough] = value + self + end + + def debug_events_until_date(value) + 
@flag[:debugEventsUntilDate] = value + self + end +end + +class RuleBuilder + def initialize() + @rule = { + id: "", + variation: 0, + clauses: [] + } + end + + def build + @rule.clone + end + + def id(value) + @rule[:id] = value + self + end + + def variation(value) + @rule[:variation] = value + self + end + + def clause(c) + @rule[:clauses].append(c) + self + end + + def track_events(value) + @rule[:trackEvents] = value + self + end +end + +class SegmentBuilder + def initialize(key) + @segment = { + key: key, + version: 1, + included: [], + excluded: [] + } + end + + def build + @segment.clone + end + + def included(*keys) + @segment[:included] = keys + self + end + + def excluded(*keys) + @segment[:excluded] = keys + self + end + + def unbounded(value) + @segment[:unbounded] = value + self + end + + def generation(value) + @segment[:generation] = value + self + end +end + +class Clauses + def self.match_segment(segment) + { + "attribute": "", + "op": "segmentMatch", + "values": [ segment.is_a?(Hash) ? 
segment[:key] : segment ] + } + end + + def self.match_user(user) + { + "attribute": "key", + "op": "in", + "values": [ user[:key] ] + } + end +end diff --git a/spec/redis_feature_store_spec.rb b/spec/redis_feature_store_spec.rb deleted file mode 100644 index 6dd5733e..00000000 --- a/spec/redis_feature_store_spec.rb +++ /dev/null @@ -1,121 +0,0 @@ -require "feature_store_spec_base" -require "connection_pool" -require "json" -require "redis" -require "spec_helper" - - -$my_prefix = 'testprefix' - -$base_opts = { - prefix: $my_prefix, - logger: $null_log -} - -def create_redis_store(opts = {}) - LaunchDarkly::RedisFeatureStore.new($base_opts.merge(opts).merge({ expiration: 60 })) -end - -def create_redis_store_uncached(opts = {}) - LaunchDarkly::RedisFeatureStore.new($base_opts.merge(opts).merge({ expiration: 0 })) -end - -def clear_all_data - client = Redis.new - client.flushdb -end - - -describe LaunchDarkly::RedisFeatureStore do - subject { LaunchDarkly::RedisFeatureStore } - - break if ENV['LD_SKIP_DATABASE_TESTS'] == '1' - - # These tests will all fail if there isn't a Redis instance running on the default port. 
- - context "real Redis with local cache" do - include_examples "feature_store", method(:create_redis_store), method(:clear_all_data) - end - - context "real Redis without local cache" do - include_examples "feature_store", method(:create_redis_store_uncached), method(:clear_all_data) - end - - def make_concurrent_modifier_test_hook(other_client, flag, start_version, end_version) - test_hook = Object.new - version_counter = start_version - expect(test_hook).to receive(:before_update_transaction) { |base_key, key| - if version_counter <= end_version - new_flag = flag.clone - new_flag[:version] = version_counter - other_client.hset(base_key, key, new_flag.to_json) - version_counter = version_counter + 1 - end - }.at_least(:once) - test_hook - end - - it "handles upsert race condition against external client with lower version" do - other_client = Redis.new({ url: "redis://localhost:6379" }) - flag = { key: "foo", version: 1 } - test_hook = make_concurrent_modifier_test_hook(other_client, flag, 2, 4) - store = create_redis_store({ test_hook: test_hook }) - - begin - store.init(LaunchDarkly::FEATURES => { flag[:key] => flag }) - - my_ver = { key: "foo", version: 10 } - store.upsert(LaunchDarkly::FEATURES, my_ver) - result = store.get(LaunchDarkly::FEATURES, flag[:key]) - expect(result[:version]).to eq 10 - ensure - other_client.close - end - end - - it "handles upsert race condition against external client with higher version" do - other_client = Redis.new({ url: "redis://localhost:6379" }) - flag = { key: "foo", version: 1 } - test_hook = make_concurrent_modifier_test_hook(other_client, flag, 3, 3) - store = create_redis_store({ test_hook: test_hook }) - - begin - store.init(LaunchDarkly::FEATURES => { flag[:key] => flag }) - - my_ver = { key: "foo", version: 2 } - store.upsert(LaunchDarkly::FEATURES, my_ver) - result = store.get(LaunchDarkly::FEATURES, flag[:key]) - expect(result[:version]).to eq 3 - ensure - other_client.close - end - end - - it "shuts down a custom 
Redis pool by default" do - unowned_pool = ConnectionPool.new(size: 1, timeout: 1) { Redis.new({ url: "redis://localhost:6379" }) } - store = create_redis_store({ pool: unowned_pool }) - - begin - store.init(LaunchDarkly::FEATURES => { }) - store.stop - - expect { unowned_pool.with {} }.to raise_error(ConnectionPool::PoolShuttingDownError) - ensure - unowned_pool.shutdown { |conn| conn.close } - end - end - - it "doesn't shut down a custom Redis pool if pool_shutdown_on_close = false" do - unowned_pool = ConnectionPool.new(size: 1, timeout: 1) { Redis.new({ url: "redis://localhost:6379" }) } - store = create_redis_store({ pool: unowned_pool, pool_shutdown_on_close: false }) - - begin - store.init(LaunchDarkly::FEATURES => { }) - store.stop - - expect { unowned_pool.with {} }.not_to raise_error(ConnectionPool::PoolShuttingDownError) - ensure - unowned_pool.shutdown { |conn| conn.close } - end - end -end diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb index 8438ecc2..c54ef444 100644 --- a/spec/spec_helper.rb +++ b/spec/spec_helper.rb @@ -3,6 +3,22 @@ $null_log = ::Logger.new($stdout) $null_log.level = ::Logger::FATAL +def ensure_close(thing) + begin + yield thing + ensure + thing.close + end +end + +def ensure_stop(thing) + begin + yield thing + ensure + thing.stop + end +end + RSpec.configure do |config| config.before(:each) do end