From 96a1cc30737e13108d9a5211a6b998db1096e8fe Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 4 Oct 2018 19:17:42 -0700 Subject: [PATCH 01/50] add option to reduce front-end metadata for untracked flags --- ldclient/client.py | 10 +++-- ldclient/flags_state.py | 12 ++++-- testing/test_flags_state.py | 21 +++++----- testing/test_ldclient_evaluation.py | 59 +++++++++++++++++++++++++++-- 4 files changed, 81 insertions(+), 21 deletions(-) diff --git a/ldclient/client.py b/ldclient/client.py index 683a5c3b..039fad52 100644 --- a/ldclient/client.py +++ b/ldclient/client.py @@ -296,7 +296,10 @@ def all_flags_state(self, user, **kwargs): :param kwargs: optional parameters affecting how the state is computed: set `client_side_only=True` to limit it to only flags that are marked for use with the client-side SDK (by default, all flags are included); set `with_reasons=True` to - include evaluation reasons in the state (see `variation_detail`) + include evaluation reasons in the state (see `variation_detail`); set + `details_only_for_tracked_flags=True` to omit any metadata that is normally only + used for event generation, such as flag versions and evaluation reasons, unless + the flag has event tracking or debugging turned on :return: a FeatureFlagsState object (will never be None; its 'valid' property will be False if the client is offline, has not been initialized, or the user is None or has no key) :rtype: FeatureFlagsState @@ -319,6 +322,7 @@ def all_flags_state(self, user, **kwargs): state = FeatureFlagsState(True) client_only = kwargs.get('client_side_only', False) with_reasons = kwargs.get('with_reasons', False) + details_only_if_tracked = kwargs.get('details_only_for_tracked_flags', False) try: flags_map = self._store.all(FEATURES, lambda x: x) if flags_map is None: @@ -333,12 +337,12 @@ def all_flags_state(self, user, **kwargs): try: detail = evaluate(flag, user, self._store, False).detail state.add_flag(flag, detail.value, detail.variation_index, - detail.reason if with_reasons else None) + detail.reason if with_reasons else None, details_only_if_tracked) except Exception as e: log.error("Error evaluating flag \"%s\" in all_flags_state: %s" % (key, e)) log.debug(traceback.format_exc()) reason = {'kind': 'ERROR', 'errorKind': 'EXCEPTION'} - state.add_flag(flag, None, None, reason if with_reasons else None) + state.add_flag(flag, None, None, reason if with_reasons else None, details_only_if_tracked) return state diff --git a/ldclient/flags_state.py b/ldclient/flags_state.py index c76b4908..cbfde1ec 100644 --- a/ldclient/flags_state.py +++ b/ldclient/flags_state.py @@ -12,15 +12,19 @@ def __init__(self, valid): self.__flag_metadata = {} self.__valid = valid - def add_flag(self, flag, value, variation, reason): + def add_flag(self, flag, value, variation, reason, details_only_if_tracked): """Used internally to build the state map.""" key = flag['key'] self.__flag_values[key] = value - meta = { 'version': flag.get('version'), 'trackEvents': flag.get('trackEvents') } + meta = {} + if (not details_only_if_tracked) or flag.get('trackEvents') or flag.get('debugEventsUntilDate'): + meta['version'] = flag.get('version') + if reason is not None: + meta['reason'] = reason if variation is not None: meta['variation'] = variation - if reason is not None: - meta['reason'] = reason + if flag.get('trackEvents'): + meta['trackEvents'] = True if flag.get('debugEventsUntilDate') is not None: meta['debugEventsUntilDate'] = flag.get('debugEventsUntilDate') self.__flag_metadata[key] = meta diff --git 
a/testing/test_flags_state.py b/testing/test_flags_state.py index 2fe5b123..45ea6404 100644 --- a/testing/test_flags_state.py +++ b/testing/test_flags_state.py @@ -6,7 +6,7 @@ def test_can_get_flag_value(): state = FeatureFlagsState(True) flag = { 'key': 'key' } - state.add_flag(flag, 'value', 1, None) + state.add_flag(flag, 'value', 1, None, False) assert state.get_flag_value('key') == 'value' def test_returns_none_for_unknown_flag(): @@ -17,16 +17,16 @@ def test_can_convert_to_values_map(): state = FeatureFlagsState(True) flag1 = { 'key': 'key1' } flag2 = { 'key': 'key2' } - state.add_flag(flag1, 'value1', 0, None) - state.add_flag(flag2, 'value2', 1, None) + state.add_flag(flag1, 'value1', 0, None, False) + state.add_flag(flag2, 'value2', 1, None, False) assert state.to_values_map() == { 'key1': 'value1', 'key2': 'value2' } def test_can_convert_to_json_dict(): state = FeatureFlagsState(True) flag1 = { 'key': 'key1', 'version': 100, 'offVariation': 0, 'variations': [ 'value1' ], 'trackEvents': False } flag2 = { 'key': 'key2', 'version': 200, 'offVariation': 1, 'variations': [ 'x', 'value2' ], 'trackEvents': True, 'debugEventsUntilDate': 1000 } - state.add_flag(flag1, 'value1', 0, None) - state.add_flag(flag2, 'value2', 1, None) + state.add_flag(flag1, 'value1', 0, None, False) + state.add_flag(flag2, 'value2', 1, None, False) result = state.to_json_dict() assert result == { @@ -35,8 +35,7 @@ def test_can_convert_to_json_dict(): '$flagsState': { 'key1': { 'variation': 0, - 'version': 100, - 'trackEvents': False + 'version': 100 }, 'key2': { 'variation': 1, @@ -52,8 +51,8 @@ def test_can_convert_to_json_string(): state = FeatureFlagsState(True) flag1 = { 'key': 'key1', 'version': 100, 'offVariation': 0, 'variations': [ 'value1' ], 'trackEvents': False } flag2 = { 'key': 'key2', 'version': 200, 'offVariation': 1, 'variations': [ 'x', 'value2' ], 'trackEvents': True, 'debugEventsUntilDate': 1000 } - state.add_flag(flag1, 'value1', 0, None) - state.add_flag(flag2, 'value2', 1, None) + state.add_flag(flag1, 'value1', 0, None, False) + state.add_flag(flag2, 'value2', 1, None, False) obj = state.to_json_dict() str = state.to_json_string() @@ -63,8 +62,8 @@ def test_can_serialize_with_jsonpickle(): state = FeatureFlagsState(True) flag1 = { 'key': 'key1', 'version': 100, 'offVariation': 0, 'variations': [ 'value1' ], 'trackEvents': False } flag2 = { 'key': 'key2', 'version': 200, 'offVariation': 1, 'variations': [ 'x', 'value2' ], 'trackEvents': True, 'debugEventsUntilDate': 1000 } - state.add_flag(flag1, 'value1', 0, None) - state.add_flag(flag2, 'value2', 1, None) + state.add_flag(flag1, 'value1', 0, None, False) + state.add_flag(flag2, 'value2', 1, None, False) obj = state.to_json_dict() str = jsonpickle.encode(state, unpicklable=False) diff --git a/testing/test_ldclient_evaluation.py b/testing/test_ldclient_evaluation.py index 9183034b..81719564 100644 --- a/testing/test_ldclient_evaluation.py +++ b/testing/test_ldclient_evaluation.py @@ -149,8 +149,7 @@ def test_all_flags_state_returns_state(): '$flagsState': { 'key1': { 'variation': 0, - 'version': 100, - 'trackEvents': False + 'version': 100 }, 'key2': { 'variation': 1, @@ -176,7 +175,6 @@ def test_all_flags_state_returns_state_with_reasons(): 'key1': { 'variation': 0, 'version': 100, - 'trackEvents': False, 'reason': {'kind': 'OFF'} }, 'key2': { @@ -229,6 +227,61 @@ def test_all_flags_state_can_be_filtered_for_client_side_flags(): values = state.to_values_map() assert values == { 'client-side-1': 'value1', 'client-side-2': 'value2' } +def 
test_all_flags_state_can_omit_details_for_untracked_flags(): + flag1 = { + 'key': 'key1', + 'version': 100, + 'on': False, + 'offVariation': 0, + 'variations': [ 'value1' ], + 'trackEvents': False + } + flag2 = { + 'key': 'key2', + 'version': 200, + 'on': False, + 'offVariation': 1, + 'variations': [ 'x', 'value2' ], + 'trackEvents': True + } + flag3 = { + 'key': 'key3', + 'version': 300, + 'on': False, + 'offVariation': 1, + 'variations': [ 'x', 'value3' ], + 'debugEventsUntilDate': 1000 + } + store = InMemoryFeatureStore() + store.init({ FEATURES: { 'key1': flag1, 'key2': flag2, 'key3': flag3 } }) + client = make_client(store) + state = client.all_flags_state(user, with_reasons=True, details_only_for_tracked_flags=True) + assert state.valid == True + result = state.to_json_dict() + assert result == { + 'key1': 'value1', + 'key2': 'value2', + 'key3': 'value3', + '$flagsState': { + 'key1': { + 'variation': 0 + }, + 'key2': { + 'variation': 1, + 'version': 200, + 'trackEvents': True, + 'reason': {'kind': 'OFF'} + }, + 'key3': { + 'variation': 1, + 'version': 300, + 'debugEventsUntilDate': 1000, + 'reason': {'kind': 'OFF'} + } + }, + '$valid': True + } + def test_all_flags_state_returns_empty_state_if_user_is_none(): store = InMemoryFeatureStore() store.init({ FEATURES: { 'key1': flag1, 'key2': flag2 } }) From 89056fc7587ba16f9573e707c82be42af14a7b20 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 8 Oct 2018 16:33:39 -0700 Subject: [PATCH 02/50] fix logic for whether a flag is tracked in all_flags_state --- ldclient/flags_state.py | 8 +++++++- testing/test_ldclient_evaluation.py | 6 ++++-- 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/ldclient/flags_state.py b/ldclient/flags_state.py index cbfde1ec..c5a8ab41 100644 --- a/ldclient/flags_state.py +++ b/ldclient/flags_state.py @@ -1,4 +1,5 @@ import json +import time class FeatureFlagsState(object): """ @@ -17,7 +18,12 @@ def add_flag(self, flag, value, variation, reason, details_only_if_tracked): key = flag['key'] self.__flag_values[key] = value meta = {} - if (not details_only_if_tracked) or flag.get('trackEvents') or flag.get('debugEventsUntilDate'): + with_details = (not details_only_if_tracked) or flag.get('trackEvents') + if not with_details: + if flag.get('debugEventsUntilDate'): + now = int(time.time() * 1000) + with_details = (flag.get('debugEventsUntilDate') > now) + if with_details: meta['version'] = flag.get('version') if reason is not None: meta['reason'] = reason diff --git a/testing/test_ldclient_evaluation.py b/testing/test_ldclient_evaluation.py index 81719564..46c48756 100644 --- a/testing/test_ldclient_evaluation.py +++ b/testing/test_ldclient_evaluation.py @@ -1,5 +1,6 @@ import pytest import json +import time from ldclient.client import LDClient, Config from ldclient.feature_store import InMemoryFeatureStore from ldclient.flag import EvaluationDetail @@ -228,6 +229,7 @@ def test_all_flags_state_can_be_filtered_for_client_side_flags(): assert values == { 'client-side-1': 'value1', 'client-side-2': 'value2' } def test_all_flags_state_can_omit_details_for_untracked_flags(): + future_time = (time.time() * 1000) + 100000 flag1 = { 'key': 'key1', 'version': 100, @@ -250,7 +252,7 @@ def test_all_flags_state_can_omit_details_for_untracked_flags(): 'on': False, 'offVariation': 1, 'variations': [ 'x', 'value3' ], - 'debugEventsUntilDate': 1000 + 'debugEventsUntilDate': future_time } store = InMemoryFeatureStore() store.init({ FEATURES: { 'key1': flag1, 'key2': flag2, 'key3': flag3 } }) @@ -275,7 +277,7 @@ def 
test_all_flags_state_can_omit_details_for_untracked_flags(): 'key3': { 'variation': 1, 'version': 300, - 'debugEventsUntilDate': 1000, + 'debugEventsUntilDate': future_time, 'reason': {'kind': 'OFF'} } }, From 1fc23e4e5a1e4d0a1f4256df5faf1c36bf85c4eb Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sun, 14 Oct 2018 00:25:44 -0700 Subject: [PATCH 03/50] use expiringdict from PyPi --- NOTICE.txt | 2 - ldclient/expiringdict.py | 155 -------------------------------- ldclient/redis_feature_store.py | 2 +- requirements.txt | 1 + 4 files changed, 2 insertions(+), 158 deletions(-) delete mode 100644 NOTICE.txt delete mode 100644 ldclient/expiringdict.py diff --git a/NOTICE.txt b/NOTICE.txt deleted file mode 100644 index 24f9d0e4..00000000 --- a/NOTICE.txt +++ /dev/null @@ -1,2 +0,0 @@ -This product includes software (ExpiringDict) developed by -Mailgun (https://github.com/mailgun/expiringdict). \ No newline at end of file diff --git a/ldclient/expiringdict.py b/ldclient/expiringdict.py deleted file mode 100644 index 4b244c21..00000000 --- a/ldclient/expiringdict.py +++ /dev/null @@ -1,155 +0,0 @@ -''' -Dictionary with auto-expiring values for caching purposes. - -Expiration happens on any access, object is locked during cleanup from expired -values. Can not store more than max_len elements - the oldest will be deleted. - ->>> ExpiringDict(max_len=100, max_age_seconds=10) - -The values stored in the following way: -{ - key1: (value1, created_time1), - key2: (value2, created_time2) -} - -NOTE: iteration over dict and also keys() do not remove expired values! - -Copied from https://github.com/mailgun/expiringdict/commit/d17d071721dd12af6829819885a74497492d7fb7 under the APLv2 - -TODO - Use PyPI version once https://github.com/mailgun/expiringdict/issues/13 has been fixed so that -https://github.com/mailgun/expiringdict/commit/62c50ce7083a1557a1140dae19145f3a0a7a1a14 is patched -''' - -import time -from threading import RLock - -from collections import OrderedDict - - -class ExpiringDict(OrderedDict): - - def __init__(self, max_len, max_age_seconds): - assert max_age_seconds >= 0 - assert max_len >= 1 - - OrderedDict.__init__(self) - self.max_len = max_len - self.max_age = max_age_seconds - self.lock = RLock() - - def __contains__(self, key): - """ Return True if the dict has a key, else return False. """ - try: - with self.lock: - item = OrderedDict.__getitem__(self, key) - if time.time() - item[1] < self.max_age: - return True - else: - del self[key] - except KeyError: - pass - return False - - def __getitem__(self, key, with_age=False): - """ Return the item of the dict. - - Raises a KeyError if key is not in the map. - """ - with self.lock: - item = OrderedDict.__getitem__(self, key) - item_age = time.time() - item[1] - if item_age < self.max_age: - if with_age: - return item[0], item_age - else: - return item[0] - else: - del self[key] - raise KeyError(key) - - def __setitem__(self, key, value): - """ Set d[key] to value. """ - with self.lock: - if len(self) == self.max_len: - self.popitem(last=False) - OrderedDict.__setitem__(self, key, (value, time.time())) - - def pop(self, key, default=None): - """ Get item from the dict and remove it. - - Return default if expired or does not exist. Never raise KeyError. - """ - with self.lock: - try: - item = OrderedDict.__getitem__(self, key) - del self[key] - return item[0] - except KeyError: - return default - - def ttl(self, key): - """ Return TTL of the `key` (in seconds). - - Returns None for non-existent or expired keys. 
- """ - key_value, key_age = self.get(key, with_age=True) - if key_age: - key_ttl = self.max_age - key_age - if key_ttl > 0: - return key_ttl - return None - - def get(self, key, default=None, with_age=False): - " Return the value for key if key is in the dictionary, else default. " - try: - return self.__getitem__(key, with_age) - except KeyError: - if with_age: - return default, None - else: - return default - - def items(self): - """ Return a copy of the dictionary's list of (key, value) pairs. """ - r = [] - for key in self: - try: - r.append((key, self[key])) - except KeyError: - pass - return r - - def values(self): - """ Return a copy of the dictionary's list of values. - See the note for dict.items(). """ - r = [] - for key in self: - try: - r.append(self[key]) - except KeyError: - pass - return r - - def fromkeys(self): - " Create a new dictionary with keys from seq and values set to value. " - raise NotImplementedError() - - def iteritems(self): - """ Return an iterator over the dictionary's (key, value) pairs. """ - raise NotImplementedError() - - def itervalues(self): - """ Return an iterator over the dictionary's values. """ - raise NotImplementedError() - - def viewitems(self): - " Return a new view of the dictionary's items ((key, value) pairs). " - raise NotImplementedError() - - def viewkeys(self): - """ Return a new view of the dictionary's keys. """ - raise NotImplementedError() - - def viewvalues(self): - """ Return a new view of the dictionary's values. """ - raise NotImplementedError() diff --git a/ldclient/redis_feature_store.py b/ldclient/redis_feature_store.py index b016a1eb..71b7261b 100644 --- a/ldclient/redis_feature_store.py +++ b/ldclient/redis_feature_store.py @@ -1,10 +1,10 @@ import json from pprint import pprint +from expiringdict import ExpiringDict import redis from ldclient import log -from ldclient.expiringdict import ExpiringDict from ldclient.interfaces import FeatureStore from ldclient.memoized_value import MemoizedValue from ldclient.versioned_data_kind import FEATURES diff --git a/requirements.txt b/requirements.txt index 90a5ef51..8787ac53 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,6 @@ backoff>=1.4.3 certifi>=2018.4.16 +expiringdict>=1.1.4 future>=0.16.0 six>=1.10.0 pyRFC3339>=1.0 From ae8b25eb33ad3dbca21231f22ece4a96694f731c Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 1 Nov 2018 15:04:39 -0700 Subject: [PATCH 04/50] implement file data source, not including auto-update --- ldclient/file_data_source.py | 95 ++++++++++++++ test-requirements.txt | 3 +- testing/test_file_data_source.py | 205 +++++++++++++++++++++++++++++++ 3 files changed, 302 insertions(+), 1 deletion(-) create mode 100644 ldclient/file_data_source.py create mode 100644 testing/test_file_data_source.py diff --git a/ldclient/file_data_source.py b/ldclient/file_data_source.py new file mode 100644 index 00000000..d82930d9 --- /dev/null +++ b/ldclient/file_data_source.py @@ -0,0 +1,95 @@ +import json +import six +import traceback + +have_yaml = False +try: + import yaml + have_yaml = True +except ImportError: + pass + +from ldclient.interfaces import UpdateProcessor +from ldclient.util import log +from ldclient.versioned_data_kind import FEATURES, SEGMENTS + + +class FileDataSource(UpdateProcessor): + @classmethod + def factory(cls, **kwargs): + return lambda config, store, ready : FileDataSource(store, kwargs, ready) + + def __init__(self, store, options, ready): + self._store = store + self._ready = ready + self._inited = False + self._paths = 
options.get('paths', []) + if isinstance(self._paths, six.string_types): + self._paths = [ self._paths ] + + def start(self): + self._load_all() + + # We will signal readiness immediately regardless of whether the file load succeeded or failed - + # the difference can be detected by checking initialized() + self._ready.set() + + def stop(self): + pass + + def initialized(self): + return self._inited + + def _load_all(self): + all_data = { FEATURES: {}, SEGMENTS: {} } + print "Loading: %s" % self._paths + for path in self._paths: + try: + self._load_file(path, all_data) + except Exception as e: + log.error('Unable to load flag data from "%s": %s' % (path, repr(e))) + traceback.print_exc() + return + print "Initing: %s" % all_data + self._store.init(all_data) + self._inited = True + + def _load_file(self, path, all_data): + content = None + with open(path, 'r') as f: + content = f.read() + parsed = self._parse_content(content) + for key, flag in six.iteritems(parsed.get('flags', {})): + self._add_item(all_data, FEATURES, flag) + for key, value in six.iteritems(parsed.get('flagValues', {})): + self._add_item(all_data, FEATURES, self._make_flag_with_value(key, value)) + for key, segment in six.iteritems(parsed.get('segments', {})): + self._add_item(all_data, SEGMENTS, segment) + + def _parse_content(self, content): + if have_yaml: + if content.strip().startswith("{"): + print("json: %s" % content) + return json.loads(content) + else: + return yaml.load(content) + print("json: %s" % content) + return json.loads(content) + + def _add_item(self, all_data, kind, item): + items = all_data[kind] + key = item.get('key') + if items.get(key) is None: + items[key] = item + else: + raise Exception('In %s, key "%s" was used more than once' % (kind.namespace, key)) + + def _make_flag_with_value(self, key, value): + return { + 'key': key, + 'on': True, + 'fallthrough': { + 'variation': 0 + }, + 'variations': [ value ] + } diff --git a/test-requirements.txt b/test-requirements.txt index ee547312..1aa5903e 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -4,4 +4,5 @@ redis>=2.10.5 coverage>=4.4 pytest-capturelog>=0.7 pytest-cov>=2.4.0 -codeclimate-test-reporter>=0.2.1 \ No newline at end of file +codeclimate-test-reporter>=0.2.1 +pyyaml>=3.0 diff --git a/testing/test_file_data_source.py b/testing/test_file_data_source.py new file mode 100644 index 00000000..de4d9393 --- /dev/null +++ b/testing/test_file_data_source.py @@ -0,0 +1,205 @@ +import json +import os +import pytest +import tempfile +import threading +import time + +from ldclient.client import LDClient +from ldclient.config import Config +from ldclient.feature_store import InMemoryFeatureStore +from ldclient.file_data_source import FileDataSource +from ldclient.versioned_data_kind import FEATURES, SEGMENTS + + +all_flag_keys = [ 'flag1', 'flag2' ] +all_properties_json = ''' + { + "flags": { + "flag1": { + "key": "flag1", + "on": true, + "fallthrough": { + "variation": 2 + }, + "variations": [ "fall", "off", "on" ] + } + }, + "flagValues": { + "flag2": "value2" + }, + "segments": { + "seg1": { + "key": "seg1", + "include": ["user1"] + } + } + } +''' + +all_properties_yaml = ''' +--- +flags: + flag1: + key: flag1 + "on": true +flagValues: + flag2: value2 +segments: + seg1: + key: seg1 + include: ["user1"] +''' + +flag_only_json = ''' + { + "flags": { + "flag1": { + "key": "flag1", + "on": true, + "fallthrough": { + "variation": 2 + }, + "variations": [ "fall", "off", "on" ] + } + } + } +''' + +segment_only_json = ''' + { + "segments": { 
+ "seg1": { + "key": "seg1", + "include": ["user1"] + } + } + } +''' + +fds = None +store = None +ready = None + + +def setup_function(): + global fds, store, ready + store = InMemoryFeatureStore() + ready = threading.Event() + +def teardown_function(): + if fds is not None: + fds.stop() + +def make_temp_file(content): + f, path = tempfile.mkstemp() + os.write(f, content) + os.close(f) + return path + +def replace_file(path, content): + with open(path, 'w') as f: + f.write(content) + +def test_does_not_load_data_prior_to_start(): + path = make_temp_file('{"flagValues":{"key":"value"}}') + try: + fds = FileDataSource.factory(paths = path)(Config(), store, ready) + assert ready.is_set() is False + assert fds.initialized() is False + assert store.initialized is False + finally: + os.remove(path) + +def test_loads_flags_on_start_from_json(): + path = make_temp_file(all_properties_json) + try: + fds = FileDataSource.factory(paths = path)(Config(), store, ready) + fds.start() + assert store.initialized is True + assert sorted(list(store.all(FEATURES, lambda x: x).keys())) == all_flag_keys + finally: + os.remove(path) + +def test_loads_flags_on_start_from_yaml(): + path = make_temp_file(all_properties_yaml) + try: + fds = FileDataSource.factory(paths = path)(Config(), store, ready) + fds.start() + assert store.initialized is True + assert sorted(list(store.all(FEATURES, lambda x: x).keys())) == all_flag_keys + finally: + os.remove(path) + +def test_sets_ready_event_and_initialized_on_successful_load(): + path = make_temp_file(all_properties_json) + try: + fds = FileDataSource.factory(paths = path)(Config(), store, ready) + fds.start() + assert fds.initialized() is True + assert ready.is_set() is True + finally: + os.remove(path) + +def test_sets_ready_event_and_does_not_set_initialized_on_unsuccessful_load(): + bad_file_path = 'no-such-file' + fds = FileDataSource.factory(paths = bad_file_path)(Config(), store, ready) + fds.start() + assert fds.initialized() is False + assert ready.is_set() is True + +def test_can_load_multiple_files(): + path1 = make_temp_file(flag_only_json) + path2 = make_temp_file(segment_only_json) + try: + fds = FileDataSource.factory(paths = [ path1, path2 ])(Config(), store, ready) + fds.start() + assert len(store.all(FEATURES, lambda x: x)) == 1 + assert len(store.all(SEGMENTS, lambda x: x)) == 1 + finally: + os.remove(path1) + os.remove(path2) + +def test_does_not_allow_duplicate_keys(): + path1 = make_temp_file(flag_only_json) + path2 = make_temp_file(flag_only_json) + try: + fds = FileDataSource.factory(paths = [ path1, path2 ])(Config(), store, ready) + fds.start() + assert len(store.all(FEATURES, lambda x: x)) == 0 + finally: + os.remove(path1) + os.remove(path2) + +def test_does_not_reload_modified_file_if_auto_update_is_off(): + path = make_temp_file(flag_only_json) + try: + fds = FileDataSource.factory(paths = path)(Config(), store, ready) + fds.start() + assert len(store.all(SEGMENTS, lambda x: x)) == 0 + time.sleep(0.5) + replace_file(path, segment_only_json) + time.sleep(0.5) + assert len(store.all(SEGMENTS, lambda x: x)) == 0 + finally: + os.remove(path) + +def test_evaluates_full_flag_with_client_as_expected(): + path = make_temp_file(all_properties_json) + try: + fds = FileDataSource.factory(paths = path) + client = LDClient(config=Config(update_processor_class = fds, send_events = False)) + value = client.variation('flag1', { 'key': 'user' }, '') + assert value == 'on' + finally: + os.remove(path) + +def 
test_evaluates_simplified_flag_with_client_as_expected(): + path = make_temp_file(all_properties_json) + try: + fds = FileDataSource.factory(paths = path) + client = LDClient(config=Config(update_processor_class = fds, send_events = False)) + value = client.variation('flag2', { 'key': 'user' }, '') + assert value == 'value2' + finally: + os.remove(path) From 850837d72794b6e5e175590304fe844b808213d8 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 1 Nov 2018 15:53:56 -0700 Subject: [PATCH 05/50] rm debugging --- ldclient/file_data_source.py | 1 - 1 file changed, 1 deletion(-) diff --git a/ldclient/file_data_source.py b/ldclient/file_data_source.py index d82930d9..5ebb062d 100644 --- a/ldclient/file_data_source.py +++ b/ldclient/file_data_source.py @@ -42,7 +42,6 @@ def initialized(self): def _load_all(self): all_data = { FEATURES: {}, SEGMENTS: {} } - print "Loading: %s" % self._paths for path in self._paths: try: self._load_file(path, all_data) From aa7684a5181143a0d7c0874c3d01f1b837f4c3b2 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 1 Nov 2018 16:01:26 -0700 Subject: [PATCH 06/50] rm debugging --- ldclient/file_data_source.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/ldclient/file_data_source.py b/ldclient/file_data_source.py index 5ebb062d..a8351ba6 100644 --- a/ldclient/file_data_source.py +++ b/ldclient/file_data_source.py @@ -49,7 +49,6 @@ def _load_all(self): log.error('Unable to load flag data from "%s": %s' % (path, repr(e))) traceback.print_exc() return - print "Initing: %s" % all_data self._store.init(all_data) self._inited = True @@ -68,11 +67,9 @@ def _load_file(self, path, all_data): def _parse_content(self, content): if have_yaml: if content.strip().startswith("{"): - print("json: %s" % content) return json.loads(content) else: return yaml.load(content) - print("json: %s" % content) return json.loads(content) def _add_item(self, all_data, kind, item): From 39c90424302e934502db4cff01e9f9de96cd2e65 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 1 Nov 2018 16:30:34 -0700 Subject: [PATCH 07/50] Python 3 compatibility fix --- testing/test_file_data_source.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/testing/test_file_data_source.py b/testing/test_file_data_source.py index de4d9393..9b4a2c7b 100644 --- a/testing/test_file_data_source.py +++ b/testing/test_file_data_source.py @@ -1,6 +1,7 @@ import json import os import pytest +import six import tempfile import threading import time @@ -93,7 +94,7 @@ def teardown_function(): def make_temp_file(content): f, path = tempfile.mkstemp() - os.write(f, content) + os.write(f, six.b(content)) os.close(f) return path From a43bf0c56789f26f80199e260ca04c4b9cb6b918 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 2 Nov 2018 13:14:55 -0700 Subject: [PATCH 08/50] add file watching, update documentation and tests --- ldclient/file_data_source.py | 194 ++++++++++++++++++++++++++++++- test-requirements.txt | 1 + testing/test_file_data_source.py | 76 +++++++----- 3 files changed, 239 insertions(+), 32 deletions(-) diff --git a/ldclient/file_data_source.py b/ldclient/file_data_source.py index a8351ba6..09afa263 100644 --- a/ldclient/file_data_source.py +++ b/ldclient/file_data_source.py @@ -1,4 +1,5 @@ import json +import os import six import traceback @@ -9,7 +10,17 @@ except ImportError: pass +have_watchdog = False +try: + import watchdog + import watchdog.events + import watchdog.observers + have_watchdog = True +except ImportError: + pass + from ldclient.interfaces import UpdateProcessor 
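+# RepeatingTimer (imported below) drives the PollingAutoUpdater defined later in
+# this module, which falls back to polling file modification times when the
+# optional 'watchdog' package is not installed.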
+from ldclient.repeating_timer import RepeatingTimer from ldclient.util import log from ldclient.versioned_data_kind import FEATURES, SEGMENTS @@ -17,6 +28,101 @@ class FileDataSource(UpdateProcessor): @classmethod def factory(cls, **kwargs): + """Provides a way to use local files as a source of feature flag state. This would typically be + used in a test environment, to operate using a predetermined feature flag state without an + actual LaunchDarkly connection. + + To use this component, call `FileDataSource.factory`, and store its return value in the + `update_processor_class` property of your LaunchDarkly client configuration. In the options + to `factory`, set `paths` to the file path(s) of your data file(s): + :: + + factory = FileDataSource.factory(paths: [ myFilePath ]) + config = Config(update_processor_class = factory) + + This will cause the client not to connect to LaunchDarkly to get feature flags. The + client may still make network connections to send analytics events, unless you have disabled + this with Config.send_events or Config.offline. + + Flag data files can be either JSON or YAML (in order to use YAML, you must install the 'pyyaml' + package). They contain an object with three possible properties: + + * "flags": Feature flag definitions. + * "flagValues": Simplified feature flags that contain only a value. + * "segments": User segment definitions. + + The format of the data in "flags" and "segments" is defined by the LaunchDarkly application + and is subject to change. Rather than trying to construct these objects yourself, it is simpler + to request existing flags directly from the LaunchDarkly server in JSON format, and use this + output as the starting point for your file. In Linux you would do this: + :: + + curl -H "Authorization: {your sdk key}" https://app.launchdarkly.com/sdk/latest-all + + The output will look something like this (but with many more properties): + :: + + { + "flags": { + "flag-key-1": { + "key": "flag-key-1", + "on": true, + "variations": [ "a", "b" ] + } + }, + "segments": { + "segment-key-1": { + "key": "segment-key-1", + "includes": [ "user-key-1" ] + } + } + } + + Data in this format allows the SDK to exactly duplicate all the kinds of flag behavior supported + by LaunchDarkly. However, in many cases you will not need this complexity, but will just want to + set specific flag keys to specific values. For that, you can use a much simpler format: + :: + + { + "flagValues": { + "my-string-flag-key": "value-1", + "my-boolean-flag-key": true, + "my-integer-flag-key": 3 + } + } + + Or, in YAML: + :: + + flagValues: + my-string-flag-key: "value-1" + my-boolean-flag-key: true + my-integer-flag-key: 1 + + It is also possible to specify both "flags" and "flagValues", if you want some flags + to have simple values and others to have complex behavior. However, it is an error to use the + same flag key or segment key more than once, either in a single file or across multiple files. + + If the data source encounters any error in any file-- malformed content, a missing file, or a + duplicate key-- it will not load flags from any of the files. + + :param kwargs: + See below + + :Keyword arguments: + * **paths** (array): The paths of the source files for loading flag data. These may be absolute paths + or relative to the current working directory. Files will be parsed as JSON unless the 'pyyaml' + package is installed, in which case YAML is also allowed. 
+ * **auto_update** (boolean): True if the data source should watch for changes to the source file(s) + and reload flags whenever there is a change. The default implementation of this feature is based on + polling the filesystem, which may not perform well; if you install the 'watchdog' package (not + included by default, to avoid adding unwanted dependencies to the SDK), its native file watching + mechanism will be used instead. Note that auto-updating will only work if all of the files you + specified have valid directory paths at startup time. + * **poll_interval** (float): The minimum interval, in seconds, between checks for file modifications - + used only if auto_update is true, and if the native file-watching mechanism from 'watchdog' is not + being used. The default value is 1 second. + """ return lambda config, store, ready : FileDataSource(store, kwargs, ready) def __init__(self, store, options, ready): @@ -26,16 +132,25 @@ def __init__(self, store, options, ready): self._paths = options.get('paths', []) if isinstance(self._paths, six.string_types): self._paths = [ self._paths ] - + self._auto_update = options.get('auto_update', False) + self._poll_interval = options.get('poll_interval', 1) + self._force_polling = options.get('force_polling', False) # used only in tests + def start(self): self._load_all() + if self._auto_update: + self._auto_updater = self._start_auto_updater() + else: + self._auto_updater = None + # We will signal readiness immediately regardless of whether the file load succeeded or failed - # the difference can be detected by checking initialized() self._ready.set() def stop(self): - pass + if self._auto_updater: + self._auto_updater.stop() def initialized(self): return self._inited @@ -66,10 +181,7 @@ def _load_file(self, path, all_data): def _parse_content(self, content): if have_yaml: - if content.strip().startswith("{"): - return json.loads(content) - else: - return yaml.load(content) + return yaml.load(content) # pyyaml correctly parses JSON too return json.loads(content) def _add_item(self, all_data, kind, item): @@ -89,3 +201,73 @@ def _make_flag_with_value(self, key, value): }, 'variations': [ value ] } + + def _start_auto_updater(self): + resolved_paths = [] + for path in self._paths: + try: + resolved_paths.append(os.path.realpath(path)) + except: + log.warn('Cannot watch for changes to data file "%s" because it is an invalid path' % path) + if have_watchdog and not self._force_polling: + return FileDataSource.WatchdogAutoUpdater(resolved_paths, self._load_all) + else: + return FileDataSource.PollingAutoUpdater(resolved_paths, self._load_all, self._poll_interval) + + # Watch for changes to data files using the watchdog package. This uses native OS filesystem notifications + # if available for the current platform. + class WatchdogAutoUpdater(object): + def __init__(self, resolved_paths, reloader): + watched_files = set(resolved_paths) + + class LDWatchdogHandler(watchdog.events.FileSystemEventHandler): + def on_any_event(self, event): + if event.src_path in watched_files: + reloader() + + dir_paths = set() + for path in resolved_paths: + dir_paths.add(os.path.dirname(path)) + + self._observer = watchdog.observers.Observer() + handler = LDWatchdogHandler() + for path in dir_paths: + self._observer.schedule(handler, path) + self._observer.start() + + def stop(self): + self._observer.stop() + self._observer.join() + + # Watch for changes to data files by polling their modification times. 
This is used if auto-update is + # on but the watchdog package is not installed. + class PollingAutoUpdater(object): + def __init__(self, resolved_paths, reloader, interval): + self._paths = resolved_paths + self._reloader = reloader + self._file_times = self._check_file_times() + self._timer = RepeatingTimer(interval, self._poll) + self._timer.start() + + def stop(self): + self._timer.stop() + + def _poll(self): + new_times = self._check_file_times() + changed = False + for file_path, file_time in six.iteritems(self._file_times): + if new_times.get(file_path) is not None and new_times.get(file_path) != file_time: + changed = True + break + self._file_times = new_times + if changed: + self._reloader() + + def _check_file_times(self): + ret = {} + for path in self._paths: + try: + ret[path] = os.path.getmtime(path) + except: + ret[path] = None + return ret diff --git a/test-requirements.txt b/test-requirements.txt index 1aa5903e..413ef355 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -6,3 +6,4 @@ pytest-capturelog>=0.7 pytest-cov>=2.4.0 codeclimate-test-reporter>=0.2.1 pyyaml>=3.0 +watchdog>=0.9 diff --git a/testing/test_file_data_source.py b/testing/test_file_data_source.py index 9b4a2c7b..4fa16bff 100644 --- a/testing/test_file_data_source.py +++ b/testing/test_file_data_source.py @@ -78,19 +78,19 @@ } ''' -fds = None +data_source = None store = None ready = None def setup_function(): - global fds, store, ready + global data_source, store, ready store = InMemoryFeatureStore() ready = threading.Event() def teardown_function(): - if fds is not None: - fds.stop() + if data_source is not None: + data_source.stop() def make_temp_file(content): f, path = tempfile.mkstemp() @@ -105,9 +105,9 @@ def replace_file(path, content): def test_does_not_load_data_prior_to_start(): path = make_temp_file('{"flagValues":{"key":"value"}}') try: - fds = FileDataSource.factory(paths = path)(Config(), store, ready) + data_source = FileDataSource.factory(paths = path)(Config(), store, ready) assert ready.is_set() is False - assert fds.initialized() is False + assert data_source.initialized() is False assert store.initialized is False finally: os.remove(path) @@ -115,8 +115,8 @@ def test_does_not_load_data_prior_to_start(): def test_loads_flags_on_start_from_json(): path = make_temp_file(all_properties_json) try: - fds = FileDataSource.factory(paths = path)(Config(), store, ready) - fds.start() + data_source = FileDataSource.factory(paths = path)(Config(), store, ready) + data_source.start() assert store.initialized is True assert sorted(list(store.all(FEATURES, lambda x: x).keys())) == all_flag_keys finally: @@ -125,8 +125,8 @@ def test_loads_flags_on_start_from_json(): def test_loads_flags_on_start_from_yaml(): path = make_temp_file(all_properties_yaml) try: - fds = FileDataSource.factory(paths = path)(Config(), store, ready) - fds.start() + data_source = FileDataSource.factory(paths = path)(Config(), store, ready) + data_source.start() assert store.initialized is True assert sorted(list(store.all(FEATURES, lambda x: x).keys())) == all_flag_keys finally: @@ -135,26 +135,26 @@ def test_loads_flags_on_start_from_yaml(): def test_sets_ready_event_and_initialized_on_successful_load(): path = make_temp_file(all_properties_json) try: - fds = FileDataSource.factory(paths = path)(Config(), store, ready) - fds.start() - assert fds.initialized() is True + data_source = FileDataSource.factory(paths = path)(Config(), store, ready) + data_source.start() + assert data_source.initialized() is True assert 
ready.is_set() is True finally: os.remove(path) def test_sets_ready_event_and_does_not_set_initialized_on_unsuccessful_load(): bad_file_path = 'no-such-file' - fds = FileDataSource.factory(paths = bad_file_path)(Config(), store, ready) - fds.start() - assert fds.initialized() is False + data_source = FileDataSource.factory(paths = bad_file_path)(Config(), store, ready) + data_source.start() + assert data_source.initialized() is False assert ready.is_set() is True def test_can_load_multiple_files(): path1 = make_temp_file(flag_only_json) path2 = make_temp_file(segment_only_json) try: - fds = FileDataSource.factory(paths = [ path1, path2 ])(Config(), store, ready) - fds.start() + data_source = FileDataSource.factory(paths = [ path1, path2 ])(Config(), store, ready) + data_source.start() assert len(store.all(FEATURES, lambda x: x)) == 1 assert len(store.all(SEGMENTS, lambda x: x)) == 1 finally: @@ -165,8 +165,8 @@ def test_does_not_allow_duplicate_keys(): path1 = make_temp_file(flag_only_json) path2 = make_temp_file(flag_only_json) try: - fds = FileDataSource.factory(paths = [ path1, path2 ])(Config(), store, ready) - fds.start() + data_source = FileDataSource.factory(paths = [ path1, path2 ])(Config(), store, ready) + data_source.start() assert len(store.all(FEATURES, lambda x: x)) == 0 finally: os.remove(path1) @@ -175,8 +175,8 @@ def test_does_not_allow_duplicate_keys(): def test_does_not_reload_modified_file_if_auto_update_is_off(): path = make_temp_file(flag_only_json) try: - fds = FileDataSource.factory(paths = path)(Config(), store, ready) - fds.start() + data_source = FileDataSource.factory(paths = path)(Config(), store, ready) + data_source.start() assert len(store.all(SEGMENTS, lambda x: x)) == 0 time.sleep(0.5) replace_file(path, segment_only_json) @@ -185,22 +185,46 @@ def test_does_not_reload_modified_file_if_auto_update_is_off(): finally: os.remove(path) +def do_auto_update_test(options): + path = make_temp_file(flag_only_json) + options['paths'] = path + try: + data_source = FileDataSource.factory(**options)(Config(), store, ready) + data_source.start() + assert len(store.all(SEGMENTS, lambda x: x)) == 0 + time.sleep(0.5) + replace_file(path, segment_only_json) + time.sleep(0.5) + assert len(store.all(SEGMENTS, lambda x: x)) == 1 + finally: + os.remove(path) + +def test_reloads_modified_file_if_auto_update_is_on(): + do_auto_update_test({ 'auto_update': True }) + +def test_reloads_modified_file_in_polling_mode(): + do_auto_update_test({ 'auto_update': True, 'force_polling': True, 'poll_interval': 0.1 }) + def test_evaluates_full_flag_with_client_as_expected(): path = make_temp_file(all_properties_json) try: - fds = FileDataSource.factory(paths = path) - client = LDClient(config=Config(update_processor_class = fds, send_events = False)) + data_source = FileDataSource.factory(paths = path) + client = LDClient(config=Config(update_processor_class = data_source, send_events = False)) value = client.variation('flag1', { 'key': 'user' }, '') assert value == 'on' finally: os.remove(path) + if client is not None: + client.close() def test_evaluates_simplified_flag_with_client_as_expected(): path = make_temp_file(all_properties_json) try: - fds = FileDataSource.factory(paths = path) - client = LDClient(config=Config(update_processor_class = fds, send_events = False)) + data_source = FileDataSource.factory(paths = path) + client = LDClient(config=Config(update_processor_class = data_source, send_events = False)) value = client.variation('flag2', { 'key': 'user' }, '') assert value == 
'value2' finally: os.remove(path) + if client is not None: + client.close() From 2cea73061eaba3d4d7ac812e9fbf9fffb7de5712 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 2 Nov 2018 13:19:29 -0700 Subject: [PATCH 09/50] readme --- README.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/README.md b/README.md index 57aac968..edef13e6 100644 --- a/README.md +++ b/README.md @@ -78,6 +78,10 @@ Supported Python versions ---------- The SDK is tested with the most recent patch releases of Python 2.7, 3.3, 3.4, 3.5, and 3.6. Python 2.6 is no longer supported. +Using flag data from a file +--------------------------- +For testing purposes, the SDK can be made to read feature flag state from a file or files instead of connecting to LaunchDarkly. See [`file_data_source.py`](https://github.com/launchdarkly/python-client/blob/master/ldclient/file_data_source.py) for more details. + Learn more ----------- From dcf1afe6f7f1fd1535450a36fd26af18afd5c6af Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 2 Nov 2018 13:20:42 -0700 Subject: [PATCH 10/50] debugging --- ldclient/file_data_source.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/ldclient/file_data_source.py b/ldclient/file_data_source.py index 09afa263..79d9655f 100644 --- a/ldclient/file_data_source.py +++ b/ldclient/file_data_source.py @@ -218,10 +218,12 @@ def _start_auto_updater(self): # if available for the current platform. class WatchdogAutoUpdater(object): def __init__(self, resolved_paths, reloader): + print("*** all paths: %s" % resolved_paths) watched_files = set(resolved_paths) class LDWatchdogHandler(watchdog.events.FileSystemEventHandler): def on_any_event(self, event): + print("*** got event: %s" % event.src_path) if event.src_path in watched_files: reloader() @@ -232,6 +234,7 @@ def on_any_event(self, event): self._observer = watchdog.observers.Observer() handler = LDWatchdogHandler() for path in dir_paths: + print("*** watching: %s" % path) self._observer.schedule(handler, path) self._observer.start() From 4e98fdd3f3c0e0ecfbf608643ad17268c92925fa Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 2 Nov 2018 13:35:03 -0700 Subject: [PATCH 11/50] debugging --- testing/test_file_data_source.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/testing/test_file_data_source.py b/testing/test_file_data_source.py index 4fa16bff..7e565c17 100644 --- a/testing/test_file_data_source.py +++ b/testing/test_file_data_source.py @@ -194,8 +194,14 @@ def do_auto_update_test(options): assert len(store.all(SEGMENTS, lambda x: x)) == 0 time.sleep(0.5) replace_file(path, segment_only_json) - time.sleep(0.5) - assert len(store.all(SEGMENTS, lambda x: x)) == 1 + print("*** modified file %s" % path) + deadline = time.time() + 10 + while time.time() < deadline: + time.sleep(0.1) + if len(store.all(SEGMENTS, lambda x: x)) == 1: + return + print("*** checked") + assert False, "Flags were not reloaded after 10 seconds" finally: os.remove(path) From 8f3c2217805da177d412a6a5543982ad3e118ca6 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 2 Nov 2018 13:41:52 -0700 Subject: [PATCH 12/50] debugging --- ldclient/file_data_source.py | 2 ++ testing/test_file_data_source.py | 5 ++++- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/ldclient/file_data_source.py b/ldclient/file_data_source.py index 79d9655f..c1be6974 100644 --- a/ldclient/file_data_source.py +++ b/ldclient/file_data_source.py @@ -239,6 +239,7 @@ def on_any_event(self, event): self._observer.start() def stop(self): + print("*** stopping 
observer") self._observer.stop() self._observer.join() @@ -253,6 +254,7 @@ def __init__(self, resolved_paths, reloader, interval): self._timer.start() def stop(self): + print("*** stopping polling") self._timer.stop() def _poll(self): diff --git a/testing/test_file_data_source.py b/testing/test_file_data_source.py index 7e565c17..e62fff62 100644 --- a/testing/test_file_data_source.py +++ b/testing/test_file_data_source.py @@ -199,16 +199,19 @@ def do_auto_update_test(options): while time.time() < deadline: time.sleep(0.1) if len(store.all(SEGMENTS, lambda x: x)) == 1: + print("*** success on %s" % path) return - print("*** checked") + print("*** checked %s" % path) assert False, "Flags were not reloaded after 10 seconds" finally: os.remove(path) def test_reloads_modified_file_if_auto_update_is_on(): + print("*** with watchdog") do_auto_update_test({ 'auto_update': True }) def test_reloads_modified_file_in_polling_mode(): + print("*** with polling") do_auto_update_test({ 'auto_update': True, 'force_polling': True, 'poll_interval': 0.1 }) def test_evaluates_full_flag_with_client_as_expected(): From 84276ddc908a1de2fae9922aaaf538d5eac560a1 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 2 Nov 2018 13:48:26 -0700 Subject: [PATCH 13/50] fix cleanup logic --- ldclient/file_data_source.py | 3 +- testing/test_file_data_source.py | 53 +++++++++++++++++--------------- 2 files changed, 30 insertions(+), 26 deletions(-) diff --git a/ldclient/file_data_source.py b/ldclient/file_data_source.py index c1be6974..0b51cfdd 100644 --- a/ldclient/file_data_source.py +++ b/ldclient/file_data_source.py @@ -133,6 +133,7 @@ def __init__(self, store, options, ready): if isinstance(self._paths, six.string_types): self._paths = [ self._paths ] self._auto_update = options.get('auto_update', False) + self._auto_updater = None self._poll_interval = options.get('poll_interval', 1) self._force_polling = options.get('force_polling', False) # used only in tests @@ -141,8 +142,6 @@ def start(self): if self._auto_update: self._auto_updater = self._start_auto_updater() - else: - self._auto_updater = None # We will signal readiness immediately regardless of whether the file load succeeded or failed - # the difference can be detected by checking initialized() diff --git a/testing/test_file_data_source.py b/testing/test_file_data_source.py index e62fff62..8a8f5d5a 100644 --- a/testing/test_file_data_source.py +++ b/testing/test_file_data_source.py @@ -92,6 +92,11 @@ def teardown_function(): if data_source is not None: data_source.stop() +def make_data_source(**kwargs): + global data_source + data_source = FileDataSource.factory(**kwargs)(Config(), store, ready) + return data_source + def make_temp_file(content): f, path = tempfile.mkstemp() os.write(f, six.b(content)) @@ -105,9 +110,9 @@ def replace_file(path, content): def test_does_not_load_data_prior_to_start(): path = make_temp_file('{"flagValues":{"key":"value"}}') try: - data_source = FileDataSource.factory(paths = path)(Config(), store, ready) + source = make_data_source(paths = path) assert ready.is_set() is False - assert data_source.initialized() is False + assert source.initialized() is False assert store.initialized is False finally: os.remove(path) @@ -115,8 +120,8 @@ def test_does_not_load_data_prior_to_start(): def test_loads_flags_on_start_from_json(): path = make_temp_file(all_properties_json) try: - data_source = FileDataSource.factory(paths = path)(Config(), store, ready) - data_source.start() + source = make_data_source(paths = path) + source.start() 
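+    # make_data_source (defined above) also stores the instance in the module-level
+    # 'data_source' variable, so teardown_function can stop it even if an assertion
+    # below fails before the end of the test.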
assert store.initialized is True assert sorted(list(store.all(FEATURES, lambda x: x).keys())) == all_flag_keys finally: @@ -125,8 +130,8 @@ def test_loads_flags_on_start_from_json(): def test_loads_flags_on_start_from_yaml(): path = make_temp_file(all_properties_yaml) try: - data_source = FileDataSource.factory(paths = path)(Config(), store, ready) - data_source.start() + source = make_data_source(paths = path) + source.start() assert store.initialized is True assert sorted(list(store.all(FEATURES, lambda x: x).keys())) == all_flag_keys finally: @@ -135,26 +140,26 @@ def test_loads_flags_on_start_from_yaml(): def test_sets_ready_event_and_initialized_on_successful_load(): path = make_temp_file(all_properties_json) try: - data_source = FileDataSource.factory(paths = path)(Config(), store, ready) - data_source.start() - assert data_source.initialized() is True + source = make_data_source(paths = path) + source.start() + assert source.initialized() is True assert ready.is_set() is True finally: os.remove(path) def test_sets_ready_event_and_does_not_set_initialized_on_unsuccessful_load(): bad_file_path = 'no-such-file' - data_source = FileDataSource.factory(paths = bad_file_path)(Config(), store, ready) - data_source.start() - assert data_source.initialized() is False + source = make_data_source(paths = bad_file_path) + source.start() + assert source.initialized() is False assert ready.is_set() is True def test_can_load_multiple_files(): path1 = make_temp_file(flag_only_json) path2 = make_temp_file(segment_only_json) try: - data_source = FileDataSource.factory(paths = [ path1, path2 ])(Config(), store, ready) - data_source.start() + source = make_data_source(paths = [ path1, path2 ]) + source.start() assert len(store.all(FEATURES, lambda x: x)) == 1 assert len(store.all(SEGMENTS, lambda x: x)) == 1 finally: @@ -165,8 +170,8 @@ def test_does_not_allow_duplicate_keys(): path1 = make_temp_file(flag_only_json) path2 = make_temp_file(flag_only_json) try: - data_source = FileDataSource.factory(paths = [ path1, path2 ])(Config(), store, ready) - data_source.start() + source = make_data_source(paths = [ path1, path2 ]) + source.start() assert len(store.all(FEATURES, lambda x: x)) == 0 finally: os.remove(path1) @@ -175,8 +180,8 @@ def test_does_not_allow_duplicate_keys(): def test_does_not_reload_modified_file_if_auto_update_is_off(): path = make_temp_file(flag_only_json) try: - data_source = FileDataSource.factory(paths = path)(Config(), store, ready) - data_source.start() + source = make_data_source(paths = path) + source.start() assert len(store.all(SEGMENTS, lambda x: x)) == 0 time.sleep(0.5) replace_file(path, segment_only_json) @@ -189,8 +194,8 @@ def do_auto_update_test(options): path = make_temp_file(flag_only_json) options['paths'] = path try: - data_source = FileDataSource.factory(**options)(Config(), store, ready) - data_source.start() + source = make_data_source(**options) + source.start() assert len(store.all(SEGMENTS, lambda x: x)) == 0 time.sleep(0.5) replace_file(path, segment_only_json) @@ -217,8 +222,8 @@ def test_reloads_modified_file_in_polling_mode(): def test_evaluates_full_flag_with_client_as_expected(): path = make_temp_file(all_properties_json) try: - data_source = FileDataSource.factory(paths = path) - client = LDClient(config=Config(update_processor_class = data_source, send_events = False)) + factory = FileDataSource.factory(paths = path) + client = LDClient(config=Config(update_processor_class = factory, send_events = False)) value = client.variation('flag1', { 'key': 
'user' }, '') assert value == 'on' finally: @@ -229,8 +234,8 @@ def test_evaluates_full_flag_with_client_as_expected(): def test_evaluates_simplified_flag_with_client_as_expected(): path = make_temp_file(all_properties_json) try: - data_source = FileDataSource.factory(paths = path) - client = LDClient(config=Config(update_processor_class = data_source, send_events = False)) + factory = FileDataSource.factory(paths = path) + client = LDClient(config=Config(update_processor_class = factory, send_events = False)) value = client.variation('flag2', { 'key': 'user' }, '') assert value == 'value2' finally: From 2a822e6e82a1e8dffcdfd59d183d43219dff391c Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 2 Nov 2018 13:50:31 -0700 Subject: [PATCH 14/50] rm debugging --- ldclient/file_data_source.py | 5 ----- testing/test_file_data_source.py | 5 ----- 2 files changed, 10 deletions(-) diff --git a/ldclient/file_data_source.py b/ldclient/file_data_source.py index 0b51cfdd..c4013a52 100644 --- a/ldclient/file_data_source.py +++ b/ldclient/file_data_source.py @@ -217,12 +217,10 @@ def _start_auto_updater(self): # if available for the current platform. class WatchdogAutoUpdater(object): def __init__(self, resolved_paths, reloader): - print("*** all paths: %s" % resolved_paths) watched_files = set(resolved_paths) class LDWatchdogHandler(watchdog.events.FileSystemEventHandler): def on_any_event(self, event): - print("*** got event: %s" % event.src_path) if event.src_path in watched_files: reloader() @@ -233,12 +231,10 @@ def on_any_event(self, event): self._observer = watchdog.observers.Observer() handler = LDWatchdogHandler() for path in dir_paths: - print("*** watching: %s" % path) self._observer.schedule(handler, path) self._observer.start() def stop(self): - print("*** stopping observer") self._observer.stop() self._observer.join() @@ -253,7 +249,6 @@ def __init__(self, resolved_paths, reloader, interval): self._timer.start() def stop(self): - print("*** stopping polling") self._timer.stop() def _poll(self): diff --git a/testing/test_file_data_source.py b/testing/test_file_data_source.py index 8a8f5d5a..68d1e5b7 100644 --- a/testing/test_file_data_source.py +++ b/testing/test_file_data_source.py @@ -199,24 +199,19 @@ def do_auto_update_test(options): assert len(store.all(SEGMENTS, lambda x: x)) == 0 time.sleep(0.5) replace_file(path, segment_only_json) - print("*** modified file %s" % path) deadline = time.time() + 10 while time.time() < deadline: time.sleep(0.1) if len(store.all(SEGMENTS, lambda x: x)) == 1: - print("*** success on %s" % path) return - print("*** checked %s" % path) assert False, "Flags were not reloaded after 10 seconds" finally: os.remove(path) def test_reloads_modified_file_if_auto_update_is_on(): - print("*** with watchdog") do_auto_update_test({ 'auto_update': True }) def test_reloads_modified_file_in_polling_mode(): - print("*** with polling") do_auto_update_test({ 'auto_update': True, 'force_polling': True, 'poll_interval': 0.1 }) def test_evaluates_full_flag_with_client_as_expected(): From ac5e8de65036434ee0be93dee64c7179a9200b50 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 13 Nov 2018 20:39:44 -0800 Subject: [PATCH 15/50] typo in comment --- ldclient/file_data_source.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ldclient/file_data_source.py b/ldclient/file_data_source.py index c4013a52..ebff765b 100644 --- a/ldclient/file_data_source.py +++ b/ldclient/file_data_source.py @@ -37,7 +37,7 @@ def factory(cls, **kwargs): to `factory`, set `paths` to 
the file path(s) of your data file(s): :: - factory = FileDataSource.factory(paths: [ myFilePath ]) + factory = FileDataSource.factory(paths = [ myFilePath ]) config = Config(update_processor_class = factory) This will cause the client not to connect to LaunchDarkly to get feature flags. The From 040ced945495c896db7e6eb0a5f259710f2e7113 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sat, 29 Dec 2018 13:27:13 -0800 Subject: [PATCH 16/50] add feature store wrapper class and make Redis feature store use it --- ldclient/feature_store.py | 47 +++++ ldclient/feature_store_helpers.py | 103 +++++++++ ldclient/integrations.py | 31 +++ ldclient/interfaces.py | 145 +++++++++++-- ldclient/redis_feature_store.py | 124 +++++------ testing/test_feature_store.py | 13 +- testing/test_feature_store_helpers.py | 287 ++++++++++++++++++++++++++ 7 files changed, 649 insertions(+), 101 deletions(-) create mode 100644 ldclient/feature_store_helpers.py create mode 100644 ldclient/integrations.py create mode 100644 testing/test_feature_store_helpers.py diff --git a/ldclient/feature_store.py b/ldclient/feature_store.py index 155743ea..e4d2f667 100644 --- a/ldclient/feature_store.py +++ b/ldclient/feature_store.py @@ -4,6 +4,53 @@ from ldclient.rwlock import ReadWriteLock +class CacheConfig: + """Encapsulates caching parameters for feature store implementations that support local caching. + """ + + DEFAULT_EXPIRATION = 15 + DEFAULT_CAPACITY = 1000 + + def __init__(self, + expiration = DEFAULT_EXPIRATION, + capacity = DEFAULT_CAPACITY): + """Constructs an instance of CacheConfig. + :param float expiration: The cache TTL, in seconds. Items will be evicted from the cache after + this amount of time from the time when they were originally cached. If the time is less than or + equal to zero, caching is disabled. + :param int capacity: The maximum number of items that can be in the cache at a time. + """ + self._expiration = expiration + self._capacity = capacity + + @staticmethod + def default(): + """Returns an instance of CacheConfig with default properties. By default, caching is enabled. + This is the same as calling the constructor with no parameters. + :rtype: CacheConfig + """ + return CacheConfig() + + @staticmethod + def disabled(): + """Returns an instance of CacheConfig specifying that caching should be disabled. + :rtype: CacheConfig + """ + return CacheConfig(expiration = 0) + + @property + def enabled(self): + return self._expiration > 0 + + @property + def expiration(self): + return self._expiration + + @property + def capacity(self): + return self._capacity + + class InMemoryFeatureStore(FeatureStore): """ In-memory implementation of a store that holds feature flags and related data received from the streaming API. diff --git a/ldclient/feature_store_helpers.py b/ldclient/feature_store_helpers.py new file mode 100644 index 00000000..d8359274 --- /dev/null +++ b/ldclient/feature_store_helpers.py @@ -0,0 +1,103 @@ +from expiringdict import ExpiringDict + +from ldclient.interfaces import FeatureStore + + +class CachingStoreWrapper(FeatureStore): + """CachingStoreWrapper is a partial implementation of :class:ldclient.interfaces.FeatureStore that + delegates the basic functionality to an implementation of :class:ldclient.interfaces.FeatureStoreCore - + while adding optional caching behavior and other logic that would otherwise be repeated in every + feature store implementation. This makes it easier to create new database integrations by implementing + only the database-specific logic. 
+ """ + __INITED_CACHE_KEY__ = "$inited" + + def __init__(self, core, cache_config): + self._core = core + if cache_config.enabled: + self._cache = ExpiringDict(max_len=cache_config.capacity, max_age_seconds=cache_config.expiration) + else: + self._cache = None + self._inited = False + + def init(self, all_data): + self._core.init_internal(all_data) + if self._cache is not None: + self._cache.clear() + for kind, items in all_data.items(): + self._cache[self._all_cache_key(kind)] = self._items_if_not_deleted(items) + for key, item in items.items(): + self._cache[self._item_cache_key(kind, key)] = [item] # note array wrapper + self._inited = True + + def get(self, kind, key, callback=lambda x: x): + if self._cache is not None: + cache_key = self._item_cache_key(kind, key) + cached_item = self._cache.get(cache_key) + # note, cached items are wrapped in an array so we can cache None values + if cached_item is not None: + return callback(self._item_if_not_deleted(cached_item[0])) + item = self._core.get_internal(kind, key) + if self._cache is not None: + self._cache[cache_key] = [item] + return callback(self._item_if_not_deleted(item)) + + def all(self, kind, callback): + if self._cache is not None: + cache_key = self._all_cache_key(kind) + cached_items = self._cache.get(cache_key) + if cached_items is not None: + return callback(cached_items) + items = self._items_if_not_deleted(self._core.get_all_internal(kind)) + if self._cache is not None: + self._cache[cache_key] = items + return callback(items) + + def delete(self, kind, key, version): + deleted_item = { "key": key, "version": version, "deleted": True } + self.upsert(kind, deleted_item) + + def upsert(self, kind, item): + new_state = self._core.upsert_internal(kind, item) + if self._cache is not None: + self._cache[self._item_cache_key(kind, item.get('key'))] = [new_state] + self._cache.pop(self._all_cache_key(kind), None) + + @property + def initialized(self): + if self._inited: + return True + if self._cache is None: + result = self._core.initialized_internal() + else: + result = self._cache.get(CachingStoreWrapper.__INITED_CACHE_KEY__) + if result is None: + result = self._core.initialized_internal() + self._cache[CachingStoreWrapper.__INITED_CACHE_KEY__] = result + if result: + self._inited = True + return result + + @staticmethod + def _item_cache_key(kind, key): + return "{0}:{1}".format(kind.namespace, key) + + @staticmethod + def _all_cache_key(kind): + return kind.namespace + + @staticmethod + def _item_if_not_deleted(item): + if item is not None and item.get('deleted', False): + return None + return item + + @staticmethod + def _items_if_not_deleted(items): + results = {} + if items is not None: + for key, item in items.items(): + if not item.get('deleted', False): + results[key] = item + return results + \ No newline at end of file diff --git a/ldclient/integrations.py b/ldclient/integrations.py new file mode 100644 index 00000000..a82783be --- /dev/null +++ b/ldclient/integrations.py @@ -0,0 +1,31 @@ +from ldclient.feature_store import CacheConfig +from ldclient.feature_store_helpers import CachingStoreWrapper +from ldclient.redis_feature_store import _RedisFeatureStoreCore + + +class Redis(object): + """Provides factory methods for integrations between the LaunchDarkly SDK and Redis, + """ + DEFAULT_URL = 'redis://localhost:6379/0' + DEFAULT_PREFIX = 'launchdarkly' + DEFAULT_MAX_CONNECTIONS = 16 + + @staticmethod + def new_feature_store(url=Redis.DEFAULT_URL, + prefix=Redis.DEFAULT_PREFIX, + 
max_connections=Redis.DEFAULT_MAX_CONNECTIONS, + caching=CacheConfig.default()): + """Creates a Redis-backed implementation of `:class:ldclient.feature_store.FeatureStore`. + + :param string url: The URL of the Redis host; defaults to `DEFAULT_URL` + :param string prefix: A namespace prefix to be prepended to all Redis keys; defaults to + `DEFAULT_PREFIX` + :param int max_connections: The maximum number of Redis connections to keep in the + connection pool; defaults to `DEFAULT_MAX_CONNECTIONS` + :param CacheConfig caching: Specifies whether local caching should be enabled and if so, + sets the cache properties; defaults to `CacheConfig.default()` + """ + core = _RedisFeatureStoreCore(url, prefix, max_connections) + wrapper = CachingStoreWrapper(core, caching) + wrapper.core = core # exposed for testing + return wrapper diff --git a/ldclient/interfaces.py b/ldclient/interfaces.py index 39898408..2710fa25 100644 --- a/ldclient/interfaces.py +++ b/ldclient/interfaces.py @@ -3,64 +3,86 @@ class FeatureStore(object): """ - Stores and retrieves the state of feature flags and related data + A versioned store for feature flags and related objects received from LaunchDarkly. + Implementations should permit concurrent access and updates. + + An "object", for `FeatureStore`, is simply a dict of arbitrary data which must have at least + three properties: "key" (its unique key), "version" (the version number provided by + LaunchDarkly), and "deleted" (True if this is a placeholder for a deleted object). + + Delete and upsert requests are versioned-- if the version number in the request is less than + the currently stored version of the object, the request should be ignored. + + These semantics support the primary use case for the store, which synchronizes a collection + of objects based on update messages that may be received out-of-order. """ __metaclass__ = ABCMeta @abstractmethod def get(self, kind, key, callback): """ - Gets a feature and calls the callback with the feature data to return the result - :param kind: Denotes which collection to access - one of the constants in versioned_data_kind + Retrieves the object to which the specified key is mapped, or None if the key is not found + or the associated object has a "deleted" property of True. The retrieved object, if any (a + dict) can be transformed by the specified callback. + + :param kind: The kind of object to get :type kind: VersionedDataKind - :param key: The key of the object + :param key: The key whose associated object is to be returned :type key: str - :param callback: The function that accepts the retrieved data and returns a transformed value - :type callback: Function that processes the retrieved object once received. - :return: The result of executing callback. + :param callback: A function that accepts the retrieved data and returns a transformed value + :type callback: function + :return: The result of executing callback """ @abstractmethod - def all(self, callback): + def all(self, kind, callback): """ - Returns all feature flags and their data - :param kind: Denotes which collection to access - one of the constants in versioned_data_kind + Retrieves a dictionary of all associated objects of a given kind. The retrieved dict of keys + to objects can be transformed by the specified callback. 
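+        For example (illustrative; the callback here simply passes the data through): ::
+
+            all_flags = store.all(FEATURES, lambda x: x)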
+
+        :param kind: The kind of objects to get
         :type kind: VersionedDataKind
-        :param callback: The function that accepts the retrieved data and returns a transformed value
-        :type callback: Function that processes the retrieved objects once received.
-        :rtype: The result of executing callback.
+        :param callback: A function that accepts the retrieved data and returns a transformed value
+        :type callback: function
+        :rtype: The result of executing callback
         """

     @abstractmethod
     def init(self, all_data):
         """
-        Initializes the store with a set of objects. Meant to be called by the UpdateProcessor
+        Initializes (or re-initializes) the store with the specified set of objects. Any existing entries
+        will be removed. Implementations can assume that this set of objects is up to date-- there is no
+        need to perform individual version comparisons between the existing objects and the supplied data.

-        :param all_data: The features and their data as provided by LD
+        :param all_data: All objects to be stored
         :type all_data: dict[VersionedDataKind, dict[str, dict]]
         """

     @abstractmethod
     def delete(self, kind, key, version):
         """
-        Marks an object as deleted
+        Deletes the object associated with the specified key, if it exists and its version is less than
+        the specified version. The object should be replaced in the data store by a
+        placeholder with the specified version and a "deleted" property of True.

-        :param kind: Denotes which collection to access - one of the constants in versioned_data_kind
+        :param kind: The kind of object to delete
         :type kind: VersionedDataKind
-        :param key: The object key
+        :param key: The key of the object to be deleted
         :type key: str
-        :param version: The version of the object to mark as deleted
+        :param version: The version for the delete operation
         :type version: int
         """

     @abstractmethod
     def upsert(self, kind, item):
         """
-        Inserts an object if its version is newer or missing
+        Updates or inserts the object associated with the specified key. If an item with the same key
+        already exists, it should update it only if the new item's version property is greater than
+        the old one.

-        :param kind: Denotes which collection to access - one of the constants in versioned_data_kind
+        :param kind: The kind of object to update
         :type kind: VersionedDataKind
-        :param item: The object to be inserted or updated - must have key and version properties
+        :param item: The object to update or insert
         :type item: dict
         """

@@ -73,6 +95,85 @@ def initialized(self):
         """


+class FeatureStoreCore(object):
+    """
+    `FeatureStoreCore` is an interface for a simplified subset of the functionality of :class:`FeatureStore`,
+    to be used in conjunction with :class:`feature_store_helpers.CachingStoreWrapper`. This allows
+    developers of custom `FeatureStore` implementations to avoid repeating logic that would
+    commonly be needed in any such implementation, such as caching. Instead, they can implement
+    only `FeatureStoreCore` and then create a `CachingStoreWrapper`.
+    """
+    __metaclass__ = ABCMeta
+
+    @abstractmethod
+    def get_internal(self, kind, key):
+        """
+        Returns the object to which the specified key is mapped, or None if no such item exists.
+        The method should not attempt to filter out any items based on their deleted property,
+        nor to cache any items.
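+        For example, a deleted placeholder such as { "key": "my-flag", "version": 3, "deleted": True }
+        (hypothetical values) should be returned as-is; `CachingStoreWrapper` filters such items out.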
+
+        :param kind: The kind of object to get
+        :type kind: VersionedDataKind
+        :param key: The key of the object
+        :type key: str
+        :return: The object to which the specified key is mapped, or None
+        :rtype: dict
+        """
+
+    @abstractmethod
+    def get_all_internal(self, kind):
+        """
+        Returns a dictionary of all associated objects of a given kind. The method should not attempt
+        to filter out any items based on their deleted property, nor to cache any items.
+
+        :param kind: The kind of objects to get
+        :type kind: VersionedDataKind
+        :return: A dictionary of keys to items
+        :rtype: dict[str, dict]
+        """
+
+    @abstractmethod
+    def init_internal(self, all_data):
+        """
+        Initializes (or re-initializes) the store with the specified set of objects. Any existing entries
+        will be removed. Implementations can assume that this set of objects is up to date-- there is no
+        need to perform individual version comparisons between the existing objects and the supplied
+        data.
+
+        :param all_data: A dictionary of data kinds to item collections
+        :type all_data: dict[VersionedDataKind, dict[str, dict]]
+        """
+
+    @abstractmethod
+    def upsert_internal(self, kind, item):
+        """
+        Updates or inserts the object associated with the specified key. If an item with the same key
+        already exists, it should update it only if the new item's version property is greater than
+        the old one. It should return the final state of the item, i.e. if the update succeeded then
+        it returns the item that was passed in, and if the update failed due to the version check
+        then it returns the item that is currently in the data store (this ensures that
+        `CachingStoreWrapper` will update the cache correctly).
+
+        :param kind: The kind of object to update
+        :type kind: VersionedDataKind
+        :param item: The object to update or insert
+        :type item: dict
+        :return: The state of the object after the update
+        :rtype: dict
+        """
+
+    @abstractmethod
+    def initialized_internal(self):
+        """
+        Returns true if this store has been initialized. In a shared data store, it should be able to
+        detect this even if init_internal was called in a different process, i.e. the test should be
+        based on looking at what is in the data store. The method does not need to worry about caching
+        this value; `CachingStoreWrapper` will only call it when necessary.
+
+        :rtype: bool
+        """
+
+
 class BackgroundOperation(object):
     """
     Performs a task in the background
diff --git a/ldclient/redis_feature_store.py b/ldclient/redis_feature_store.py
index 71b7261b..b9bdf731 100644
--- a/ldclient/redis_feature_store.py
+++ b/ldclient/redis_feature_store.py
@@ -1,21 +1,20 @@
 import json
-from pprint import pprint

-from expiringdict import ExpiringDict
 import redis

 from ldclient import log
-from ldclient.interfaces import FeatureStore
-from ldclient.memoized_value import MemoizedValue
+from ldclient.feature_store import CacheConfig
+from ldclient.feature_store_helpers import CachingStoreWrapper
+from ldclient.interfaces import FeatureStore, FeatureStoreCore
 from ldclient.versioned_data_kind import FEATURES


-class ForgetfulDict(dict):
-    def __setitem__(self, key, value):
-        pass
-
-
 class RedisFeatureStore(FeatureStore):
+    """A Redis-backed implementation of :class:`ldclient.feature_store.FeatureStore`.
+
+    This implementation class is deprecated and may be changed or removed in the future. Please use
+    :func:`ldclient.integrations.Redis.new_feature_store()`.
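+
+    For example, instead of ``RedisFeatureStore(expiration=30)`` you could write (a sketch
+    using the same defaults as this constructor): ::
+
+        from ldclient.feature_store import CacheConfig
+        from ldclient.integrations import Redis
+        store = Redis.new_feature_store(caching=CacheConfig(expiration=30))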
+ """ def __init__(self, url='redis://localhost:6379/0', prefix='launchdarkly', @@ -23,23 +22,42 @@ def __init__(self, expiration=15, capacity=1000): + self.core = _RedisFeatureStoreCore(url, prefix, max_connections) # exposed for testing + self._wrapper = CachingStoreWrapper(self.core, CacheConfig(expiration=expiration, capacity=capacity)) + + def get(self, kind, key, callback = lambda x: x): + return self._wrapper.get(kind, key, callback) + + def all(self, kind, callback): + return self._wrapper.all(kind, callback) + + def init(self, all_data): + return self._wrapper.init(all_data) + + def upsert(self, kind, item): + return self._wrapper.upsert(kind, item) + + def delete(self, kind, key, version): + return self._wrapper.delete(kind, key, version) + + @property + def initialized(self): + return self._wrapper.initialized + + +class _RedisFeatureStoreCore(FeatureStoreCore): + def __init__(self, url, prefix, max_connections): self._prefix = prefix - self._cache = ForgetfulDict() if expiration == 0 else ExpiringDict(max_len=capacity, - max_age_seconds=expiration) self._pool = redis.ConnectionPool.from_url(url=url, max_connections=max_connections) - self._inited = MemoizedValue(lambda: self._query_init()) + self.test_update_hook = None # exposed for testing log.info("Started RedisFeatureStore connected to URL: " + url + " using prefix: " + prefix) def _items_key(self, kind): return "{0}:{1}".format(self._prefix, kind.namespace) - def _cache_key(self, kind, key): - return "{0}:{1}".format(kind.namespace, key) - - def init(self, all_data): + def init_internal(self, all_data): pipe = redis.Redis(connection_pool=self._pool).pipeline() - self._cache.clear() all_count = 0 for kind, items in all_data.items(): @@ -48,53 +66,30 @@ def init(self, all_data): for key, item in items.items(): item_json = json.dumps(item) pipe.hset(base_key, key, item_json) - self._cache[self._cache_key(kind, key)] = item all_count = all_count + len(items) - try: - pipe.execute() - except: - self._cache.clear() - raise + pipe.execute() log.info("Initialized RedisFeatureStore with %d items", all_count) - self._inited.set(True) - def all(self, kind, callback): + def get_all_internal(self, kind): r = redis.Redis(connection_pool=self._pool) try: all_items = r.hgetall(self._items_key(kind)) except BaseException as e: log.error("RedisFeatureStore: Could not retrieve '%s' from Redis with error: %s. Returning None.", kind.namespace, e) - return callback(None) + return None if all_items is None or all_items is "": log.warn("RedisFeatureStore: call to get all '%s' returned no results. Returning None.", kind.namespace) - return callback(None) + return None results = {} for key, item_json in all_items.items(): key = key.decode('utf-8') # necessary in Python 3 - item = json.loads(item_json.decode('utf-8')) - if item.get('deleted', False) is False: - results[key] = item - return callback(results) - - def get(self, kind, key, callback=lambda x: x): - item = self._get_even_if_deleted(kind, key, check_cache=True) - if item is not None and item.get('deleted', False) is True: - log.debug("RedisFeatureStore: get returned deleted item %s in '%s'. 
Returning None.", key, kind.namespace) - return callback(None) - return callback(item) - - def _get_even_if_deleted(self, kind, key, check_cache = True): - cacheKey = self._cache_key(kind, key) - if check_cache: - item = self._cache.get(cacheKey) - if item is not None: - # reset ttl - self._cache[cacheKey] = item - return item + results[key] = json.loads(item_json.decode('utf-8')) + return results + def get_internal(self, kind, key): try: r = redis.Redis(connection_pool=self._pool) item_json = r.hget(self._items_key(kind), key) @@ -107,26 +102,9 @@ def _get_even_if_deleted(self, kind, key, check_cache = True): log.debug("RedisFeatureStore: key %s not found in '%s'. Returning None.", key, kind.namespace) return None - item = json.loads(item_json.decode('utf-8')) - self._cache[cacheKey] = item - return item - - def delete(self, kind, key, version): - deleted_item = { "key": key, "version": version, "deleted": True } - self._update_with_versioning(kind, deleted_item) - - def upsert(self, kind, item): - self._update_with_versioning(kind, item) - - @property - def initialized(self): - return self._inited.get() - - def _query_init(self): - r = redis.Redis(connection_pool=self._pool) - return r.exists(self._items_key(FEATURES)) + return json.loads(item_json.decode('utf-8')) - def _update_with_versioning(self, kind, item): + def upsert_internal(self, kind, item): r = redis.Redis(connection_pool=self._pool) base_key = self._items_key(kind) key = item['key'] @@ -135,14 +113,15 @@ def _update_with_versioning(self, kind, item): while True: pipeline = r.pipeline() pipeline.watch(base_key) - old = self._get_even_if_deleted(kind, key, check_cache=False) - self._before_update_transaction(base_key, key) + old = self.get_internal(kind, key) + if self.test_update_hook is not None: + self.test_update_hook(base_key, key) if old and old['version'] >= item['version']: log.debug('RedisFeatureStore: Attempted to %s key: %s version %d with a version that is the same or older: %d in "%s"', 'delete' if item.get('deleted') else 'update', key, old['version'], item['version'], kind.namespace) pipeline.unwatch() - break + return old else: pipeline.multi() pipeline.hset(base_key, key, item_json) @@ -153,8 +132,11 @@ def _update_with_versioning(self, kind, item): except redis.exceptions.WatchError: log.debug("RedisFeatureStore: concurrent modification detected, retrying") continue - self._cache[self._cache_key(kind, key)] = item - break + return item + + def initialized_internal(self): + r = redis.Redis(connection_pool=self._pool) + return r.exists(self._items_key(FEATURES)) def _before_update_transaction(self, base_key, key): # exposed for testing diff --git a/testing/test_feature_store.py b/testing/test_feature_store.py index 245341ec..b8696529 100644 --- a/testing/test_feature_store.py +++ b/testing/test_feature_store.py @@ -1,5 +1,4 @@ import json -from mock import patch import pytest import redis @@ -133,8 +132,7 @@ def test_upsert_older_version_after_delete(self, store): class TestRedisFeatureStoreExtraTests: - @patch.object(RedisFeatureStore, '_before_update_transaction') - def test_upsert_race_condition_against_external_client_with_higher_version(self, mock_method): + def test_upsert_race_condition_against_external_client_with_higher_version(self): other_client = redis.StrictRedis(host='localhost', port=6379, db=0) store = RedisFeatureStore() store.init({ FEATURES: {} }) @@ -144,7 +142,7 @@ def hook(base_key, key): if other_version['version'] <= 4: other_client.hset(base_key, key, json.dumps(other_version)) 
other_version['version'] = other_version['version'] + 1 - mock_method.side_effect = hook + store.core.test_update_hook = hook feature = { u'key': 'flagkey', u'version': 1 } @@ -152,8 +150,7 @@ def hook(base_key, key): result = store.get(FEATURES, 'flagkey', lambda x: x) assert result['version'] == 2 - @patch.object(RedisFeatureStore, '_before_update_transaction') - def test_upsert_race_condition_against_external_client_with_lower_version(self, mock_method): + def test_upsert_race_condition_against_external_client_with_lower_version(self): other_client = redis.StrictRedis(host='localhost', port=6379, db=0) store = RedisFeatureStore() store.init({ FEATURES: {} }) @@ -163,7 +160,7 @@ def hook(base_key, key): if other_version['version'] <= 4: other_client.hset(base_key, key, json.dumps(other_version)) other_version['version'] = other_version['version'] + 1 - mock_method.side_effect = hook + store.core.test_update_hook = hook feature = { u'key': 'flagkey', u'version': 5 } @@ -186,7 +183,7 @@ def test_exception_is_handled_in_all(self, caplog): # This just verifies the fix for a bug that caused an error during exception handling in Python 3 store = RedisFeatureStore(url='redis://bad') all = store.all(FEATURES, lambda x: x) - assert all is None + assert all == {} loglines = get_log_lines(caplog) assert len(loglines) == 2 message = loglines[1].message diff --git a/testing/test_feature_store_helpers.py b/testing/test_feature_store_helpers.py new file mode 100644 index 00000000..01bb245a --- /dev/null +++ b/testing/test_feature_store_helpers.py @@ -0,0 +1,287 @@ +import pytest +from time import sleep + +from ldclient.feature_store import CacheConfig +from ldclient.feature_store_helpers import CachingStoreWrapper +from ldclient.versioned_data_kind import VersionedDataKind + +THINGS = VersionedDataKind(namespace = "things", request_api_path = "", stream_api_path = "") +WRONG_THINGS = VersionedDataKind(namespace = "wrong", request_api_path = "", stream_api_path = "") + +def make_wrapper(core, cached): + return CachingStoreWrapper(core, CacheConfig(expiration=30) if cached else CacheConfig.disabled()) + +class MockCore: + def __init__(self): + self.data = {} + self.inited = False + self.inited_query_count = 0 + + def init_internal(self, all_data): + self.data = {} + for kind, items in all_data.items(): + self.data[kind] = items.copy() + + def get_internal(self, kind, key): + items = self.data.get(kind) + return None if items is None else items.get(key) + + def get_all_internal(self, kind): + return self.data.get(kind) + + def upsert_internal(self, kind, item): + key = item.get('key') + items = self.data.get(kind) + if items is None: + items = {} + self.data[kind] = items + old_item = items.get(key) + if old_item is None or old_item.get('version') < item.get('version'): + items[key] = item + return item + return old_item + + def initialized_internal(self): + self.inited_query_count = self.inited_query_count + 1 + return self.inited + + def force_set(self, kind, item): + items = self.data.get(kind) + if items is None: + items = {} + self.data[kind] = items + items[item.get('key')] = item + + def force_remove(self, kind, key): + items = self.data.get(kind) + if items is not None: + items.pop(key, None) + +class TestCachingStoreWrapper: + @pytest.mark.parametrize("cached", [False, True]) + def test_get_item(self, cached): + core = MockCore() + wrapper = make_wrapper(core, cached) + key = "flag" + itemv1 = { "key": key, "version": 1 } + itemv2 = { "key": key, "version": 2 } + + core.force_set(THINGS, itemv1) 
+        assert wrapper.get(THINGS, key) == itemv1
+
+        core.force_set(THINGS, itemv2)
+        assert wrapper.get(THINGS, key) == (itemv1 if cached else itemv2)  # if cached, we will not see the new underlying value yet
+
+    @pytest.mark.parametrize("cached", [False, True])
+    def test_get_deleted_item(self, cached):
+        core = MockCore()
+        wrapper = make_wrapper(core, cached)
+        key = "flag"
+        itemv1 = { "key": key, "version": 1, "deleted": True }
+        itemv2 = { "key": key, "version": 2 }
+
+        core.force_set(THINGS, itemv1)
+        assert wrapper.get(THINGS, key) is None  # item is filtered out because deleted is true
+
+        core.force_set(THINGS, itemv2)
+        assert wrapper.get(THINGS, key) == (None if cached else itemv2)  # if cached, we will not see the new underlying value yet
+
+    @pytest.mark.parametrize("cached", [False, True])
+    def test_get_missing_item(self, cached):
+        core = MockCore()
+        wrapper = make_wrapper(core, cached)
+        key = "flag"
+        item = { "key": key, "version": 1 }
+
+        assert wrapper.get(THINGS, key) is None
+
+        core.force_set(THINGS, item)
+        assert wrapper.get(THINGS, key) == (None if cached else item)  # the cache can retain a None result
+
+    @pytest.mark.parametrize("cached", [False, True])
+    def test_get_with_lambda(self, cached):
+        core = MockCore()
+        wrapper = make_wrapper(core, cached)
+        key = "flag"
+        item = { "key": key, "version": 1 }
+        modified_item = { "key": key, "version": 99 }
+
+        core.force_set(THINGS, item)
+        assert wrapper.get(THINGS, key, lambda x: modified_item) == modified_item
+
+    def test_cached_get_uses_values_from_init(self):
+        core = MockCore()
+        wrapper = make_wrapper(core, True)
+        item1 = { "key": "flag1", "version": 1 }
+        item2 = { "key": "flag2", "version": 1 }
+
+        wrapper.init({ THINGS: { item1["key"]: item1, item2["key"]: item2 } })
+        core.force_remove(THINGS, item1["key"])
+        assert wrapper.get(THINGS, item1["key"]) == item1
+
+    @pytest.mark.parametrize("cached", [False, True])
+    def test_get_all(self, cached):
+        core = MockCore()
+        wrapper = make_wrapper(core, cached)
+        item1 = { "key": "flag1", "version": 1 }
+        item2 = { "key": "flag2", "version": 1 }
+
+        core.force_set(THINGS, item1)
+        core.force_set(THINGS, item2)
+        assert wrapper.all(THINGS, lambda x: x) == { item1["key"]: item1, item2["key"]: item2 }
+
+        core.force_remove(THINGS, item2["key"])
+        if cached:
+            assert wrapper.all(THINGS, lambda x: x) == { item1["key"]: item1, item2["key"]: item2 }
+        else:
+            assert wrapper.all(THINGS, lambda x: x) == { item1["key"]: item1 }
+
+    @pytest.mark.parametrize("cached", [False, True])
+    def test_get_all_removes_deleted_items(self, cached):
+        core = MockCore()
+        wrapper = make_wrapper(core, cached)
+        item1 = { "key": "flag1", "version": 1 }
+        item2 = { "key": "flag2", "version": 1, "deleted": True }
+
+        core.force_set(THINGS, item1)
+        core.force_set(THINGS, item2)
+        assert wrapper.all(THINGS, lambda x: x) == { item1["key"]: item1 }
+
+    @pytest.mark.parametrize("cached", [False, True])
+    def test_get_all_changes_None_to_empty_dict(self, cached):
+        core = MockCore()
+        wrapper = make_wrapper(core, cached)
+
+        assert wrapper.all(WRONG_THINGS, lambda x: x) == {}
+
+    @pytest.mark.parametrize("cached", [False, True])
+    def test_get_all_with_lambda(self, cached):
+        core = MockCore()
+        wrapper = make_wrapper(core, cached)
+        extra = { "extra": True }
+        item1 = { "key": "flag1", "version": 1 }
+        item2 = { "key": "flag2", "version": 1 }
+        core.force_set(THINGS, item1)
+        core.force_set(THINGS, item2)
+        assert wrapper.all(THINGS, lambda x: dict(x, **extra)) == {
+            item1["key"]: item1,
item2["key"]: item2, "extra": True + } + + def test_cached_get_all_uses_values_from_init(self): + core = MockCore() + wrapper = make_wrapper(core, True) + item1 = { "key": "flag1", "version": 1 } + item2 = { "key": "flag2", "version": 1 } + both = { item1["key"]: item1, item2["key"]: item2 } + + wrapper.init({ THINGS: both }) + core.force_remove(THINGS, item1["key"]) + assert wrapper.all(THINGS, lambda x: x) == both + + @pytest.mark.parametrize("cached", [False, True]) + def test_upsert_successful(self, cached): + core = MockCore() + wrapper = make_wrapper(core, cached) + key = "flag" + itemv1 = { "key": key, "version": 1 } + itemv2 = { "key": key, "version": 2 } + + wrapper.upsert(THINGS, itemv1) + assert core.data[THINGS][key] == itemv1 + + wrapper.upsert(THINGS, itemv2) + assert core.data[THINGS][key] == itemv2 + + # if we have a cache, verify that the new item is now cached by writing a different value + # to the underlying data - Get should still return the cached item + if cached: + itemv3 = { "key": key, "version": 3 } + core.force_set(THINGS, itemv3) + + assert wrapper.get(THINGS, key) == itemv2 + + def test_cached_upsert_unsuccessful(self): + # This is for an upsert where the data in the store has a higher version. In an uncached + # store, this is just a no-op as far as the wrapper is concerned so there's nothing to + # test here. In a cached store, we need to verify that the cache has been refreshed + # using the data that was found in the store. + core = MockCore() + wrapper = make_wrapper(core, True) + key = "flag" + itemv1 = { "key": key, "version": 1 } + itemv2 = { "key": key, "version": 2 } + + wrapper.upsert(THINGS, itemv2) + assert core.data[THINGS][key] == itemv2 + + wrapper.upsert(THINGS, itemv1) + assert core.data[THINGS][key] == itemv2 # value in store remains the same + + itemv3 = { "key": key, "version": 3 } + core.force_set(THINGS, itemv3) # bypasses cache so we can verify that itemv2 is in the cache + assert wrapper.get(THINGS, key) == itemv2 + + @pytest.mark.parametrize("cached", [False, True]) + def test_delete(self, cached): + core = MockCore() + wrapper = make_wrapper(core, cached) + key = "flag" + itemv1 = { "key": key, "version": 1 } + itemv2 = { "key": key, "version": 2, "deleted": True } + itemv3 = { "key": key, "version": 3 } + + core.force_set(THINGS, itemv1) + assert wrapper.get(THINGS, key) == itemv1 + + wrapper.delete(THINGS, key, 2) + assert core.data[THINGS][key] == itemv2 + + core.force_set(THINGS, itemv3) # make a change that bypasses the cache + assert wrapper.get(THINGS, key) == (None if cached else itemv3) + + def test_uncached_initialized_queries_state_only_until_inited(self): + core = MockCore() + wrapper = make_wrapper(core, False) + + assert wrapper.initialized is False + assert core.inited_query_count == 1 + + core.inited = True + assert wrapper.initialized is True + assert core.inited_query_count == 2 + + core.inited = False + assert wrapper.initialized is True + assert core.inited_query_count == 2 + + def test_uncached_initialized_does_not_query_state_if_init_was_called(self): + core = MockCore() + wrapper = make_wrapper(core, False) + + assert wrapper.initialized is False + assert core.inited_query_count == 1 + + wrapper.init({}) + + assert wrapper.initialized is True + assert core.inited_query_count == 1 + + def test_cached_initialized_can_cache_false_result(self): + core = MockCore() + wrapper = CachingStoreWrapper(core, CacheConfig(expiration=0.2)) # use a shorter cache TTL for this test + + assert wrapper.initialized is False + 
assert core.inited_query_count == 1 + + core.inited = True + assert wrapper.initialized is False + assert core.inited_query_count == 1 + + sleep(0.5) + + assert wrapper.initialized is True + assert core.inited_query_count == 2 + + # From this point on it should remain true and the method should not be called + assert wrapper.initialized is True + assert core.inited_query_count == 2 From 59a67a844b1650eb7a7600a1d44ca120a8f03a72 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sat, 29 Dec 2018 13:39:42 -0800 Subject: [PATCH 17/50] test the new Redis factory method --- ldclient/integrations.py | 6 +++--- testing/test_feature_store.py | 23 +++++++++++++++++------ 2 files changed, 20 insertions(+), 9 deletions(-) diff --git a/ldclient/integrations.py b/ldclient/integrations.py index a82783be..86b5248d 100644 --- a/ldclient/integrations.py +++ b/ldclient/integrations.py @@ -11,9 +11,9 @@ class Redis(object): DEFAULT_MAX_CONNECTIONS = 16 @staticmethod - def new_feature_store(url=Redis.DEFAULT_URL, - prefix=Redis.DEFAULT_PREFIX, - max_connections=Redis.DEFAULT_MAX_CONNECTIONS, + def new_feature_store(url='redis://localhost:6379/0', + prefix='launchdarkly', + max_connections=16, caching=CacheConfig.default()): """Creates a Redis-backed implementation of `:class:ldclient.feature_store.FeatureStore`. diff --git a/testing/test_feature_store.py b/testing/test_feature_store.py index b8696529..5716fa0e 100644 --- a/testing/test_feature_store.py +++ b/testing/test_feature_store.py @@ -2,7 +2,8 @@ import pytest import redis -from ldclient.feature_store import InMemoryFeatureStore +from ldclient.feature_store import CacheConfig, InMemoryFeatureStore +from ldclient.integrations import Redis from ldclient.redis_feature_store import RedisFeatureStore from ldclient.versioned_data_kind import FEATURES @@ -19,17 +20,27 @@ class TestFeatureStore: redis_host = 'localhost' redis_port = 6379 + def clear_redis_data(self): + r = redis.StrictRedis(host=self.redis_host, port=self.redis_port, db=0) + r.delete("launchdarkly:features") + def in_memory(self): return InMemoryFeatureStore() def redis_with_local_cache(self): - r = redis.StrictRedis(host=self.redis_host, port=self.redis_port, db=0) - r.delete("launchdarkly:features") - return RedisFeatureStore() + self.clear_redis_data() + return Redis.new_feature_store() def redis_no_local_cache(self): - r = redis.StrictRedis(host=self.redis_host, port=self.redis_port, db=0) - r.delete("launchdarkly:features") + self.clear_redis_data() + return Redis.new_feature_store(caching=CacheConfig.disabled()) + + def deprecated_redis_with_local_cache(self): + self.clear_redis_data() + return RedisFeatureStore() + + def deprecated_redis_no_local_cache(self): + self.clear_redis_data() return RedisFeatureStore(expiration=0) params = [in_memory, redis_with_local_cache, redis_no_local_cache] From 1e38ac10afceb7a4b34ada8351e4c9552070f563 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sat, 29 Dec 2018 15:22:39 -0800 Subject: [PATCH 18/50] add DynamoDB support --- .circleci/config.yml | 6 + dynamodb-requirements.txt | 1 + ldclient/dynamodb_feature_store.py | 191 +++++++++++++++++++++++++++++ ldclient/integrations.py | 25 +++- ldclient/redis_feature_store.py | 11 +- test-requirements.txt | 1 + testing/test_feature_store.py | 134 ++++++++++++++++---- 7 files changed, 345 insertions(+), 24 deletions(-) create mode 100644 dynamodb-requirements.txt create mode 100644 ldclient/dynamodb_feature_store.py diff --git a/.circleci/config.yml b/.circleci/config.yml index 05cb973c..92699a3c 100644 --- 
a/.circleci/config.yml +++ b/.circleci/config.yml @@ -40,28 +40,34 @@ jobs: docker: - image: circleci/python:2.7-jessie - image: redis + - image: amazon/dynamodb-local test-3.3: <<: *test-template docker: - image: circleci/python:3.3-jessie - image: redis + - image: amazon/dynamodb-local test-3.4: <<: *test-template docker: - image: circleci/python:3.4-jessie - image: redis + - image: amazon/dynamodb-local test-3.5: <<: *test-template docker: - image: circleci/python:3.5-jessie - image: redis + - image: amazon/dynamodb-local test-3.6: <<: *test-template docker: - image: circleci/python:3.6-jessie - image: redis + - image: amazon/dynamodb-local test-3.7: <<: *test-template docker: - image: circleci/python:3.7-stretch - image: redis + - image: amazon/dynamodb-local diff --git a/dynamodb-requirements.txt b/dynamodb-requirements.txt new file mode 100644 index 00000000..b72b66b6 --- /dev/null +++ b/dynamodb-requirements.txt @@ -0,0 +1 @@ +boto3>=1.9.71 diff --git a/ldclient/dynamodb_feature_store.py b/ldclient/dynamodb_feature_store.py new file mode 100644 index 00000000..f3879d71 --- /dev/null +++ b/ldclient/dynamodb_feature_store.py @@ -0,0 +1,191 @@ +import json + +have_dynamodb = False +try: + import boto3 + have_dynamodb = True +except ImportError: + pass + +from ldclient import log +from ldclient.feature_store import CacheConfig +from ldclient.feature_store_helpers import CachingStoreWrapper +from ldclient.interfaces import FeatureStore, FeatureStoreCore + +# +# Internal implementation of the DynamoDB feature store. +# +# Implementation notes: +# +# * Feature flags, segments, and any other kind of entity the LaunchDarkly client may wish +# to store, are all put in the same table. The only two required attributes are "key" (which +# is present in all storeable entities) and "namespace" (a parameter from the client that is +# used to disambiguate between flags and segments). +# +# * Because of DynamoDB's restrictions on attribute values (e.g. empty strings are not +# allowed), the standard DynamoDB marshaling mechanism with one attribute per object property +# is not used. Instead, the entire object is serialized to JSON and stored in a single +# attribute, "item". The "version" property is also stored as a separate attribute since it +# is used for updates. +# +# * Since DynamoDB doesn't have transactions, the init() method - which replaces the entire data +# store - is not atomic, so there can be a race condition if another process is adding new data +# via upsert(). To minimize this, we don't delete all the data at the start; instead, we update +# the items we've received, and then delete all other items. That could potentially result in +# deleting new data from another process, but that would be the case anyway if the init() +# happened to execute later than the upsert(); we are relying on the fact that normally the +# process that did the init() will also receive the new data shortly and do its own upsert(). +# +# * DynamoDB has a maximum item size of 400KB. Since each feature flag or user segment is +# stored as a single item, this mechanism will not work for extremely large flags or segments. 
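+#
+# * As an illustration of the single-attribute marshaling described above (hypothetical values):
+#   with prefix "ld", a flag { "key": "my-flag", "version": 2 } is stored as the DynamoDB item
+#   { "namespace": {"S": "ld:features"}, "key": {"S": "my-flag"}, "version": {"N": "2"},
+#     "item": {"S": "{\"key\": \"my-flag\", \"version\": 2}"} }.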
+# + +class _DynamoDBFeatureStoreCore(FeatureStoreCore): + PARTITION_KEY = 'namespace' + SORT_KEY = 'key' + VERSION_ATTRIBUTE = 'version' + ITEM_JSON_ATTRIBUTE = 'item' + + def __init__(self, table_name, prefix, dynamodb_opts): + if not have_dynamodb: + raise NotImplementedError("Cannot use DynamoDB feature store because AWS SDK (boto3 package) is not installed") + self._table_name = table_name + self._prefix = None if prefix == "" else prefix + self._client = boto3.client('dynamodb', **dynamodb_opts) + + def init_internal(self, all_data): + # Start by reading the existing keys; we will later delete any of these that weren't in all_data. + unused_old_keys = self._read_existing_keys(all_data.keys()) + requests = [] + num_items = 0 + inited_key = self._inited_key() + + # Insert or update every provided item + for kind, items in all_data.items(): + for key, item in items.items(): + encoded_item = self._marshal_item(kind, item) + requests.append({ 'PutRequest': { 'Item': encoded_item } }) + combined_key = (self._namespace_for_kind(kind), key) + unused_old_keys.discard(combined_key) + num_items = num_items + 1 + + # Now delete any previously existing items whose keys were not in the current data + for combined_key in unused_old_keys: + if combined_key[0] != inited_key: + requests.append({ 'DeleteRequest': { 'Key': self._make_keys(combined_key[0], combined_key[1]) } }) + + # Now set the special key that we check in initialized_internal() + requests.append({ 'PutRequest': { 'Item': self._make_keys(inited_key, inited_key) } }) + + _DynamoDBHelpers.batch_write_requests(self._client, self._table_name, requests) + log.info('Initialized table %s with %d items', self._table_name, num_items) + + def get_internal(self, kind, key): + resp = self._get_item_by_keys(self._namespace_for_kind(kind), key) + return self._unmarshal_item(resp.get('Item')) + + def get_all_internal(self, kind): + items_out = {} + paginator = self._client.get_paginator('query') + for resp in paginator.paginate(**self._make_query_for_kind(kind)): + for item in resp['Items']: + item_out = self._unmarshal_item(item) + items_out[item_out['key']] = item_out + return items_out + + def upsert_internal(self, kind, item): + encoded_item = self._marshal_item(kind, item) + try: + req = { + 'TableName': self._table_name, + 'Item': encoded_item, + 'ConditionExpression': 'attribute_not_exists(#namespace) or attribute_not_exists(#key) or :version > #version', + 'ExpressionAttributeNames': { + '#namespace': self.PARTITION_KEY, + '#key': self.SORT_KEY, + '#version': self.VERSION_ATTRIBUTE + }, + 'ExpressionAttributeValues': { + ':version': { 'N': str(item['version']) } + } + } + self._client.put_item(**req) + except self._client.exceptions.ConditionalCheckFailedException: + # The item was not updated because there's a newer item in the database. We must now + # read the item that's in the database and return it, so CachingStoreWrapper can cache it. 
+            return self.get_internal(kind, item['key'])
+        return item
+
+    def initialized_internal(self):
+        resp = self._get_item_by_keys(self._inited_key(), self._inited_key())
+        return resp.get('Item') is not None and len(resp['Item']) > 0
+
+    def _prefixed_namespace(self, base):
+        return base if self._prefix is None else (self._prefix + ':' + base)
+
+    def _namespace_for_kind(self, kind):
+        return self._prefixed_namespace(kind.namespace)
+
+    def _inited_key(self):
+        return self._prefixed_namespace('$inited')
+
+    def _make_keys(self, namespace, key):
+        return {
+            self.PARTITION_KEY: { 'S': namespace },
+            self.SORT_KEY: { 'S': key }
+        }
+
+    def _make_query_for_kind(self, kind):
+        return {
+            'TableName': self._table_name,
+            'ConsistentRead': True,
+            'KeyConditions': {
+                self.PARTITION_KEY: {
+                    'AttributeValueList': [
+                        { 'S': self._namespace_for_kind(kind) }
+                    ],
+                    'ComparisonOperator': 'EQ'
+                }
+            }
+        }
+
+    def _get_item_by_keys(self, namespace, key):
+        return self._client.get_item(TableName=self._table_name, Key=self._make_keys(namespace, key))
+
+    def _read_existing_keys(self, kinds):
+        keys = set()
+        for kind in kinds:
+            req = self._make_query_for_kind(kind)
+            req['ProjectionExpression'] = '#namespace, #key'
+            req['ExpressionAttributeNames'] = {
+                '#namespace': self.PARTITION_KEY,
+                '#key': self.SORT_KEY
+            }
+            paginator = self._client.get_paginator('query')
+            for resp in paginator.paginate(**req):
+                for item in resp['Items']:
+                    namespace = item[self.PARTITION_KEY]['S']
+                    key = item[self.SORT_KEY]['S']
+                    keys.add((namespace, key))
+        return keys
+
+    def _marshal_item(self, kind, item):
+        json_str = json.dumps(item)
+        ret = self._make_keys(self._namespace_for_kind(kind), item['key'])
+        ret[self.VERSION_ATTRIBUTE] = { 'N': str(item['version']) }
+        ret[self.ITEM_JSON_ATTRIBUTE] = { 'S': json_str }
+        return ret
+
+    def _unmarshal_item(self, item):
+        if item is None:
+            return None
+        json_attr = item.get(self.ITEM_JSON_ATTRIBUTE)
+        return None if json_attr is None else json.loads(json_attr['S'])
+
+
+class _DynamoDBHelpers(object):
+    @staticmethod
+    def batch_write_requests(client, table_name, requests):
+        batch_size = 25  # batch_write_item accepts at most 25 requests per call
+        for batch in (requests[i:i+batch_size] for i in range(0, len(requests), batch_size)):
+            client.batch_write_item(RequestItems={ table_name: batch })
diff --git a/ldclient/integrations.py b/ldclient/integrations.py
index 86b5248d..80063389 100644
--- a/ldclient/integrations.py
+++ b/ldclient/integrations.py
@@ -1,10 +1,33 @@
 from ldclient.feature_store import CacheConfig
 from ldclient.feature_store_helpers import CachingStoreWrapper
+from ldclient.dynamodb_feature_store import _DynamoDBFeatureStoreCore
 from ldclient.redis_feature_store import _RedisFeatureStoreCore


+class DynamoDB(object):
+    """Provides factory methods for integrations between the LaunchDarkly SDK and DynamoDB.
+    """
+
+    @staticmethod
+    def new_feature_store(table_name,
+                          prefix=None,
+                          dynamodb_opts={},
+                          caching=CacheConfig.default()):
+        """Creates a DynamoDB-backed implementation of `:class:ldclient.feature_store.FeatureStore`.
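+
+        A minimal usage sketch (assumes a table named "my-table" has already been created,
+        since this method requires an existing table): ::
+
+            store = DynamoDB.new_feature_store('my-table', caching=CacheConfig.default())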
+
+        :param string table_name: The name of an existing DynamoDB table
+        :param string prefix: An optional namespace prefix to be prepended to all DynamoDB keys
+        :param dict dynamodb_opts: Optional parameters for configuring the DynamoDB client, as defined in
+            the boto3 API
+        :param CacheConfig caching: Specifies whether local caching should be enabled and if so,
+            sets the cache properties; defaults to `CacheConfig.default()`
+        """
+        core = _DynamoDBFeatureStoreCore(table_name, prefix, dynamodb_opts)
+        return CachingStoreWrapper(core, caching)
+
+
 class Redis(object):
-    """Provides factory methods for integrations between the LaunchDarkly SDK and Redis,
+    """Provides factory methods for integrations between the LaunchDarkly SDK and Redis.
     """
     DEFAULT_URL = 'redis://localhost:6379/0'
     DEFAULT_PREFIX = 'launchdarkly'
diff --git a/ldclient/redis_feature_store.py b/ldclient/redis_feature_store.py
index b9bdf731..02df0e57 100644
--- a/ldclient/redis_feature_store.py
+++ b/ldclient/redis_feature_store.py
@@ -1,6 +1,11 @@
 import json

-import redis
+have_redis = False
+try:
+    import redis
+    have_redis = True
+except ImportError:
+    pass

 from ldclient import log
 from ldclient.feature_store import CacheConfig
@@ -21,7 +26,8 @@ def __init__(self,
                  max_connections=16,
                  expiration=15,
                  capacity=1000):
-
+        if not have_redis:
+            raise NotImplementedError("Cannot use Redis feature store because redis package is not installed")
         self.core = _RedisFeatureStoreCore(url, prefix, max_connections)  # exposed for testing
         self._wrapper = CachingStoreWrapper(self.core, CacheConfig(expiration=expiration, capacity=capacity))
@@ -47,6 +53,7 @@ def initialized(self):
 class _RedisFeatureStoreCore(FeatureStoreCore):
     def __init__(self, url, prefix, max_connections):
+        self._prefix = prefix
         self._pool = redis.ConnectionPool.from_url(url=url, max_connections=max_connections)
         self.test_update_hook = None  # exposed for testing
diff --git a/test-requirements.txt b/test-requirements.txt
index 413ef355..88cbbc2e 100644
--- a/test-requirements.txt
+++ b/test-requirements.txt
@@ -1,6 +1,7 @@
 mock>=2.0.0
 pytest>=2.8
 redis>=2.10.5
+boto3>=1.9.71
 coverage>=4.4
 pytest-capturelog>=0.7
 pytest-cov>=2.4.0
diff --git a/testing/test_feature_store.py b/testing/test_feature_store.py
index 5716fa0e..003434b1 100644
--- a/testing/test_feature_store.py
+++ b/testing/test_feature_store.py
@@ -1,9 +1,12 @@
+import boto3
 import json
 import pytest
 import redis
+import time

+from ldclient.dynamodb_feature_store import _DynamoDBFeatureStoreCore, _DynamoDBHelpers
 from ldclient.feature_store import CacheConfig, InMemoryFeatureStore
-from ldclient.integrations import Redis
+from ldclient.integrations import DynamoDB, Redis
 from ldclient.redis_feature_store import RedisFeatureStore
 from ldclient.versioned_data_kind import FEATURES
@@ -16,38 +19,124 @@ def get_log_lines(caplog):
     return loglines

-class TestFeatureStore:
+class InMemoryTester(object):
+    def init_store(self):
+        return InMemoryFeatureStore()
+
+
+class RedisTester(object):
     redis_host = 'localhost'
     redis_port = 6379

-    def clear_redis_data(self):
+    def __init__(self, cache_config):
+        self._cache_config = cache_config
+
+    def init_store(self):
+        self._clear_data()
+        return Redis.new_feature_store(caching=self._cache_config)
+
+    def _clear_data(self):
         r = redis.StrictRedis(host=self.redis_host, port=self.redis_port, db=0)
         r.delete("launchdarkly:features")

-    def in_memory(self):
-        return InMemoryFeatureStore()
-
-    def redis_with_local_cache(self):
-        self.clear_redis_data()
-        return Redis.new_feature_store()
-
- def redis_no_local_cache(self): - self.clear_redis_data() - return Redis.new_feature_store(caching=CacheConfig.disabled()) - - def deprecated_redis_with_local_cache(self): - self.clear_redis_data() - return RedisFeatureStore() +class RedisWithDeprecatedConstructorTester(RedisTester): + def init_store(self): + self._clear_data() + return RedisFeatureStore(expiration=(30 if self._cache_config.enabled else 0)) + + +class DynamoDBTester(object): + table_name = 'LD_DYNAMODB_TEST_TABLE' + table_created = False + options = { 'endpoint_url': 'http://localhost:8000', 'region_name': 'us-east-1' } + + def __init__(self, cache_config): + self._cache_config = cache_config + + def init_store(self): + self._create_table() + self._clear_data() + return DynamoDB.new_feature_store(self.table_name, dynamodb_opts=self.options) + + def _create_table(self): + if self.table_created: + return + client = boto3.client('dynamodb', **self.options) + try: + client.describe_table(TableName=self.table_name) + self.table_created = True + return + except client.exceptions.ResourceNotFoundException: + pass + req = { + 'TableName': self.table_name, + 'KeySchema': [ + { + 'AttributeName': _DynamoDBFeatureStoreCore.PARTITION_KEY, + 'KeyType': 'HASH', + }, + { + 'AttributeName': _DynamoDBFeatureStoreCore.SORT_KEY, + 'KeyType': 'RANGE' + } + ], + 'AttributeDefinitions': [ + { + 'AttributeName': _DynamoDBFeatureStoreCore.PARTITION_KEY, + 'AttributeType': 'S' + }, + { + 'AttributeName': _DynamoDBFeatureStoreCore.SORT_KEY, + 'AttributeType': 'S' + } + ], + 'ProvisionedThroughput': { + 'ReadCapacityUnits': 1, + 'WriteCapacityUnits': 1 + } + } + client.create_table(**req) + while True: + try: + client.describe_table(TableName=self.table_name) + self.table_created = True + return + except client.exceptions.ResourceNotFoundException: + time.sleep(0.5) + + def _clear_data(self): + client = boto3.client('dynamodb', **self.options) + delete_requests = [] + req = { + 'TableName': self.table_name, + 'ConsistentRead': True, + 'ProjectionExpression': '#namespace, #key', + 'ExpressionAttributeNames': { + '#namespace': _DynamoDBFeatureStoreCore.PARTITION_KEY, + '#key': _DynamoDBFeatureStoreCore.SORT_KEY + } + } + for resp in client.get_paginator('scan').paginate(**req): + for item in resp['Items']: + delete_requests.append({ 'DeleteRequest': { 'Key': item } }) + _DynamoDBHelpers.batch_write_requests(client, self.table_name, delete_requests) - def deprecated_redis_no_local_cache(self): - self.clear_redis_data() - return RedisFeatureStore(expiration=0) - params = [in_memory, redis_with_local_cache, redis_no_local_cache] +class TestFeatureStore: + params = [ + InMemoryTester(), + RedisTester(CacheConfig.default()), + RedisTester(CacheConfig.disabled()), + RedisWithDeprecatedConstructorTester(CacheConfig.default()), + RedisWithDeprecatedConstructorTester(CacheConfig.disabled()), + DynamoDBTester(CacheConfig.default()), + DynamoDBTester(CacheConfig.disabled()) + ] @pytest.fixture(params=params) def store(self, request): - return request.param(self) + return request.param.init_store() @staticmethod def make_feature(key, ver): @@ -79,6 +168,9 @@ def base_initialized_store(self, store): }) return store + def test_not_initialized_before_init(self, store): + assert store.initialized is False + def test_initialized(self, store): store = self.base_initialized_store(store) assert store.initialized is True From 431dddf55ea9bdc16d1e15d680e519287ed14723 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sat, 29 Dec 2018 15:25:52 -0800 Subject: [PATCH 19/50] 
add test credentials

---
 testing/test_feature_store.py | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/testing/test_feature_store.py b/testing/test_feature_store.py
index 003434b1..229a0f40 100644
--- a/testing/test_feature_store.py
+++ b/testing/test_feature_store.py
@@ -49,7 +49,12 @@ class DynamoDBTester(object):
     table_name = 'LD_DYNAMODB_TEST_TABLE'
     table_created = False
-    options = { 'endpoint_url': 'http://localhost:8000', 'region_name': 'us-east-1' }
+    options = {
+        'aws_access_key_id': 'key',  # not used by local DynamoDB, but still required
+        'aws_secret_access_key': 'secret',
+        'endpoint_url': 'http://localhost:8000',
+        'region_name': 'us-east-1'
+    }

     def __init__(self, cache_config):
         self._cache_config = cache_config

From 3aa5644edf5c5f65f201733c20bb21e924fd10ef Mon Sep 17 00:00:00 2001
From: Eli Bishop
Date: Mon, 31 Dec 2018 11:34:53 -0800
Subject: [PATCH 20/50] link in comment

---
 ldclient/integrations.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/ldclient/integrations.py b/ldclient/integrations.py
index 80063389..6102d354 100644
--- a/ldclient/integrations.py
+++ b/ldclient/integrations.py
@@ -18,7 +18,7 @@ def new_feature_store(table_name,
     :param string table_name: The name of an existing DynamoDB table
     :param string prefix: An optional namespace prefix to be prepended to all DynamoDB keys
     :param dict dynamodb_opts: Optional parameters for configuring the DynamoDB client, as defined in
-        the boto3 API
+        the boto3 API; see https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html#boto3.session.Session.client
     :param CacheConfig caching: Specifies whether local caching should be enabled and if so,
         sets the cache properties; defaults to `CacheConfig.default()`

From bd00276f874d40d1a5d1f2c66e033cd99452f00c Mon Sep 17 00:00:00 2001
From: Eli Bishop
Date: Mon, 31 Dec 2018 11:36:13 -0800
Subject: [PATCH 21/50] comment

---
 ldclient/redis_feature_store.py | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/ldclient/redis_feature_store.py b/ldclient/redis_feature_store.py
index b9bdf731..e08af6dc 100644
--- a/ldclient/redis_feature_store.py
+++ b/ldclient/redis_feature_store.py
@@ -9,6 +9,11 @@ from ldclient.versioned_data_kind import FEATURES

+# Note that this class is now just a facade around CachingStoreWrapper, which is in turn delegating
+# to _RedisFeatureStoreCore where the actual database logic is. This class was retained for historical
+# reasons, to support existing code that calls the RedisFeatureStore constructor. In the future, we
+# will migrate away from exposing these concrete classes and use only the factory methods.
+
 class RedisFeatureStore(FeatureStore):
     """A Redis-backed implementation of :class:`ldclient.feature_store.FeatureStore`.
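As a quick reference, the caching argument accepted by both factory methods above comes from the CacheConfig class added earlier in this series (a sketch of the options):

    CacheConfig.default()        # caching on: 15-second TTL, up to 1000 items
    CacheConfig.disabled()       # equivalent to CacheConfig(expiration=0); no local caching
    CacheConfig(expiration=60)   # custom TTL in seconds; capacity defaults to 1000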
From 534ec5deadb46e318a18e7bc80431f2bc531a639 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 31 Dec 2018 12:48:27 -0800 Subject: [PATCH 22/50] don't catch exceptions in Redis feature store, let the client catch them --- ldclient/client.py | 15 +++++-- ldclient/feature_store_helpers.py | 6 +-- ldclient/interfaces.py | 4 +- ldclient/redis_feature_store.py | 19 ++------- testing/test_feature_store.py | 30 -------------- testing/test_feature_store_helpers.py | 59 +++++++++++++++++++++++---- testing/test_ldclient_evaluation.py | 56 +++++++++++++++++++++++++ 7 files changed, 128 insertions(+), 61 deletions(-) diff --git a/ldclient/client.py b/ldclient/client.py index 039fad52..eea7d970 100644 --- a/ldclient/client.py +++ b/ldclient/client.py @@ -243,7 +243,14 @@ def send_event(value, variation=None, flag=None, reason=None): if user is not None and user.get('key', "") == "": log.warn("User key is blank. Flag evaluation will proceed, but the user will not be stored in LaunchDarkly.") - flag = self._store.get(FEATURES, key, lambda x: x) + try: + flag = self._store.get(FEATURES, key, lambda x: x) + except Exception as e: + log.error("Unexpected error while retrieving feature flag \"%s\": %s" % (key, repr(e))) + log.debug(traceback.format_exc()) + reason = error_reason('EXCEPTION') + send_event(default, None, None, reason) + return EvaluationDetail(default, None, reason) if not flag: reason = error_reason('FLAG_NOT_FOUND') send_event(default, None, None, reason) @@ -264,7 +271,7 @@ def send_event(value, variation=None, flag=None, reason=None): send_event(detail.value, detail.variation_index, flag, detail.reason) return detail except Exception as e: - log.error("Unexpected error while evaluating feature flag \"%s\": %s" % (key, e)) + log.error("Unexpected error while evaluating feature flag \"%s\": %s" % (key, repr(e))) log.debug(traceback.format_exc()) reason = error_reason('EXCEPTION') send_event(default, None, flag, reason) @@ -328,7 +335,7 @@ def all_flags_state(self, user, **kwargs): if flags_map is None: raise ValueError("feature store error") except Exception as e: - log.error("Unable to read flags for all_flag_state: %s" % e) + log.error("Unable to read flags for all_flag_state: %s" % repr(e)) return FeatureFlagsState(False) for key, flag in flags_map.items(): @@ -339,7 +346,7 @@ def all_flags_state(self, user, **kwargs): state.add_flag(flag, detail.value, detail.variation_index, detail.reason if with_reasons else None, details_only_if_tracked) except Exception as e: - log.error("Error evaluating flag \"%s\" in all_flags_state: %s" % (key, e)) + log.error("Error evaluating flag \"%s\" in all_flags_state: %s" % (key, repr(e))) log.debug(traceback.format_exc()) reason = {'kind': 'ERROR', 'errorKind': 'EXCEPTION'} state.add_flag(flag, None, None, reason if with_reasons else None, details_only_if_tracked) diff --git a/ldclient/feature_store_helpers.py b/ldclient/feature_store_helpers.py index d8359274..2ba83713 100644 --- a/ldclient/feature_store_helpers.py +++ b/ldclient/feature_store_helpers.py @@ -42,7 +42,7 @@ def get(self, kind, key, callback=lambda x: x): self._cache[cache_key] = [item] return callback(self._item_if_not_deleted(item)) - def all(self, kind, callback): + def all(self, kind, callback=lambda x: x): if self._cache is not None: cache_key = self._all_cache_key(kind) cached_items = self._cache.get(cache_key) @@ -68,11 +68,11 @@ def initialized(self): if self._inited: return True if self._cache is None: - result = self._core.initialized_internal() + result = 
bool(self._core.initialized_internal()) else: result = self._cache.get(CachingStoreWrapper.__INITED_CACHE_KEY__) if result is None: - result = self._core.initialized_internal() + result = bool(self._core.initialized_internal()) self._cache[CachingStoreWrapper.__INITED_CACHE_KEY__] = result if result: self._inited = True diff --git a/ldclient/interfaces.py b/ldclient/interfaces.py index 2710fa25..9556bdfc 100644 --- a/ldclient/interfaces.py +++ b/ldclient/interfaces.py @@ -19,7 +19,7 @@ class FeatureStore(object): __metaclass__ = ABCMeta @abstractmethod - def get(self, kind, key, callback): + def get(self, kind, key, callback=lambda x: x): """ Retrieves the object to which the specified key is mapped, or None if the key is not found or the associated object has a "deleted" property of True. The retrieved object, if any (a @@ -35,7 +35,7 @@ def get(self, kind, key, callback): """ @abstractmethod - def all(self, kind, callback): + def all(self, kind, callback=lambda x: x): """ Retrieves a dictionary of all associated objects of a given kind. The retrieved dict of keys to objects can be transformed by the specified callback. diff --git a/ldclient/redis_feature_store.py b/ldclient/redis_feature_store.py index e08af6dc..c3eabc42 100644 --- a/ldclient/redis_feature_store.py +++ b/ldclient/redis_feature_store.py @@ -77,16 +77,10 @@ def init_internal(self, all_data): def get_all_internal(self, kind): r = redis.Redis(connection_pool=self._pool) - try: - all_items = r.hgetall(self._items_key(kind)) - except BaseException as e: - log.error("RedisFeatureStore: Could not retrieve '%s' from Redis with error: %s. Returning None.", - kind.namespace, e) - return None + all_items = r.hgetall(self._items_key(kind)) if all_items is None or all_items is "": - log.warn("RedisFeatureStore: call to get all '%s' returned no results. Returning None.", kind.namespace) - return None + all_items = {} results = {} for key, item_json in all_items.items(): @@ -95,13 +89,8 @@ def get_all_internal(self, kind): return results def get_internal(self, kind, key): - try: - r = redis.Redis(connection_pool=self._pool) - item_json = r.hget(self._items_key(kind), key) - except BaseException as e: - log.error("RedisFeatureStore: Could not retrieve key %s from '%s' with error: %s", - key, kind.namespace, e) - return None + r = redis.Redis(connection_pool=self._pool) + item_json = r.hget(self._items_key(kind), key) if item_json is None or item_json is "": log.debug("RedisFeatureStore: key %s not found in '%s'. 
Returning None.", key, kind.namespace) diff --git a/testing/test_feature_store.py b/testing/test_feature_store.py index 5716fa0e..ffff39a8 100644 --- a/testing/test_feature_store.py +++ b/testing/test_feature_store.py @@ -8,14 +8,6 @@ from ldclient.versioned_data_kind import FEATURES -def get_log_lines(caplog): - loglines = caplog.records - if callable(loglines): - # records() is a function in older versions of the caplog plugin - loglines = loglines() - return loglines - - class TestFeatureStore: redis_host = 'localhost' redis_port = 6379 @@ -178,25 +170,3 @@ def hook(base_key, key): store.upsert(FEATURES, feature) result = store.get(FEATURES, 'flagkey', lambda x: x) assert result['version'] == 5 - - def test_exception_is_handled_in_get(self, caplog): - # This just verifies the fix for a bug that caused an error during exception handling in Python 3 - store = RedisFeatureStore(url='redis://bad') - feature = store.get(FEATURES, 'flagkey') - assert feature is None - loglines = get_log_lines(caplog) - assert len(loglines) == 2 - message = loglines[1].message - assert message.startswith("RedisFeatureStore: Could not retrieve key flagkey from 'features' with error:") - assert "connecting to bad:6379" in message - - def test_exception_is_handled_in_all(self, caplog): - # This just verifies the fix for a bug that caused an error during exception handling in Python 3 - store = RedisFeatureStore(url='redis://bad') - all = store.all(FEATURES, lambda x: x) - assert all == {} - loglines = get_log_lines(caplog) - assert len(loglines) == 2 - message = loglines[1].message - assert message.startswith("RedisFeatureStore: Could not retrieve 'features' from Redis") - assert "connecting to bad:6379" in message diff --git a/testing/test_feature_store_helpers.py b/testing/test_feature_store_helpers.py index 01bb245a..77ccb6f8 100644 --- a/testing/test_feature_store_helpers.py +++ b/testing/test_feature_store_helpers.py @@ -16,20 +16,25 @@ def __init__(self): self.data = {} self.inited = False self.inited_query_count = 0 + self.error = None def init_internal(self, all_data): + self._maybe_throw() self.data = {} for kind, items in all_data.items(): self.data[kind] = items.copy() def get_internal(self, kind, key): + self._maybe_throw() items = self.data.get(kind) return None if items is None else items.get(key) def get_all_internal(self, kind): + self._maybe_throw() return self.data.get(kind) def upsert_internal(self, kind, item): + self._maybe_throw() key = item.get('key') items = self.data.get(kind) if items is None: @@ -42,9 +47,14 @@ def upsert_internal(self, kind, item): return old_item def initialized_internal(self): + self._maybe_throw() self.inited_query_count = self.inited_query_count + 1 return self.inited - + + def _maybe_throw(self): + if self.error is not None: + raise self.error + def force_set(self, kind, item): items = self.data.get(kind) if items is None: @@ -57,6 +67,9 @@ def force_remove(self, kind, key): if items is not None: items.pop(key, None) +class CustomError(Exception): + pass + class TestCachingStoreWrapper: @pytest.mark.parametrize("cached", [False, True]) def test_get_item(self, cached): @@ -119,6 +132,14 @@ def test_cached_get_uses_values_from_init(self): core.force_remove(THINGS, item1["key"]) assert wrapper.get(THINGS, item1["key"]) == item1 + @pytest.mark.parametrize("cached", [False, True]) + def test_get_can_throw_exception(self, cached): + core = MockCore() + wrapper = make_wrapper(core, cached) + core.error = CustomError() + with pytest.raises(CustomError, message="expected 
exception"): + wrapper.get(THINGS, "key", lambda x: x) + @pytest.mark.parametrize("cached", [False, True]) def test_get_all(self, cached): core = MockCore() @@ -128,13 +149,13 @@ def test_get_all(self, cached): core.force_set(THINGS, item1) core.force_set(THINGS, item2) - assert wrapper.all(THINGS, lambda x: x) == { item1["key"]: item1, item2["key"]: item2 } + assert wrapper.all(THINGS) == { item1["key"]: item1, item2["key"]: item2 } core.force_remove(THINGS, item2["key"]) if cached: - assert wrapper.all(THINGS, lambda x: x) == { item1["key"]: item1, item2["key"]: item2 } + assert wrapper.all(THINGS) == { item1["key"]: item1, item2["key"]: item2 } else: - assert wrapper.all(THINGS, lambda x: x) == { item1["key"]: item1 } + assert wrapper.all(THINGS) == { item1["key"]: item1 } @pytest.mark.parametrize("cached", [False, True]) def test_get_all_removes_deleted_items(self, cached): @@ -145,14 +166,14 @@ def test_get_all_removes_deleted_items(self, cached): core.force_set(THINGS, item1) core.force_set(THINGS, item2) - assert wrapper.all(THINGS, lambda x: x) == { item1["key"]: item1 } + assert wrapper.all(THINGS) == { item1["key"]: item1 } @pytest.mark.parametrize("cached", [False, True]) def test_get_all_changes_None_to_empty_dict(self, cached): core = MockCore() wrapper = make_wrapper(core, cached) - assert wrapper.all(WRONG_THINGS, lambda x:x) == {} + assert wrapper.all(WRONG_THINGS) == {} @pytest.mark.parametrize("cached", [False, True]) def test_get_all_iwith_lambda(self, cached): @@ -176,7 +197,15 @@ def test_cached_get_all_uses_values_from_init(self): wrapper.init({ THINGS: both }) core.force_remove(THINGS, item1["key"]) - assert wrapper.all(THINGS, lambda x: x) == both + assert wrapper.all(THINGS) == both + + @pytest.mark.parametrize("cached", [False, True]) + def test_get_all_can_throw_exception(self, cached): + core = MockCore() + wrapper = make_wrapper(core, cached) + core.error = CustomError() + with pytest.raises(CustomError, message="expected exception"): + wrapper.all(THINGS) @pytest.mark.parametrize("cached", [False, True]) def test_upsert_successful(self, cached): @@ -221,6 +250,14 @@ def test_cached_upsert_unsuccessful(self): core.force_set(THINGS, itemv3) # bypasses cache so we can verify that itemv2 is in the cache assert wrapper.get(THINGS, key) == itemv2 + @pytest.mark.parametrize("cached", [False, True]) + def test_upsert_can_throw_exception(self, cached): + core = MockCore() + wrapper = make_wrapper(core, cached) + core.error = CustomError() + with pytest.raises(CustomError, message="expected exception"): + wrapper.upsert(THINGS, { "key": "x", "version": 1 }) + @pytest.mark.parametrize("cached", [False, True]) def test_delete(self, cached): core = MockCore() @@ -239,6 +276,14 @@ def test_delete(self, cached): core.force_set(THINGS, itemv3) # make a change that bypasses the cache assert wrapper.get(THINGS, key) == (None if cached else itemv3) + @pytest.mark.parametrize("cached", [False, True]) + def test_delete_can_throw_exception(self, cached): + core = MockCore() + wrapper = make_wrapper(core, cached) + core.error = CustomError() + with pytest.raises(CustomError, message="expected exception"): + wrapper.delete(THINGS, "x", 1) + def test_uncached_initialized_queries_state_only_until_inited(self): core = MockCore() wrapper = make_wrapper(core, False) diff --git a/testing/test_ldclient_evaluation.py b/testing/test_ldclient_evaluation.py index 46c48756..e48f0329 100644 --- a/testing/test_ldclient_evaluation.py +++ b/testing/test_ldclient_evaluation.py @@ -4,6 +4,7 @@ from 
ldclient.client import LDClient, Config from ldclient.feature_store import InMemoryFeatureStore from ldclient.flag import EvaluationDetail +from ldclient.interfaces import FeatureStore from ldclient.versioned_data_kind import FEATURES from testing.stub_util import MockEventProcessor, MockUpdateProcessor from testing.test_ldclient import make_off_flag_with_value @@ -28,6 +29,17 @@ 'debugEventsUntilDate': 1000 } +class ErroringFeatureStore(FeatureStore): + def get(self, kind, key, callback=lambda x: x): + raise NotImplementedError() + + def all(self, kind, callback=lambda x: x): + raise NotImplementedError() + + @property + def initialized(self): + return True + def make_client(store): return LDClient(config=Config(sdk_key='SDK_KEY', base_uri='http://test', @@ -35,6 +47,14 @@ def make_client(store): update_processor_class=MockUpdateProcessor, feature_store=store)) +def get_log_lines(caplog): + loglines = caplog.records + if callable(loglines): + # records() is a function in older versions of the caplog plugin + loglines = loglines() + return loglines + + def test_variation_for_existing_feature(): feature = make_off_flag_with_value('feature.key', 'value') store = InMemoryFeatureStore() @@ -116,6 +136,25 @@ def test_variation_detail_for_flag_that_evaluates_to_none(): assert expected == actual assert actual.is_default_value() == True +def test_variation_when_feature_store_throws_error(caplog): + store = ErroringFeatureStore() + client = make_client(store) + assert client.variation('feature.key', { "key": "user" }, default='default') == 'default' + loglines = get_log_lines(caplog) + assert len(loglines) == 1 + assert loglines[0].message == 'Unexpected error while retrieving feature flag "feature.key": NotImplementedError()' + +def test_variation_detail_when_feature_store_throws_error(caplog): + store = ErroringFeatureStore() + client = make_client(store) + expected = EvaluationDetail('default', None, {'kind': 'ERROR', 'errorKind': 'EXCEPTION'}) + actual = client.variation_detail('feature.key', { }, default='default') + assert expected == actual + assert actual.is_default_value() == True + loglines = get_log_lines(caplog) + assert len(loglines) == 1 + assert loglines[0].message == 'Unexpected error while retrieving feature flag "feature.key": NotImplementedError()' + def test_all_flags_returns_values(): store = InMemoryFeatureStore() store.init({ FEATURES: { 'key1': flag1, 'key2': flag2 } }) @@ -137,6 +176,14 @@ def test_all_flags_returns_none_if_user_has_no_key(): result = client.all_flags({ }) assert result is None +def test_all_flags_returns_none_if_feature_store_throws_error(caplog): + store = ErroringFeatureStore() + client = make_client(store) + assert client.all_flags({ "key": "user" }) is None + loglines = get_log_lines(caplog) + assert len(loglines) == 1 + assert loglines[0].message == 'Unable to read flags for all_flag_state: NotImplementedError()' + def test_all_flags_state_returns_state(): store = InMemoryFeatureStore() store.init({ FEATURES: { 'key1': flag1, 'key2': flag2 } }) @@ -297,3 +344,12 @@ def test_all_flags_state_returns_empty_state_if_user_has_no_key(): client = make_client(store) state = client.all_flags_state({ }) assert state.valid == False + +def test_all_flags_returns_empty_state_if_feature_store_throws_error(caplog): + store = ErroringFeatureStore() + client = make_client(store) + state = client.all_flags_state({ "key": "user" }) + assert state.valid == False + loglines = get_log_lines(caplog) + assert len(loglines) == 1 + assert loglines[0].message == 'Unable to read 
flags for all_flag_state: NotImplementedError()' From 5f16c8d31337ab03f4b925c5552074f6562d1b55 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 31 Dec 2018 12:48:35 -0800 Subject: [PATCH 23/50] gitignore --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index 0d1700ee..d988c61f 100644 --- a/.gitignore +++ b/.gitignore @@ -44,6 +44,7 @@ nosetests.xml coverage.xml *,cover .hypothesis/ +.pytest_cache # Translations *.mo From ac0f2eae2fc64b9402b708e1cf418eb1d2ce320a Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 31 Dec 2018 13:02:54 -0800 Subject: [PATCH 24/50] misc test fixes --- testing/test_ldclient_evaluation.py | 35 ++++++++++++++++------------- 1 file changed, 20 insertions(+), 15 deletions(-) diff --git a/testing/test_ldclient_evaluation.py b/testing/test_ldclient_evaluation.py index e48f0329..be925a5c 100644 --- a/testing/test_ldclient_evaluation.py +++ b/testing/test_ldclient_evaluation.py @@ -36,6 +36,15 @@ def get(self, kind, key, callback=lambda x: x): def all(self, kind, callback=lambda x: x): raise NotImplementedError() + def upsert(self, kind, item): + pass + + def delete(self, key, version): + pass + + def init(self, data): + pass + @property def initialized(self): return True @@ -47,12 +56,12 @@ def make_client(store): update_processor_class=MockUpdateProcessor, feature_store=store)) -def get_log_lines(caplog): +def get_log_lines(caplog, level): loglines = caplog.records if callable(loglines): # records() is a function in older versions of the caplog plugin loglines = loglines() - return loglines + return [line.message for line in loglines if line.levelname == level] def test_variation_for_existing_feature(): @@ -140,20 +149,18 @@ def test_variation_when_feature_store_throws_error(caplog): store = ErroringFeatureStore() client = make_client(store) assert client.variation('feature.key', { "key": "user" }, default='default') == 'default' - loglines = get_log_lines(caplog) - assert len(loglines) == 1 - assert loglines[0].message == 'Unexpected error while retrieving feature flag "feature.key": NotImplementedError()' + errlog = get_log_lines(caplog, 'ERROR') + assert errlog == [ 'Unexpected error while retrieving feature flag "feature.key": NotImplementedError()' ] def test_variation_detail_when_feature_store_throws_error(caplog): store = ErroringFeatureStore() client = make_client(store) expected = EvaluationDetail('default', None, {'kind': 'ERROR', 'errorKind': 'EXCEPTION'}) - actual = client.variation_detail('feature.key', { }, default='default') + actual = client.variation_detail('feature.key', { "key": "user" }, default='default') assert expected == actual assert actual.is_default_value() == True - loglines = get_log_lines(caplog) - assert len(loglines) == 1 - assert loglines[0].message == 'Unexpected error while retrieving feature flag "feature.key": NotImplementedError()' + errlog = get_log_lines(caplog, 'ERROR') + assert errlog == [ 'Unexpected error while retrieving feature flag "feature.key": NotImplementedError()' ] def test_all_flags_returns_values(): store = InMemoryFeatureStore() @@ -180,9 +187,8 @@ def test_all_flags_returns_none_if_feature_store_throws_error(caplog): store = ErroringFeatureStore() client = make_client(store) assert client.all_flags({ "key": "user" }) is None - loglines = get_log_lines(caplog) - assert len(loglines) == 1 - assert loglines[0].message == 'Unable to read flags for all_flag_state: NotImplementedError()' + errlog = get_log_lines(caplog, 'ERROR') + assert errlog == [ 'Unable to read flags 
for all_flag_state: NotImplementedError()' ] def test_all_flags_state_returns_state(): store = InMemoryFeatureStore() @@ -350,6 +356,5 @@ def test_all_flags_returns_empty_state_if_feature_store_throws_error(caplog): client = make_client(store) state = client.all_flags_state({ "key": "user" }) assert state.valid == False - loglines = get_log_lines(caplog) - assert len(loglines) == 1 - assert loglines[0].message == 'Unable to read flags for all_flag_state: NotImplementedError()' + errlog = get_log_lines(caplog, 'ERROR') + assert errlog == [ 'Unable to read flags for all_flag_state: NotImplementedError()' ] From 256b6fb0ca3eb868f28526a209b194b06267d685 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 9 Jan 2019 12:57:28 -0800 Subject: [PATCH 25/50] implement dependency ordering for feature store data --- ldclient/client.py | 32 ++++++++++++++++++++- ldclient/feature_store.py | 51 ++++++++++++++++++++++++++++++++- ldclient/versioned_data_kind.py | 11 +++++-- testing/stub_util.py | 27 +++++++++++++++-- testing/test_ldclient.py | 35 ++++++++++++++++++++-- 5 files changed, 148 insertions(+), 8 deletions(-) diff --git a/ldclient/client.py b/ldclient/client.py index eea7d970..3ce19d15 100644 --- a/ldclient/client.py +++ b/ldclient/client.py @@ -12,6 +12,7 @@ from ldclient.feature_requester import FeatureRequesterImpl from ldclient.flag import EvaluationDetail, evaluate, error_reason from ldclient.flags_state import FeatureFlagsState +from ldclient.interfaces import FeatureStore from ldclient.polling import PollingUpdateProcessor from ldclient.streaming import StreamingUpdateProcessor from ldclient.util import check_uwsgi, log @@ -27,6 +28,35 @@ from threading import Lock +class _FeatureStoreClientWrapper(FeatureStore): + """Provides additional behavior that the client requires before or after feature store operations. + Currently this just means sorting the data set for init(). In the future we may also use this + to provide an update listener capability. + """ + + def __init__(self, store): + self.store = store + + def get(self, kind, key, callback): + return self.store.get(self, kind, key, callback) + + def all(self, kind, callback): + return self.store.all(self, kind, callback) + + def init(self, all_data): + return self.store.init(self, all_data) + + def delete(self, kind, key, version): + return self.store.delete(self, kind, key, version) + + def upsert(self, kind, item): + return self.store.upsert(self, kind, item) + + @property + def initialized(self): + return self.store.initialized + + class LDClient(object): def __init__(self, sdk_key=None, config=None, start_wait=5): """Constructs a new LDClient instance. 
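The ordering applied for init() here is implemented by _FeatureStoreDataSetSorter in the feature_store.py hunk that follows. As a rough standalone sketch of the guarantee it provides (illustrative only, not the SDK's own code), every prerequisite flag comes out before any flag that depends on it:

    from collections import OrderedDict

    def sort_dependencies_first(items, get_dependency_keys):
        # Emit an item only after every item it depends on has been emitted.
        remaining = dict(items)
        out = OrderedDict()
        def visit(key):
            item = remaining.pop(key)
            for dep_key in get_dependency_keys(item):
                if dep_key in remaining:
                    visit(dep_key)
            out[key] = item
        while remaining:
            visit(next(iter(remaining)))
        return out

    flags = {
        'a': { 'key': 'a', 'prerequisites': [ { 'key': 'b' } ] },
        'b': { 'key': 'b' }
    }
    ordered = sort_dependencies_first(flags, lambda f: [p['key'] for p in f.get('prerequisites', [])])
    assert list(ordered.keys()) == ['b', 'a']  # the prerequisite 'b' always comes first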
@@ -55,7 +85,7 @@ def __init__(self, sdk_key=None, config=None, start_wait=5): self._event_processor = None self._lock = Lock() - self._store = self._config.feature_store + self._store = _FeatureStoreClientWrapper(self._config.feature_store) """ :type: FeatureStore """ if self._config.offline or not self._config.send_events: diff --git a/ldclient/feature_store.py b/ldclient/feature_store.py index e4d2f667..07223a32 100644 --- a/ldclient/feature_store.py +++ b/ldclient/feature_store.py @@ -1,4 +1,4 @@ -from collections import defaultdict +from collections import OrderedDict, defaultdict from ldclient.util import log from ldclient.interfaces import FeatureStore from ldclient.rwlock import ReadWriteLock @@ -126,3 +126,52 @@ def initialized(self): return self._initialized finally: self._lock.runlock() + + +class _FeatureStoreDataSetSorter: + """ + Implements a dependency graph ordering for data to be stored in a feature store. We must use this + on every data set that will be passed to the feature store's init() method. + """ + @staticmethod + def sort_all_collections(all_data): + """ Returns a copy of the input data that has the following guarantees: the iteration order of the outer + dictionary will be in ascending order by the VersionedDataKind's :priority property (if any), and for each + data kind that has a "get_dependency_keys" function, the inner dictionary will have an iteration order + where B is before A if A has a dependency on B. + """ + outer_hash = OrderedDict() + kinds = list(all_data.keys()) + def priority_order(kind): + return kind.get('priority', len(kind['namespace'])) # use arbitrary order if there's no priority + kinds.sort(key=priority_order) + for kind in kinds: + items = all_data[kind] + outer_hash[kind] = _FeatureStoreDataSetSorter._sort_collection(kind, items) + return outer_hash + + @staticmethod + def _sort_collection(kind, input): + if len(input) == 0 or not hasattr(kind, 'get_dependency_keys'): + return input + dependency_fn = kind.get_dependency_keys + if dependency_fn is None or len(input) == 0: + return input + remaining_items = input.copy() + items_out = OrderedDict() + while len(remaining_items) > 0: + # pick a random item that hasn't been updated yet + for key, item in remaining_items: + _FeatureStoreDataSetSorter._add_with_dependencies_first(item, dependency_fn, remaining_items, items_out) + break + return items_out + + @staticmethod + def _add_with_dependencies_first(item, dependency_fn, remaining_items, items_out): + key = item.get('key') + del remaining_items[key] # we won't need to visit this item again + for dep_key in dependency_fn(item): + dep_item = remaining_items.get(dep_key) + if dep_item is not None: + _FeatureStoreDataSetSorter._add_with_dependencies_first(dep_item, dependency_fn, remaining_items, items_out) + items_out[key] = item diff --git a/ldclient/versioned_data_kind.py b/ldclient/versioned_data_kind.py index 6df96a32..0054a42e 100644 --- a/ldclient/versioned_data_kind.py +++ b/ldclient/versioned_data_kind.py @@ -10,10 +10,17 @@ VersionedDataKind = namedtuple('VersionedDataKind', ['namespace', 'request_api_path', 'stream_api_path']) +VersionedDataKindWithOrdering = namedtuple('VersionedDataKindWithOrdering', + ['namespace', 'request_api_path', 'stream_api_path', 'priority', 'get_dependency_keys']) + FEATURES = VersionedDataKind(namespace = "features", request_api_path = "/sdk/latest-flags", - stream_api_path = "/flags/") + stream_api_path = "/flags/", + priority = 1, + get_dependency_keys = lambda flag: p.get('key') for p in
flag.get('prerequisites', [])) SEGMENTS = VersionedDataKind(namespace = "segments", request_api_path = "/sdk/latest-segments", - stream_api_path = "/segments/") + stream_api_path = "/segments/", + priority = 0, + get_dependency_keys = None) diff --git a/testing/stub_util.py b/testing/stub_util.py index bcb45ef2..80e53af6 100644 --- a/testing/stub_util.py +++ b/testing/stub_util.py @@ -1,14 +1,13 @@ from email.utils import formatdate from requests.structures import CaseInsensitiveDict -from ldclient.interfaces import EventProcessor, FeatureRequester, UpdateProcessor +from ldclient.interfaces import EventProcessor, FeatureRequester, FeatureStore, UpdateProcessor class MockEventProcessor(EventProcessor): def __init__(self, *_): self._running = False self._events = [] - mock_event_processor = self def stop(self): self._running = False @@ -103,3 +102,27 @@ def is_alive(self): def initialized(self): return True + +class CapturingFeatureStore(FeatureStore): + def init(self, all_data): + self.data = all_data + + def get(self, kind, key, callback=lambda x: x): + pass + + def all(self, kind, callback=lambda x: x): + pass + + def delete(self, kind, key, version): + pass + + def upsert(self, kind, item): + pass + + @property + def initialized(self): + return True + + @property + def received_data(self): + return self.data diff --git a/testing/test_ldclient.py b/testing/test_ldclient.py index 1766386b..be290fda 100644 --- a/testing/test_ldclient.py +++ b/testing/test_ldclient.py @@ -2,10 +2,10 @@ from ldclient.client import LDClient, Config from ldclient.event_processor import NullEventProcessor from ldclient.feature_store import InMemoryFeatureStore -from ldclient.interfaces import FeatureRequester, FeatureStore, UpdateProcessor +from ldclient.interfaces import UpdateProcessor from ldclient.versioned_data_kind import FEATURES import pytest -from testing.stub_util import MockEventProcessor, MockUpdateProcessor +from testing.stub_util import CapturingFeatureStore, MockEventProcessor, MockUpdateProcessor from testing.sync_util import wait_until try: @@ -259,3 +259,34 @@ def test_event_for_existing_feature_with_no_user_key(): def test_secure_mode_hash(): user = {'key': 'Message'} assert offline_client.secure_mode_hash(user) == "aa747c502a898200f9e4fa21bac68136f886a0e27aec70ba06daf2e2a5cb5597" + + +dependency_ordering_test_data = { + FEATURES: { + + }, + SEGMENTS: { + + } +} + +class DependencyOrderingDataUpdateProcessor(UpdateProcessor): + def __init__(self, config, store, ready): + store.init(dependency_ordering_test_data) + ready.set() + + def start(self): + pass + + def initialized(self): + return True + + +def test_store_data_set_ordering(): + store = CapturingFeatureStore() + config = Config(sdk_key = 'SDK_KEY', send_events=False, feature_store=store, + update_processor_class=DependencyOrderingDataUpdateProcessor) + client = LDClient(config=config) + + data = store.received_data + From 289077c9761e1cba7d574732ccd7059fd2ca1ede Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 9 Jan 2019 13:23:49 -0800 Subject: [PATCH 26/50] fix incomplete implementation & test --- ldclient/client.py | 15 ++++++++------- ldclient/feature_store.py | 7 +++++-- ldclient/versioned_data_kind.py | 8 +++++--- testing/test_ldclient.py | 32 +++++++++++++++++++++++++++----- 4 files changed, 45 insertions(+), 17 deletions(-) diff --git a/ldclient/client.py b/ldclient/client.py index 3ce19d15..30c37e53 100644 --- a/ldclient/client.py +++ b/ldclient/client.py @@ -10,6 +10,7 @@ from ldclient.config import Config as Config from 
ldclient.event_processor import NullEventProcessor from ldclient.feature_requester import FeatureRequesterImpl +from ldclient.feature_store import _FeatureStoreDataSetSorter from ldclient.flag import EvaluationDetail, evaluate, error_reason from ldclient.flags_state import FeatureFlagsState from ldclient.interfaces import FeatureStore @@ -37,20 +38,20 @@ class _FeatureStoreClientWrapper(FeatureStore): def __init__(self, store): self.store = store + def init(self, all_data): + return self.store.init(_FeatureStoreDataSetSorter.sort_all_collections(all_data)) + def get(self, kind, key, callback): - return self.store.get(self, kind, key, callback) + return self.store.get(kind, key, callback) def all(self, kind, callback): - return self.store.all(self, kind, callback) - - def init(self, all_data): - return self.store.init(self, all_data) + return self.store.all(kind, callback) def delete(self, kind, key, version): - return self.store.delete(self, kind, key, version) + return self.store.delete(kind, key, version) def upsert(self, kind, item): - return self.store.upsert(self, kind, item) + return self.store.upsert(kind, item) @property def initialized(self): diff --git a/ldclient/feature_store.py b/ldclient/feature_store.py index 07223a32..fccef5b5 100644 --- a/ldclient/feature_store.py +++ b/ldclient/feature_store.py @@ -2,6 +2,7 @@ from ldclient.util import log from ldclient.interfaces import FeatureStore from ldclient.rwlock import ReadWriteLock +from six import iteritems class CacheConfig: @@ -143,7 +144,9 @@ def sort_all_collections(all_data): outer_hash = OrderedDict() kinds = list(all_data.keys()) def priority_order(kind): - return kind.get('priority', len(kind['namespace'])) # use arbitrary order if there's no priority + if hasattr(kind, 'priority'): + return kind.priority + return len(kind.namespace) # use arbitrary order if there's no priority kinds.sort(key=priority_order) for kind in kinds: items = all_data[kind] @@ -161,7 +164,7 @@ def _sort_collection(kind, input): items_out = OrderedDict() while len(remaining_items) > 0: # pick a random item that hasn't been updated yet - for key, item in remaining_items: + for key, item in iteritems(remaining_items): _FeatureStoreDataSetSorter._add_with_dependencies_first(item, dependency_fn, remaining_items, items_out) break return items_out diff --git a/ldclient/versioned_data_kind.py b/ldclient/versioned_data_kind.py index 0054a42e..04acce43 100644 --- a/ldclient/versioned_data_kind.py +++ b/ldclient/versioned_data_kind.py @@ -7,19 +7,21 @@ to add a corresponding constant here and the existing store should be able to handle it. 
""" +# Note that VersionedDataKind without the extra attributes is no longer used in the SDK, +# but it's preserved here for backward compatibility just in case someone else used it VersionedDataKind = namedtuple('VersionedDataKind', ['namespace', 'request_api_path', 'stream_api_path']) VersionedDataKindWithOrdering = namedtuple('VersionedDataKindWithOrdering', ['namespace', 'request_api_path', 'stream_api_path', 'priority', 'get_dependency_keys']) -FEATURES = VersionedDataKind(namespace = "features", +FEATURES = VersionedDataKindWithOrdering(namespace = "features", request_api_path = "/sdk/latest-flags", stream_api_path = "/flags/", priority = 1, - get_dependency_keys = lambda flag: p.get('key') for p in flag.get('prerequisites', [])) + get_dependency_keys = lambda flag: (p.get('key') for p in flag.get('prerequisites', []))) -SEGMENTS = VersionedDataKind(namespace = "segments", +SEGMENTS = VersionedDataKindWithOrdering(namespace = "segments", request_api_path = "/sdk/latest-segments", stream_api_path = "/segments/", priority = 0, diff --git a/testing/test_ldclient.py b/testing/test_ldclient.py index be290fda..4e5dc2f1 100644 --- a/testing/test_ldclient.py +++ b/testing/test_ldclient.py @@ -3,7 +3,7 @@ from ldclient.event_processor import NullEventProcessor from ldclient.feature_store import InMemoryFeatureStore from ldclient.interfaces import UpdateProcessor -from ldclient.versioned_data_kind import FEATURES +from ldclient.versioned_data_kind import FEATURES, SEGMENTS import pytest from testing.stub_util import CapturingFeatureStore, MockEventProcessor, MockUpdateProcessor from testing.sync_util import wait_until @@ -263,10 +263,15 @@ def test_secure_mode_hash(): dependency_ordering_test_data = { FEATURES: { - + "a": { "key": "a", "prerequisites": [ { "key": "b" }, { "key": "c" } ] }, + "b": { "key": "b", "prerequisites": [ { "key": "c" }, { "key": "e" } ] }, + "c": { "key": "c" }, + "d": { "key": "d" }, + "e": { "key": "e" }, + "f": { "key": "f" } }, SEGMENTS: { - + "o": { "key": "o" } } } @@ -286,7 +291,24 @@ def test_store_data_set_ordering(): store = CapturingFeatureStore() config = Config(sdk_key = 'SDK_KEY', send_events=False, feature_store=store, update_processor_class=DependencyOrderingDataUpdateProcessor) - client = LDClient(config=config) + LDClient(config=config) data = store.received_data - + assert data is not None + assert len(data) == 2 + + assert data.keys()[0] == SEGMENTS + assert len(data.values()[0]) == len(dependency_ordering_test_data[SEGMENTS]) + + assert data.keys()[1] == FEATURES + flags_map = data.values()[1] + flags_list = flags_map.values() + assert len(flags_list) == len(dependency_ordering_test_data[FEATURES]) + for item_index, item in enumerate(flags_list): + for prereq in item.get("prerequisites", []): + prereq_item = flags_map[prereq["key"]] + prereq_index = flags_list.index(prereq_item) + if prereq_index > item_index: + all_keys = (f["key"] for f in flags_list) + raise Exception("%s depends on %s, but %s was listed first; keys in order are [%s]" % + (item["key"], prereq["key"], item["key"], ", ".join(all_keys))) From 2c5929497d015c1377d409124173e8b5c88cb7f9 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 9 Jan 2019 13:31:23 -0800 Subject: [PATCH 27/50] Python 3.x fix --- testing/test_ldclient.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/testing/test_ldclient.py b/testing/test_ldclient.py index 4e5dc2f1..a31d2324 100644 --- a/testing/test_ldclient.py +++ b/testing/test_ldclient.py @@ -296,13 +296,15 @@ def 
test_store_data_set_ordering(): data = store.received_data assert data is not None assert len(data) == 2 + keys = list(data.keys()) + values = list(data.values()) - assert data.keys()[0] == SEGMENTS - assert len(data.values()[0]) == len(dependency_ordering_test_data[SEGMENTS]) + assert keys[0] == SEGMENTS + assert len(values[0]) == len(dependency_ordering_test_data[SEGMENTS]) - assert data.keys()[1] == FEATURES - flags_map = data.values()[1] - flags_list = flags_map.values() + assert keys[1] == FEATURES + flags_map = values[1] + flags_list = list(flags_map.values()) assert len(flags_list) == len(dependency_ordering_test_data[FEATURES]) for item_index, item in enumerate(flags_list): for prereq in item.get("prerequisites", []): From 78b611865e82278339e8fed4a3fe84ee24b24466 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 15 Jan 2019 16:04:39 -0800 Subject: [PATCH 28/50] minor doc fixes --- README.md | 14 ++++++++++---- ldclient/integrations.py | 19 ++++++++++++++++++- 2 files changed, 28 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index edef13e6..d25ee307 100644 --- a/README.md +++ b/README.md @@ -52,7 +52,6 @@ Or it can be set from within python: os.environ["https_proxy"] = "https://web-proxy.domain.com:8080" ``` - If your proxy requires authentication then you can prefix the URL with your login information: ``` export HTTPS_PROXY=http://user:pass@web-proxy.domain.com:8080 ``` @@ -75,12 +74,19 @@ Your first feature flag # the code to run if the feature is off Supported Python versions ----------- +------------------------- + The SDK is tested with the most recent patch releases of Python 2.7, 3.3, 3.4, 3.5, and 3.6. Python 2.6 is no longer supported. +Database integrations +--------------------- + +Feature flag data can be kept in a persistent store using Redis or DynamoDB. These adapters are implemented in the `DynamoDB` and `Redis` classes in `ldclient.integrations`; to use them, call the `new_feature_store` method in the appropriate class, and put the returned object in the `feature_store` property of your client configuration. See [`ldclient.integrations`](https://github.com/launchdarkly/python-client-private/blob/master/ldclient/integrations.py) and the [SDK reference guide](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store) for more information. + Using flag data from a file --------------------------- -For testing purposes, the SDK can be made to read feature flag state from a file or files instead of connecting to LaunchDarkly. See [`file_data_source.py`](https://github.com/launchdarkly/python-client/blob/master/ldclient/file_data_source.py) for more details. + +For testing purposes, the SDK can be made to read feature flag state from a file or files instead of connecting to LaunchDarkly. See [`file_data_source.py`](https://github.com/launchdarkly/python-client/blob/master/ldclient/file_data_source.py) and the [SDK reference guide](https://docs.launchdarkly.com/v2.0/docs/reading-flags-from-a-file) for more details. Learn more ----------- @@ -100,7 +106,7 @@ Contributing See [CONTRIBUTING](CONTRIBUTING.md) for more information. About LaunchDarkly ------------ +------------------ * LaunchDarkly is a continuous delivery platform that provides feature flags as a service and allows developers to iterate quickly and safely. We allow you to easily flag your features and manage them from the LaunchDarkly dashboard.
With LaunchDarkly, you can: * Roll out a new feature to a subset of your users (like a group of users who opt-in to a beta tester group), gathering feedback and bug reports from real-world use cases. diff --git a/ldclient/integrations.py b/ldclient/integrations.py index 6102d354..63c01202 100644 --- a/ldclient/integrations.py +++ b/ldclient/integrations.py @@ -15,8 +15,21 @@ def new_feature_store(table_name, caching=CacheConfig.default()): """Creates a DynamoDB-backed implementation of `:class:ldclient.feature_store.FeatureStore`. + To use this method, you must first install the `boto3` package containing the AWS SDK. + Then, put the object returned by this method into the `feature_store` property of your + client configuration (:class:ldclient.config.Config). + + Note that the DynamoDB table must already exist; the LaunchDarkly SDK does not create the table + automatically, because it has no way of knowing what additional properties (such as permissions + and throughput) you would want it to have. The table must have a partition key called + "namespace" and a sort key called "key", both with a string type. + + By default, the DynamoDB client will try to get your AWS credentials and region name from + environment variables and/or local configuration files, as described in the AWS SDK documentation. + You may also pass configuration settings in `dynamodb_opts`. + :param string table_name: The name of an existing DynamoDB table - :param string prefix: An optional namespace prefix to be prepended to all Redis keys + :param string prefix: An optional namespace prefix to be prepended to all DynamoDB keys :param dict dynamodb_opts: Optional parameters for configuring the DynamoDB client, as defined in the boto3 API; see https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html#boto3.session.Session.client :param CacheConfig caching: Specifies whether local caching should be enabled and if so, sets the cache properties; defaults to `CacheConfig.default()` @@ -40,6 +53,10 @@ def new_feature_store(url='redis://localhost:6379/0', caching=CacheConfig.default()): """Creates a Redis-backed implementation of `:class:ldclient.feature_store.FeatureStore`. + To use this method, you must first install the `redis` package. Then, put the object + returned by this method into the `feature_store` property of your client configuration + (:class:ldclient.config.Config).
+ :param string url: The URL of the Redis host; defaults to `DEFAULT_URL` :param string prefix: A namespace prefix to be prepended to all Redis keys; defaults to `DEFAULT_PREFIX` From 3eb821c483dfe9ae5a8d6b6d62a717bc6d32fc5b Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 18 Jan 2019 14:12:33 -0800 Subject: [PATCH 29/50] feature store test improvements --- ldclient/redis_feature_store.py | 4 +-- testing/test_feature_store.py | 47 ++++++++++++++++++++++++++++----- 2 files changed, 42 insertions(+), 9 deletions(-) diff --git a/ldclient/redis_feature_store.py b/ldclient/redis_feature_store.py index 27139567..16302212 100644 --- a/ldclient/redis_feature_store.py +++ b/ldclient/redis_feature_store.py @@ -59,10 +59,10 @@ def initialized(self): class _RedisFeatureStoreCore(FeatureStoreCore): def __init__(self, url, prefix, max_connections): - self._prefix = prefix + self._prefix = prefix or 'launchdarkly' self._pool = redis.ConnectionPool.from_url(url=url, max_connections=max_connections) self.test_update_hook = None # exposed for testing - log.info("Started RedisFeatureStore connected to URL: " + url + " using prefix: " + prefix) + log.info("Started RedisFeatureStore connected to URL: " + url + " using prefix: " + self._prefix) def _items_key(self, kind): return "{0}:{1}".format(self._prefix, kind.namespace) diff --git a/testing/test_feature_store.py b/testing/test_feature_store.py index 8ab8c422..f6912ff3 100644 --- a/testing/test_feature_store.py +++ b/testing/test_feature_store.py @@ -15,6 +15,10 @@ class InMemoryTester(object): def init_store(self): return InMemoryFeatureStore() + @property + def supports_prefix(self): + return False + class RedisTester(object): redis_host = 'localhost' @@ -23,19 +27,27 @@ class RedisTester(object): def __init__(self, cache_config): self._cache_config = cache_config - def init_store(self): + def init_store(self, prefix=None): self._clear_data() - return Redis.new_feature_store(caching=self._cache_config) + return Redis.new_feature_store(caching=self._cache_config, prefix=prefix) + + @property + def supports_prefix(self): + return True def _clear_data(self): r = redis.StrictRedis(host=self.redis_host, port=self.redis_port, db=0) - r.delete("launchdarkly:features") + r.flushdb() class RedisWithDeprecatedConstructorTester(RedisTester): - def init_store(self): + def init_store(self, prefix=None): self._clear_data() - return RedisFeatureStore(expiration=(30 if self._cache_config.enabled else 0)) + return RedisFeatureStore(expiration=(30 if self._cache_config.enabled else 0), prefix=prefix) + + @property + def supports_prefix(self): + return True class DynamoDBTester(object): @@ -51,10 +63,14 @@ class DynamoDBTester(object): def __init__(self, cache_config): self._cache_config = cache_config - def init_store(self): + def init_store(self, prefix=None): self._create_table() self._clear_data() - return DynamoDB.new_feature_store(self.table_name, dynamodb_opts=self.options) + return DynamoDB.new_feature_store(self.table_name, prefix=prefix, dynamodb_opts=self.options) + + @property + def supports_prefix(self): + return True def _create_table(self): if self.table_created: @@ -131,6 +147,10 @@ class TestFeatureStore: DynamoDBTester(CacheConfig.disabled()) ] + @pytest.fixture(params=params) + def tester(self, request): + return request.param + @pytest.fixture(params=params) def store(self, request): return request.param.init_store() @@ -230,6 +250,19 @@ def test_upsert_older_version_after_delete(self, store): store.upsert(FEATURES, old_ver) assert 
store.get(FEATURES, 'foo', lambda x: x) is None + def test_stores_with_different_prefixes_are_independent(self, tester): + if not tester.supports_prefix: + return + store_a = tester.init_store('a') + store_b = tester.init_store('b') + flag = { 'key': 'flag', 'version': 1 } + store_a.init({ FEATURES: { flag['key']: flag } }) + store_b.init({ FEATURES: { } }) + item = store_a.get(FEATURES, flag['key'], lambda x: x) + assert item == flag + item = store_b.get(FEATURES, flag['key'], lambda x: x) + assert item is None + class TestRedisFeatureStoreExtraTests: def test_upsert_race_condition_against_external_client_with_higher_version(self): From cc938e33221b35daf612b10c881e87c5b5b60056 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 18 Jan 2019 16:14:39 -0800 Subject: [PATCH 30/50] better database prefix test --- testing/test_feature_store.py | 31 +++++++++++++++++++++++++------ 1 file changed, 25 insertions(+), 6 deletions(-) diff --git a/testing/test_feature_store.py b/testing/test_feature_store.py index f6912ff3..6c0f0c5e 100644 --- a/testing/test_feature_store.py +++ b/testing/test_feature_store.py @@ -251,17 +251,36 @@ def test_upsert_older_version_after_delete(self, store): assert store.get(FEATURES, 'foo', lambda x: x) is None def test_stores_with_different_prefixes_are_independent(self, tester): + # This verifies that init, get, and upsert are all correctly using the specified key prefix. if not tester.supports_prefix: return + + flag_a1 = { 'key': 'flagA1', 'version': 1 } + flag_a2 = { 'key': 'flagA2', 'version': 1 } + flag_b1 = { 'key': 'flagB1', 'version': 1 } + flag_b2 = { 'key': 'flagB2', 'version': 1 } store_a = tester.init_store('a') store_b = tester.init_store('b') - flag = { 'key': 'flag', 'version': 1 } - store_a.init({ FEATURES: { flag['key']: flag } }) - store_b.init({ FEATURES: { } }) - item = store_a.get(FEATURES, flag['key'], lambda x: x) - assert item == flag - item = store_b.get(FEATURES, flag['key'], lambda x: x) + + store_a.init({ FEATURES: { 'flagA1': flag_a1 } }) + store_a.upsert(FEATURES, flag_a2) + + store_b.init({ FEATURES: { 'flagB1': flag_b1 } }) + store_b.upsert(FEATURES, flag_b2) + + item = store_a.get(FEATURES, 'flagA1', lambda x: x) + assert item == flag_a1 + item = store_a.get(FEATURES, 'flagB1', lambda x: x) + assert item is None + items = store_a.all(FEATURES, lambda x: x) + assert items == { 'flagA1': flag_a1, 'flagA2': flag_a2 } + + item = store_b.get(FEATURES, 'flagB1', lambda x: x) + assert item == flag_b1 + item = store_b.get(FEATURES, 'flagA1', lambda x: x) assert item is None + items = store_b.all(FEATURES, lambda x: x) + assert items == { 'flagB1': flag_b1, 'flagB2': flag_b2 } class TestRedisFeatureStoreExtraTests: From 5b8b33745521e5909d01fa2982a66b4b28901cb7 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 18 Jan 2019 16:20:32 -0800 Subject: [PATCH 31/50] clarify comment --- testing/test_feature_store.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/testing/test_feature_store.py b/testing/test_feature_store.py index 6c0f0c5e..35a2ef6e 100644 --- a/testing/test_feature_store.py +++ b/testing/test_feature_store.py @@ -251,7 +251,8 @@ def test_upsert_older_version_after_delete(self, store): assert store.get(FEATURES, 'foo', lambda x: x) is None def test_stores_with_different_prefixes_are_independent(self, tester): - # This verifies that init, get, and upsert are all correctly using the specified key prefix. + # This verifies that init(), get(), all(), and upsert() are all correctly using the specified key prefix. 
+ # The delete() method isn't tested separately because it's implemented as a variant of upsert(). if not tester.supports_prefix: return From f9ce243f9e6e49dadae858fd2bfc654f41b56f7c Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 18 Jan 2019 17:47:16 -0800 Subject: [PATCH 32/50] add Consul feature store integration --- .circleci/config.yml | 11 +- consul-requirements.txt | 1 + ldclient/impl/__init__.py | 0 ldclient/impl/integrations/__init__.py | 0 ldclient/impl/integrations/consul/__init__.py | 0 .../impl/integrations/consul/feature_store.py | 125 ++++++++++++++++++ ldclient/integrations.py | 36 +++++ testing/test_feature_store.py | 37 +++++- 8 files changed, 207 insertions(+), 3 deletions(-) create mode 100644 consul-requirements.txt create mode 100644 ldclient/impl/__init__.py create mode 100644 ldclient/impl/integrations/__init__.py create mode 100644 ldclient/impl/integrations/consul/__init__.py create mode 100644 ldclient/impl/integrations/consul/feature_store.py diff --git a/.circleci/config.yml b/.circleci/config.yml index 92699a3c..5c83ba64 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -17,13 +17,16 @@ test-template: &test-template command: | sudo pip install --upgrade pip setuptools; sudo pip install -r test-requirements.txt; + if [[ "$CIRCLE_JOB" != "test-3.3" ]] && [[ "$CIRCLE_JOB" != "test-3.4" ]]; then + sudo pip install -r consul-requirements.text; + fi; sudo python setup.py install; pip freeze - run: name: run tests command: | mkdir test-reports; - if [[ $CIRCLE_JOB == test-2.7 ]]; then + if [[ "$CIRCLE_JOB" == "test-2.7" ]]; then pytest -s --cov=ldclient --junitxml=test-reports/junit.xml testing; sh -c '[ -n "${CODECLIMATE_REPO_TOKEN+1}" ] && codeclimate-test-reporter || echo "No CODECLIMATE_REPO_TOKEN value is set; not publishing coverage report"'; else @@ -41,33 +44,39 @@ jobs: - image: circleci/python:2.7-jessie - image: redis - image: amazon/dynamodb-local + - image: consul test-3.3: <<: *test-template docker: - image: circleci/python:3.3-jessie - image: redis - image: amazon/dynamodb-local + # python-consul doesn't support Python 3.3 test-3.4: <<: *test-template docker: - image: circleci/python:3.4-jessie - image: redis - image: amazon/dynamodb-local + # python-consul doesn't support Python 3.4 test-3.5: <<: *test-template docker: - image: circleci/python:3.5-jessie - image: redis - image: amazon/dynamodb-local + - image: consul test-3.6: <<: *test-template docker: - image: circleci/python:3.6-jessie - image: redis - image: amazon/dynamodb-local + - image: consul test-3.7: <<: *test-template docker: - image: circleci/python:3.7-stretch - image: redis - image: amazon/dynamodb-local + - image: consul diff --git a/consul-requirements.txt b/consul-requirements.txt new file mode 100644 index 00000000..637f7fe1 --- /dev/null +++ b/consul-requirements.txt @@ -0,0 +1 @@ +python-consul>=1.0.1 diff --git a/ldclient/impl/__init__.py b/ldclient/impl/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ldclient/impl/integrations/__init__.py b/ldclient/impl/integrations/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ldclient/impl/integrations/consul/__init__.py b/ldclient/impl/integrations/consul/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ldclient/impl/integrations/consul/feature_store.py b/ldclient/impl/integrations/consul/feature_store.py new file mode 100644 index 00000000..5fe2d8ad --- /dev/null +++ b/ldclient/impl/integrations/consul/feature_store.py @@ -0,0 +1,125 @@ +import json +
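+# python-consul may not be installed; in that case the feature store constructor below raises NotImplementedError.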
+have_consul = False +try: + import consul + have_consul = True +except ImportError: + pass + +from ldclient import log +from ldclient.feature_store import CacheConfig +from ldclient.feature_store_helpers import CachingStoreWrapper +from ldclient.interfaces import FeatureStore, FeatureStoreCore + +# +# Internal implementation of the Consul feature store. +# +# Implementation notes: +# +# * Feature flags, segments, and any other kind of entity the LaunchDarkly client may wish +# to store, are stored as individual items with the key "{prefix}/features/{flag-key}", +# "{prefix}/segments/{segment-key}", etc. +# +# * The special key "{prefix}/$inited" indicates that the store contains a complete data set. +# +# * Since Consul has limited support for transactions (they can't contain more than 64 +# operations), the init method-- which replaces the entire data store-- is not guaranteed to +# be atomic, so there can be a race condition if another process is adding new data via +# Upsert. To minimize this, we don't delete all the data at the start; instead, we update +# the items we've received, and then delete all other items. That could potentially result in +# deleting new data from another process, but that would be the case anyway if the Init +# happened to execute later than the Upsert; we are relying on the fact that normally the +# process that did the Init will also receive the new data shortly and do its own Upsert. +# + +class _ConsulFeatureStoreCore(FeatureStoreCore): + def __init__(self, host, port, prefix, consul_opts): + if not have_consul: + raise NotImplementedError("Cannot use Consul feature store because the python-consul package is not installed") + opts = consul_opts or {} + if host is not None: + opts['host'] = host + if port is not None: + opts['port'] = port + self._prefix = ("launchdarkly" if prefix is None else prefix) + "/" + self._client = consul.Consul(**opts) + + def init_internal(self, all_data): + # Start by reading the existing keys; we will later delete any of these that weren't in all_data. + index, keys = self._client.kv.get(self._prefix, recurse=True, keys=True) + unused_old_keys = set(keys or []) + + num_items = 0 + inited_key = self._inited_key() + unused_old_keys.discard(inited_key) + + # Insert or update every provided item. Note that this Consul client doesn't support batch + # operations (the "txn" method), so we'll write them one at a time. 
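+ # (Each kv.put call below is a separate request to the Consul agent, so init time grows linearly with the number of items.)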
+ for kind, items in all_data.items(): + for key, item in items.items(): + encoded_item = json.dumps(item) + db_key = self._item_key(kind, item['key']) + self._client.kv.put(db_key, encoded_item) + unused_old_keys.discard(db_key) + num_items = num_items + 1 + + # Now delete any previously existing items whose keys were not in the current data + for key in unused_old_keys: + self._client.kv.delete(key) + + # Now set the special key that we check in initialized_internal() + self._client.kv.put(inited_key, "") + + log.info('Initialized Consul store with %d items', num_items) + + def get_internal(self, kind, key): + index, resp = self._client.kv.get(self._item_key(kind, key)) + return None if resp is None else json.loads(resp['Value']) + + def get_all_internal(self, kind): + items_out = {} + index, results = self._client.kv.get(self._kind_key(kind), recurse=True) + for result in (results or []): + item = json.loads(result['Value']) + items_out[item['key']] = item + return items_out + + def upsert_internal(self, kind, new_item): + key = self._item_key(kind, new_item['key']) + encoded_item = json.dumps(new_item) + + # We will potentially keep retrying indefinitely until someone's write succeeds + while True: + index, old_value = self._client.kv.get(key) + if old_value is None: + mod_index = 0 + else: + old_item = json.loads(old_value['Value']) + # Check whether the item is stale. If so, don't do the update (and return the existing item to + # CachingStoreWrapper so it can be cached) + if old_item['version'] >= new_item['version']: + return old_item + mod_index = old_value['ModifyIndex'] + + # Otherwise, try to write. We will do a compare-and-set operation, so the write will only succeed if + # the key's ModifyIndex is still equal to the previous value. If the previous ModifyIndex was zero, + # it means the key did not previously exist and the write will only succeed if it still doesn't exist. + success = self._client.kv.put(key, encoded_item, cas=mod_index) + if success: + return new_item + + log.debug('Concurrent modification detected, retrying') + + def initialized_internal(self): + index, resp = self._client.kv.get(self._inited_key()) + return (resp is not None) + + def _kind_key(self, kind): + return self._prefix + kind.namespace + + def _item_key(self, kind, key): + return self._kind_key(kind) + '/' + key + + def _inited_key(self): + return self._prefix + ('$inited') diff --git a/ldclient/integrations.py b/ldclient/integrations.py index 63c01202..aa74da1e 100644 --- a/ldclient/integrations.py +++ b/ldclient/integrations.py @@ -1,9 +1,41 @@ from ldclient.feature_store import CacheConfig from ldclient.feature_store_helpers import CachingStoreWrapper +from ldclient.impl.integrations.consul.feature_store import _ConsulFeatureStoreCore from ldclient.dynamodb_feature_store import _DynamoDBFeatureStoreCore from ldclient.redis_feature_store import _RedisFeatureStoreCore + +class Consul(object): + """Provides factory methods for integrations between the LaunchDarkly SDK and Consul. + """ + + @staticmethod + def new_feature_store(host=None, + port=None, + prefix=None, + consul_opts=None, + caching=CacheConfig.default()): + """Creates a Consul-backed implementation of `:class:ldclient.feature_store.FeatureStore`. + For more details about how and why you can use a persistent feature store, see the + SDK reference guide: https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store + + To use this method, you must first install the `python-consul` package.
Then, put the object + returned by this method into the `feature_store` property of your client configuration + (:class:ldclient.config.Config). + + :param string host: Hostname of the Consul server (uses "localhost" if omitted) + :param int port: Port of the Consul server (uses 8500 if omitted) + :param string prefix: An optional namespace prefix to be prepended to all Consul keys + :param dict consul_opts: Optional parameters for configuring the Consul client, if you need + to set any of them besides host and port, as defined in the python-consul API; see + https://python-consul.readthedocs.io/en/latest/#consul + :param CacheConfig caching: Specifies whether local caching should be enabled and if so, + sets the cache properties; defaults to `CacheConfig.default()` + """ + core = _ConsulFeatureStoreCore(host, port, prefix, consul_opts) + return CachingStoreWrapper(core, caching) + + class DynamoDB(object): """Provides factory methods for integrations between the LaunchDarkly SDK and DynamoDB. """ @@ -14,6 +46,8 @@ def new_feature_store(table_name, dynamodb_opts={}, caching=CacheConfig.default()): """Creates a DynamoDB-backed implementation of `:class:ldclient.feature_store.FeatureStore`. + For more details about how and why you can use a persistent feature store, see the + SDK reference guide: https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store To use this method, you must first install the `boto3` package containing the AWS SDK. Then, put the object returned by this method into the `feature_store` property of your client configuration (:class:ldclient.config.Config). @@ -40,6 +86,8 @@ def new_feature_store(url='redis://localhost:6379/0', max_connections=16, caching=CacheConfig.default()): """Creates a Redis-backed implementation of `:class:ldclient.feature_store.FeatureStore`. + For more details about how and why you can use a persistent feature store, see the + SDK reference guide: https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store To use this method, you must first install the `redis` package.
Then, put the object returned by this method into the `feature_store` property of your client configuration diff --git a/testing/test_feature_store.py b/testing/test_feature_store.py index 35a2ef6e..76a7f41e 100644 --- a/testing/test_feature_store.py +++ b/testing/test_feature_store.py @@ -1,12 +1,21 @@ import boto3 +import consul import json import pytest import redis import time +# Consul is only supported in some Python versions +have_consul = False +try: + import consul + have_consul = True +except ImportError: + pass + from ldclient.dynamodb_feature_store import _DynamoDBFeatureStoreCore, _DynamoDBHelpers from ldclient.feature_store import CacheConfig, InMemoryFeatureStore -from ldclient.integrations import DynamoDB, Redis +from ldclient.integrations import Consul, DynamoDB, Redis from ldclient.redis_feature_store import RedisFeatureStore from ldclient.versioned_data_kind import FEATURES @@ -50,6 +59,25 @@ def supports_prefix(self): return True +class ConsulTester(object): + def __init__(self, cache_config): + self._cache_config = cache_config + + def init_store(self, prefix=None): + self._clear_data(prefix or "launchdarkly") + return Consul.new_feature_store(prefix=prefix, caching=self._cache_config) + + @property + def supports_prefix(self): + return True + + def _clear_data(self, prefix): + client = consul.Consul() + index, keys = client.kv.get(prefix + "/", recurse=True, keys=True) + for key in (keys or []): + client.kv.delete(key) + + class DynamoDBTester(object): table_name = 'LD_DYNAMODB_TEST_TABLE' table_created = False @@ -66,7 +94,8 @@ def __init__(self, cache_config): def init_store(self, prefix=None): self._create_table() self._clear_data() - return DynamoDB.new_feature_store(self.table_name, prefix=prefix, dynamodb_opts=self.options) + return DynamoDB.new_feature_store(self.table_name, prefix=prefix, dynamodb_opts=self.options, + caching=self._cache_config) @property def supports_prefix(self): @@ -147,6 +176,10 @@ class TestFeatureStore: DynamoDBTester(CacheConfig.disabled()) ] + if have_consul: + params.append(ConsulTester(CacheConfig.default())) + params.append(ConsulTester(CacheConfig.disabled())) + @pytest.fixture(params=params) def tester(self, request): return request.param From 89a96be19b24163292c0b00a46638325f3cf780e Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 18 Jan 2019 17:49:25 -0800 Subject: [PATCH 33/50] typo --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 5c83ba64..8671b022 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -18,7 +18,7 @@ test-template: &test-template sudo pip install --upgrade pip setuptools; sudo pip install -r test-requirements.txt; if [[ "$CIRCLE_JOB != test-3.3" ]] && [[ "$CIRCLE_JOB" != "test-3.4" ]]; then - sudo pip install -r consul-requirements.text; + sudo pip install -r consul-requirements.txt; fi; sudo python setup.py install; pip freeze From da8c1a67b8492e30800f411a1616538f8ee665e2 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 18 Jan 2019 17:53:21 -0800 Subject: [PATCH 34/50] rm extra import --- testing/test_feature_store.py | 1 - 1 file changed, 1 deletion(-) diff --git a/testing/test_feature_store.py b/testing/test_feature_store.py index 76a7f41e..6370a848 100644 --- a/testing/test_feature_store.py +++ b/testing/test_feature_store.py @@ -1,5 +1,4 @@ import boto3 -import consul import json import pytest import redis From b19e6188d834c5e1997050200e0c59b9664a842a Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: 
Fri, 18 Jan 2019 18:04:15 -0800 Subject: [PATCH 35/50] fix byte/string issue and rename file --- .../consul/{feature_store.py => consul_feature_store.py} | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) rename ldclient/impl/integrations/consul/{feature_store.py => consul_feature_store.py} (97%) diff --git a/ldclient/impl/integrations/consul/feature_store.py b/ldclient/impl/integrations/consul/consul_feature_store.py similarity index 97% rename from ldclient/impl/integrations/consul/feature_store.py rename to ldclient/impl/integrations/consul/consul_feature_store.py index 5fe2d8ad..6fc8652e 100644 --- a/ldclient/impl/integrations/consul/feature_store.py +++ b/ldclient/impl/integrations/consul/consul_feature_store.py @@ -75,13 +75,13 @@ def init_internal(self, all_data): def get_internal(self, kind, key): index, resp = self._client.kv.get(self._item_key(kind, key)) - return None if resp is None else json.loads(resp['Value']) + return None if resp is None else json.loads(resp['Value'].decode('utf-8')) def get_all_internal(self, kind): items_out = {} index, results = self._client.kv.get(self._kind_key(kind), recurse=True) for result in results: - item = json.loads(result['Value']) + item = json.loads(result['Value'].decode('utf-8')) items_out[item['key']] = item return items_out @@ -95,7 +95,7 @@ def upsert_internal(self, kind, new_item): if old_value is None: mod_index = 0 else: - old_item = json.loads(old_value['Value']) + old_item = json.loads(old_value['Value'].decode('utf-8')) # Check whether the item is stale. If so, don't do the update (and return the existing item to # CachingStoreWrapper so it can be cached) if old_item['version'] >= new_item['version']: From db621dc4d72d90b87a6474a06cf010a55b3d3bf2 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 18 Jan 2019 18:04:36 -0800 Subject: [PATCH 36/50] rename file --- ldclient/integrations.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/ldclient/integrations.py b/ldclient/integrations.py index aa74da1e..d2d55354 100644 --- a/ldclient/integrations.py +++ b/ldclient/integrations.py @@ -1,6 +1,6 @@ from ldclient.feature_store import CacheConfig from ldclient.feature_store_helpers import CachingStoreWrapper -from ldclient.impl.integrations.consul.feature_store import _ConsulFeatureStoreCore +from ldclient.impl.integrations.consul.consul_feature_store import _ConsulFeatureStoreCore from ldclient.dynamodb_feature_store import _DynamoDBFeatureStoreCore from ldclient.redis_feature_store import _RedisFeatureStoreCore @@ -23,6 +23,9 @@ def new_feature_store(host=None, returned by this method into the `feature_store` property of your client configuration (:class:ldclient.config.Config). + Note that `python-consul` is not available for Python 3.3 or 3.4, so this feature cannot be + used in those Python versions. 
+ :param string host: Hostname of the Consul server (uses "localhost" if omitted) :param int port: Port of the Consul server (uses 8500 if omitted) :param string prefix: An optional namespace prefix to be prepended to all Consul keys From b09e07eabba1410adba388cce7980488238dba8a Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 18 Jan 2019 18:04:45 -0800 Subject: [PATCH 37/50] docs --- README.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index d25ee307..61e67050 100644 --- a/README.md +++ b/README.md @@ -81,7 +81,9 @@ The SDK is tested with the most recent patch releases of Python 2.7, 3.3, 3.4, 3 Database integrations --------------------- -Feature flag data can be kept in a persistent store using Redis or DynamoDB. These adapters are implemented in the `DynamoDB` and `Redis` classes in `ldclient.integrations`; to use them, call the `new_feature_store` method in the appropriate class, and put the returned object in the `feature_store` property of your client configuration. See [`ldclient.integrations`](https://github.com/launchdarkly/python-client-private/blob/master/ldclient/integrations.py) and the [SDK reference guide](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store) for more information. +Feature flag data can be kept in a persistent store using Consul, DynamoDB, or Redis. These adapters are implemented in the `Consul`, `DynamoDB` and `Redis` classes in `ldclient.integrations`; to use them, call the `new_feature_store` method in the appropriate class, and put the returned object in the `feature_store` property of your client configuration. See [`ldclient.integrations`](https://github.com/launchdarkly/python-client-private/blob/master/ldclient/integrations.py) and the [SDK reference guide](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store) for more information. + +Note that Consul is not supported in Python 3.3 or 3.4. 
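+
+For example, a minimal Consul-backed configuration might look like this (a sketch; substitute your own SDK key and any constructor arguments you need):
+
+```python
+from ldclient.config import Config
+from ldclient.integrations import Consul
+
+store = Consul.new_feature_store(prefix='launchdarkly')
+config = Config(sdk_key='YOUR_SDK_KEY', feature_store=store)
+```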
Using flag data from a file --------------------------- From 9ea89ca60c501c4795e663ef0b36738e082fb3ae Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 18 Jan 2019 18:09:42 -0800 Subject: [PATCH 38/50] script typo --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 8671b022..714c5ee1 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -17,7 +17,7 @@ test-template: &test-template command: | sudo pip install --upgrade pip setuptools; sudo pip install -r test-requirements.txt; - if [[ "$CIRCLE_JOB != test-3.3" ]] && [[ "$CIRCLE_JOB" != "test-3.4" ]]; then + if [[ "$CIRCLE_JOB" != "test-3.3" ]] && [[ "$CIRCLE_JOB" != "test-3.4" ]]; then sudo pip install -r consul-requirements.txt; fi; sudo python setup.py install; From a50e6f35d14de0b0689ee49d419f63b51bd049b4 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 18 Jan 2019 18:30:38 -0800 Subject: [PATCH 39/50] move all low-level feature store integration code into submodules --- .../impl/integrations/dynamodb/__init__.py | 0 .../dynamodb}/dynamodb_feature_store.py | 0 ldclient/impl/integrations/redis/__init__.py | 0 .../integrations/redis/redis_feature_store.py | 101 +++++++++++++++++ ldclient/integrations.py | 9 +- ldclient/redis_feature_store.py | 107 +----------------- testing/test_feature_store.py | 2 +- 7 files changed, 112 insertions(+), 107 deletions(-) create mode 100644 ldclient/impl/integrations/dynamodb/__init__.py rename ldclient/{ => impl/integrations/dynamodb}/dynamodb_feature_store.py (100%) create mode 100644 ldclient/impl/integrations/redis/__init__.py create mode 100644 ldclient/impl/integrations/redis/redis_feature_store.py diff --git a/ldclient/impl/integrations/dynamodb/__init__.py b/ldclient/impl/integrations/dynamodb/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ldclient/dynamodb_feature_store.py b/ldclient/impl/integrations/dynamodb/dynamodb_feature_store.py similarity index 100% rename from ldclient/dynamodb_feature_store.py rename to ldclient/impl/integrations/dynamodb/dynamodb_feature_store.py diff --git a/ldclient/impl/integrations/redis/__init__.py b/ldclient/impl/integrations/redis/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ldclient/impl/integrations/redis/redis_feature_store.py b/ldclient/impl/integrations/redis/redis_feature_store.py new file mode 100644 index 00000000..f0be83a4 --- /dev/null +++ b/ldclient/impl/integrations/redis/redis_feature_store.py @@ -0,0 +1,101 @@ +import json + +have_redis = False +try: + import redis + have_redis = True +except ImportError: + pass + +from ldclient import log +from ldclient.interfaces import FeatureStoreCore +from ldclient.versioned_data_kind import FEATURES + + +class _RedisFeatureStoreCore(FeatureStoreCore): + def __init__(self, url, prefix, max_connections): + if not have_redis: + raise NotImplementedError("Cannot use Redis feature store because redis package is not installed") + self._prefix = prefix or 'launchdarkly' + self._pool = redis.ConnectionPool.from_url(url=url, max_connections=max_connections) + self.test_update_hook = None # exposed for testing + log.info("Started RedisFeatureStore connected to URL: " + url + " using prefix: " + self._prefix) + + def _items_key(self, kind): + return "{0}:{1}".format(self._prefix, kind.namespace) + + def init_internal(self, all_data): + pipe = redis.Redis(connection_pool=self._pool).pipeline() + + all_count = 0 + + for kind, items in all_data.items(): + base_key = 
self._items_key(kind) + pipe.delete(base_key) + for key, item in items.items(): + item_json = json.dumps(item) + pipe.hset(base_key, key, item_json) + all_count = all_count + len(items) + pipe.execute() + log.info("Initialized RedisFeatureStore with %d items", all_count) + + def get_all_internal(self, kind): + r = redis.Redis(connection_pool=self._pool) + all_items = r.hgetall(self._items_key(kind)) + + if all_items is None or all_items is "": + all_items = {} + + results = {} + for key, item_json in all_items.items(): + key = key.decode('utf-8') # necessary in Python 3 + results[key] = json.loads(item_json.decode('utf-8')) + return results + + def get_internal(self, kind, key): + r = redis.Redis(connection_pool=self._pool) + item_json = r.hget(self._items_key(kind), key) + + if item_json is None or item_json is "": + log.debug("RedisFeatureStore: key %s not found in '%s'. Returning None.", key, kind.namespace) + return None + + return json.loads(item_json.decode('utf-8')) + + def upsert_internal(self, kind, item): + r = redis.Redis(connection_pool=self._pool) + base_key = self._items_key(kind) + key = item['key'] + item_json = json.dumps(item) + + while True: + pipeline = r.pipeline() + pipeline.watch(base_key) + old = self.get_internal(kind, key) + if self.test_update_hook is not None: + self.test_update_hook(base_key, key) + if old and old['version'] >= item['version']: + log.debug('RedisFeatureStore: Attempted to %s key: %s version %d with a version that is the same or older: %d in "%s"', + 'delete' if item.get('deleted') else 'update', + key, old['version'], item['version'], kind.namespace) + pipeline.unwatch() + return old + else: + pipeline.multi() + pipeline.hset(base_key, key, item_json) + try: + pipeline.execute() + # Unlike Redis implementations for other platforms, in redis-py a failed WATCH + # produces an exception rather than a null result from execute(). + except redis.exceptions.WatchError: + log.debug("RedisFeatureStore: concurrent modification detected, retrying") + continue + return item + + def initialized_internal(self): + r = redis.Redis(connection_pool=self._pool) + return r.exists(self._items_key(FEATURES)) + + def _before_update_transaction(self, base_key, key): + # exposed for testing + pass diff --git a/ldclient/integrations.py b/ldclient/integrations.py index d2d55354..5cfc468b 100644 --- a/ldclient/integrations.py +++ b/ldclient/integrations.py @@ -1,14 +1,17 @@ from ldclient.feature_store import CacheConfig from ldclient.feature_store_helpers import CachingStoreWrapper from ldclient.impl.integrations.consul.consul_feature_store import _ConsulFeatureStoreCore -from ldclient.dynamodb_feature_store import _DynamoDBFeatureStoreCore -from ldclient.redis_feature_store import _RedisFeatureStoreCore +from ldclient.impl.integrations.dynamodb.dynamodb_feature_store import _DynamoDBFeatureStoreCore +from ldclient.impl.integrations.redis.redis_feature_store import _RedisFeatureStoreCore class Consul(object): """Provides factory methods for integrations between the LaunchDarkly SDK and Consul. 
""" + """The key prefix that is used if you do not specify one.""" + DEFAULT_PREFIX = "launchdarkly" + @staticmethod def new_feature_store(host=None, port=None, @@ -28,7 +31,7 @@ def new_feature_store(host=None, :param string host: Hostname of the Consul server (uses "localhost" if omitted) :param int port: Port of the Consul server (uses 8500 if omitted) - :param string prefix: An optional namespace prefix to be prepended to all Consul keys + :param string prefix: A namespace prefix to be prepended to all Consul keys :param dict consul_opts: Optional parameters for configuring the Consul client, if you need to set any of them besides host and port, as defined in the python-consul API; see https://python-consul.readthedocs.io/en/latest/#consul diff --git a/ldclient/redis_feature_store.py b/ldclient/redis_feature_store.py index 16302212..ff93c402 100644 --- a/ldclient/redis_feature_store.py +++ b/ldclient/redis_feature_store.py @@ -1,17 +1,8 @@ -import json +from ldclient.impl.integrations.redis.redis_feature_store import _RedisFeatureStoreCore -have_redis = False -try: - import redis - have_redis = True -except ImportError: - pass - -from ldclient import log from ldclient.feature_store import CacheConfig from ldclient.feature_store_helpers import CachingStoreWrapper -from ldclient.interfaces import FeatureStore, FeatureStoreCore -from ldclient.versioned_data_kind import FEATURES +from ldclient.interfaces import FeatureStore # Note that this class is now just a facade around CachingStoreWrapper, which is in turn delegating @@ -22,8 +13,8 @@ class RedisFeatureStore(FeatureStore): """A Redis-backed implementation of :class:`ldclient.feature_store.FeatureStore`. - This implementation class is deprecated and may be changed or removed in the future. Please use - :func:`ldclient.integrations.Redis.new_feature_store()`. + This module and this implementation class are deprecated and may be changed or removed in the future. + Please use :func:`ldclient.integrations.Redis.new_feature_store()`. 
""" def __init__(self, url='redis://localhost:6379/0', @@ -31,8 +22,6 @@ def __init__(self, max_connections=16, expiration=15, capacity=1000): - if not have_redis: - raise NotImplementedError("Cannot use Redis feature store because redis package is not installed") self.core = _RedisFeatureStoreCore(url, prefix, max_connections) # exposed for testing self._wrapper = CachingStoreWrapper(self.core, CacheConfig(expiration=expiration, capacity=capacity)) @@ -54,91 +43,3 @@ def delete(self, kind, key, version): @property def initialized(self): return self._wrapper.initialized - - -class _RedisFeatureStoreCore(FeatureStoreCore): - def __init__(self, url, prefix, max_connections): - - self._prefix = prefix or 'launchdarkly' - self._pool = redis.ConnectionPool.from_url(url=url, max_connections=max_connections) - self.test_update_hook = None # exposed for testing - log.info("Started RedisFeatureStore connected to URL: " + url + " using prefix: " + self._prefix) - - def _items_key(self, kind): - return "{0}:{1}".format(self._prefix, kind.namespace) - - def init_internal(self, all_data): - pipe = redis.Redis(connection_pool=self._pool).pipeline() - - all_count = 0 - - for kind, items in all_data.items(): - base_key = self._items_key(kind) - pipe.delete(base_key) - for key, item in items.items(): - item_json = json.dumps(item) - pipe.hset(base_key, key, item_json) - all_count = all_count + len(items) - pipe.execute() - log.info("Initialized RedisFeatureStore with %d items", all_count) - - def get_all_internal(self, kind): - r = redis.Redis(connection_pool=self._pool) - all_items = r.hgetall(self._items_key(kind)) - - if all_items is None or all_items is "": - all_items = {} - - results = {} - for key, item_json in all_items.items(): - key = key.decode('utf-8') # necessary in Python 3 - results[key] = json.loads(item_json.decode('utf-8')) - return results - - def get_internal(self, kind, key): - r = redis.Redis(connection_pool=self._pool) - item_json = r.hget(self._items_key(kind), key) - - if item_json is None or item_json is "": - log.debug("RedisFeatureStore: key %s not found in '%s'. Returning None.", key, kind.namespace) - return None - - return json.loads(item_json.decode('utf-8')) - - def upsert_internal(self, kind, item): - r = redis.Redis(connection_pool=self._pool) - base_key = self._items_key(kind) - key = item['key'] - item_json = json.dumps(item) - - while True: - pipeline = r.pipeline() - pipeline.watch(base_key) - old = self.get_internal(kind, key) - if self.test_update_hook is not None: - self.test_update_hook(base_key, key) - if old and old['version'] >= item['version']: - log.debug('RedisFeatureStore: Attempted to %s key: %s version %d with a version that is the same or older: %d in "%s"', - 'delete' if item.get('deleted') else 'update', - key, old['version'], item['version'], kind.namespace) - pipeline.unwatch() - return old - else: - pipeline.multi() - pipeline.hset(base_key, key, item_json) - try: - pipeline.execute() - # Unlike Redis implementations for other platforms, in redis-py a failed WATCH - # produces an exception rather than a null result from execute(). 
- except redis.exceptions.WatchError: - log.debug("RedisFeatureStore: concurrent modification detected, retrying") - continue - return item - - def initialized_internal(self): - r = redis.Redis(connection_pool=self._pool) - return r.exists(self._items_key(FEATURES)) - - def _before_update_transaction(self, base_key, key): - # exposed for testing - pass diff --git a/testing/test_feature_store.py b/testing/test_feature_store.py index 6370a848..ce0150cf 100644 --- a/testing/test_feature_store.py +++ b/testing/test_feature_store.py @@ -12,8 +12,8 @@ except ImportError: pass -from ldclient.dynamodb_feature_store import _DynamoDBFeatureStoreCore, _DynamoDBHelpers from ldclient.feature_store import CacheConfig, InMemoryFeatureStore +from ldclient.impl.integrations.dynamodb.dynamodb_feature_store import _DynamoDBFeatureStoreCore, _DynamoDBHelpers from ldclient.integrations import Consul, DynamoDB, Redis from ldclient.redis_feature_store import RedisFeatureStore from ldclient.versioned_data_kind import FEATURES From 0baddab8a068d034ca73b5ae72b1aa304cb94314 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 28 Jan 2019 13:20:37 -0800 Subject: [PATCH 40/50] move file data source implementation --- ldclient/file_data_source.py | 255 +----------------- ldclient/impl/integrations/files/__init__.py | 0 .../integrations/files/file_data_source.py | 172 ++++++++++++ ldclient/integrations.py | 105 ++++++++ testing/test_file_data_source.py | 8 +- 5 files changed, 290 insertions(+), 250 deletions(-) create mode 100644 ldclient/impl/integrations/files/__init__.py create mode 100644 ldclient/impl/integrations/files/file_data_source.py diff --git a/ldclient/file_data_source.py b/ldclient/file_data_source.py index ebff765b..61088d50 100644 --- a/ldclient/file_data_source.py +++ b/ldclient/file_data_source.py @@ -1,29 +1,4 @@ -import json -import os -import six -import traceback - -have_yaml = False -try: - import yaml - have_yaml = True -except ImportError: - pass - -have_watchdog = False -try: - import watchdog - import watchdog.events - import watchdog.observers - have_watchdog = True -except ImportError: - pass - -from ldclient.interfaces import UpdateProcessor -from ldclient.repeating_timer import RepeatingTimer -from ldclient.util import log -from ldclient.versioned_data_kind import FEATURES, SEGMENTS - +from ldclient.impl.integrations.files.file_data_source import _FileDataSource class FileDataSource(UpdateProcessor): @classmethod @@ -32,80 +7,9 @@ def factory(cls, **kwargs): used in a test environment, to operate using a predetermined feature flag state without an actual LaunchDarkly connection. - To use this component, call `FileDataSource.factory`, and store its return value in the - `update_processor_class` property of your LaunchDarkly client configuration. In the options - to `factory`, set `paths` to the file path(s) of your data file(s): - :: - - factory = FileDataSource.factory(paths = [ myFilePath ]) - config = Config(update_processor_class = factory) - - This will cause the client not to connect to LaunchDarkly to get feature flags. The - client may still make network connections to send analytics events, unless you have disabled - this with Config.send_events or Config.offline. - - Flag data files can be either JSON or YAML (in order to use YAML, you must install the 'pyyaml' - package). They contain an object with three possible properties: - - * "flags": Feature flag definitions. - * "flagValues": Simplified feature flags that contain only a value. - * "segments": User segment definitions. 
- - The format of the data in "flags" and "segments" is defined by the LaunchDarkly application - and is subject to change. Rather than trying to construct these objects yourself, it is simpler - to request existing flags directly from the LaunchDarkly server in JSON format, and use this - output as the starting point for your file. In Linux you would do this: - :: - - curl -H "Authorization: {your sdk key}" https://app.launchdarkly.com/sdk/latest-all - - The output will look something like this (but with many more properties): - :: - - { - "flags": { - "flag-key-1": { - "key": "flag-key-1", - "on": true, - "variations": [ "a", "b" ] - } - }, - "segments": { - "segment-key-1": { - "key": "segment-key-1", - "includes": [ "user-key-1" ] - } - } - } - - Data in this format allows the SDK to exactly duplicate all the kinds of flag behavior supported - by LaunchDarkly. However, in many cases you will not need this complexity, but will just want to - set specific flag keys to specific values. For that, you can use a much simpler format: - :: - - { - "flagValues": { - "my-string-flag-key": "value-1", - "my-boolean-flag-key": true, - "my-integer-flag-key": 3 - } - } - - Or, in YAML: - :: - - flagValues: - my-string-flag-key: "value-1" - my-boolean-flag-key: true - my-integer-flag-key: 1 - - It is also possible to specify both "flags" and "flagValues", if you want some flags - to have simple values and others to have complex behavior. However, it is an error to use the - same flag key or segment key more than once, either in a single file or across multiple files. - - If the data source encounters any error in any file-- malformed content, a missing file, or a - duplicate key-- it will not load flags from any of the files. - + This module and this implementation class are deprecated and may be changed or removed in the future. + Please use :func:`ldclient.integrations.Files.new_data_source()`. + :param kwargs: See below @@ -123,150 +27,9 @@ def factory(cls, **kwargs): used only if auto_update is true, and if the native file-watching mechanism from 'watchdog' is not being used. The default value is 1 second. 
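+
+        For example, the old usage still works during the deprecation period (``myFilePath``
+        is a placeholder):
+        ::
+
+            factory = FileDataSource.factory(paths = [ myFilePath ])
+            config = Config(update_processor_class = factory)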
""" - return lambda config, store, ready : FileDataSource(store, kwargs, ready) - - def __init__(self, store, options, ready): - self._store = store - self._ready = ready - self._inited = False - self._paths = options.get('paths', []) - if isinstance(self._paths, six.string_types): - self._paths = [ self._paths ] - self._auto_update = options.get('auto_update', False) - self._auto_updater = None - self._poll_interval = options.get('poll_interval', 1) - self._force_polling = options.get('force_polling', False) # used only in tests - - def start(self): - self._load_all() - - if self._auto_update: - self._auto_updater = self._start_auto_updater() - - # We will signal readiness immediately regardless of whether the file load succeeded or failed - - # the difference can be detected by checking initialized() - self._ready.set() - - def stop(self): - if self._auto_updater: - self._auto_updater.stop() - - def initialized(self): - return self._inited - - def _load_all(self): - all_data = { FEATURES: {}, SEGMENTS: {} } - for path in self._paths: - try: - self._load_file(path, all_data) - except Exception as e: - log.error('Unable to load flag data from "%s": %s' % (path, repr(e))) - traceback.print_exc() - return - self._store.init(all_data) - self._inited = True - - def _load_file(self, path, all_data): - content = None - with open(path, 'r') as f: - content = f.read() - parsed = self._parse_content(content) - for key, flag in six.iteritems(parsed.get('flags', {})): - self._add_item(all_data, FEATURES, flag) - for key, value in six.iteritems(parsed.get('flagValues', {})): - self._add_item(all_data, FEATURES, self._make_flag_with_value(key, value)) - for key, segment in six.iteritems(parsed.get('segments', {})): - self._add_item(all_data, SEGMENTS, segment) - - def _parse_content(self, content): - if have_yaml: - return yaml.load(content) # pyyaml correctly parses JSON too - return json.loads(content) - - def _add_item(self, all_data, kind, item): - items = all_data[kind] - key = item.get('key') - if items.get(key) is None: - items[key] = item - else: - raise Exception('In %s, key "%s" was used more than once' % (kind.namespace, key)) - - def _make_flag_with_value(self, key, value): - return { - 'key': key, - 'on': True, - 'fallthrough': { - 'variation': 0 - }, - 'variations': [ value ] - } - - def _start_auto_updater(self): - resolved_paths = [] - for path in self._paths: - try: - resolved_paths.append(os.path.realpath(path)) - except: - log.warn('Cannot watch for changes to data file "%s" because it is an invalid path' % path) - if have_watchdog and not self._force_polling: - return FileDataSource.WatchdogAutoUpdater(resolved_paths, self._load_all) - else: - return FileDataSource.PollingAutoUpdater(resolved_paths, self._load_all, self._poll_interval) - - # Watch for changes to data files using the watchdog package. This uses native OS filesystem notifications - # if available for the current platform. 
- class WatchdogAutoUpdater(object): - def __init__(self, resolved_paths, reloader): - watched_files = set(resolved_paths) - - class LDWatchdogHandler(watchdog.events.FileSystemEventHandler): - def on_any_event(self, event): - if event.src_path in watched_files: - reloader() - - dir_paths = set() - for path in resolved_paths: - dir_paths.add(os.path.dirname(path)) - - self._observer = watchdog.observers.Observer() - handler = LDWatchdogHandler() - for path in dir_paths: - self._observer.schedule(handler, path) - self._observer.start() - - def stop(self): - self._observer.stop() - self._observer.join() - - # Watch for changes to data files by polling their modification times. This is used if auto-update is - # on but the watchdog package is not installed. - class PollingAutoUpdater(object): - def __init__(self, resolved_paths, reloader, interval): - self._paths = resolved_paths - self._reloader = reloader - self._file_times = self._check_file_times() - self._timer = RepeatingTimer(interval, self._poll) - self._timer.start() - - def stop(self): - self._timer.stop() - - def _poll(self): - new_times = self._check_file_times() - changed = False - for file_path, file_time in six.iteritems(self._file_times): - if new_times.get(file_path) is not None and new_times.get(file_path) != file_time: - changed = True - break - self._file_times = new_times - if changed: - self._reloader() - def _check_file_times(self): - ret = {} - for path in self._paths: - try: - ret[path] = os.path.getmtime(path) - except: - ret[path] = None - return ret + return lambda config, store, ready : _FileDataSource(store, ready, + paths=kwargs.get("paths"), + auto_update=kwargs.get("auto_update", False), + poll_interval=kwargs.get("poll_interval", 1), + force_polling=kwargs.get("force_polling", False)) diff --git a/ldclient/impl/integrations/files/__init__.py b/ldclient/impl/integrations/files/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ldclient/impl/integrations/files/file_data_source.py b/ldclient/impl/integrations/files/file_data_source.py new file mode 100644 index 00000000..9ba6e561 --- /dev/null +++ b/ldclient/impl/integrations/files/file_data_source.py @@ -0,0 +1,172 @@ +import json +import os +import six +import traceback + +have_yaml = False +try: + import yaml + have_yaml = True +except ImportError: + pass + +have_watchdog = False +try: + import watchdog + import watchdog.events + import watchdog.observers + have_watchdog = True +except ImportError: + pass + +from ldclient.interfaces import UpdateProcessor +from ldclient.repeating_timer import RepeatingTimer +from ldclient.util import log +from ldclient.versioned_data_kind import FEATURES, SEGMENTS + +class _FileDataSource(UpdateProcessor): + def __init__(self, store, ready, paths, auto_update, poll_interval, force_polling): + self._store = store + self._ready = ready + self._inited = False + self._paths = paths + if isinstance(self._paths, six.string_types): + self._paths = [ self._paths ] + self._auto_update = auto_update + self._auto_updater = None + self._poll_interval = poll_interval + self._force_polling = force_polling + + def start(self): + self._load_all() + + if self._auto_update: + self._auto_updater = self._start_auto_updater() + + # We will signal readiness immediately regardless of whether the file load succeeded or failed - + # the difference can be detected by checking initialized() + self._ready.set() + + def stop(self): + if self._auto_updater: + self._auto_updater.stop() + + def initialized(self): + return self._inited + 
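+    # _load_all reads every configured file into one data set and swaps it into the store
+    # atomically via init(); if any file fails to load or parse, the store is left untouched.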
+ def _load_all(self): + all_data = { FEATURES: {}, SEGMENTS: {} } + for path in self._paths: + try: + self._load_file(path, all_data) + except Exception as e: + log.error('Unable to load flag data from "%s": %s' % (path, repr(e))) + traceback.print_exc() + return + self._store.init(all_data) + self._inited = True + + def _load_file(self, path, all_data): + content = None + with open(path, 'r') as f: + content = f.read() + parsed = self._parse_content(content) + for key, flag in six.iteritems(parsed.get('flags', {})): + self._add_item(all_data, FEATURES, flag) + for key, value in six.iteritems(parsed.get('flagValues', {})): + self._add_item(all_data, FEATURES, self._make_flag_with_value(key, value)) + for key, segment in six.iteritems(parsed.get('segments', {})): + self._add_item(all_data, SEGMENTS, segment) + + def _parse_content(self, content): + if have_yaml: + return yaml.load(content) # pyyaml correctly parses JSON too + return json.loads(content) + + def _add_item(self, all_data, kind, item): + items = all_data[kind] + key = item.get('key') + if items.get(key) is None: + items[key] = item + else: + raise Exception('In %s, key "%s" was used more than once' % (kind.namespace, key)) + + def _make_flag_with_value(self, key, value): + return { + 'key': key, + 'on': True, + 'fallthrough': { + 'variation': 0 + }, + 'variations': [ value ] + } + + def _start_auto_updater(self): + resolved_paths = [] + for path in self._paths: + try: + resolved_paths.append(os.path.realpath(path)) + except: + log.warn('Cannot watch for changes to data file "%s" because it is an invalid path' % path) + if have_watchdog and not self._force_polling: + return _FileDataSource.WatchdogAutoUpdater(resolved_paths, self._load_all) + else: + return _FileDataSource.PollingAutoUpdater(resolved_paths, self._load_all, self._poll_interval) + + # Watch for changes to data files using the watchdog package. This uses native OS filesystem notifications + # if available for the current platform. + class WatchdogAutoUpdater(object): + def __init__(self, resolved_paths, reloader): + watched_files = set(resolved_paths) + + class LDWatchdogHandler(watchdog.events.FileSystemEventHandler): + def on_any_event(self, event): + if event.src_path in watched_files: + reloader() + + dir_paths = set() + for path in resolved_paths: + dir_paths.add(os.path.dirname(path)) + + self._observer = watchdog.observers.Observer() + handler = LDWatchdogHandler() + for path in dir_paths: + self._observer.schedule(handler, path) + self._observer.start() + + def stop(self): + self._observer.stop() + self._observer.join() + + # Watch for changes to data files by polling their modification times. This is used if auto-update is + # on but the watchdog package is not installed. 
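+    # A shorter poll interval detects changes sooner at the cost of more os.path.getmtime()
+    # calls; the default of 1 second is normally adequate for test fixtures.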
+ class PollingAutoUpdater(object): + def __init__(self, resolved_paths, reloader, interval): + self._paths = resolved_paths + self._reloader = reloader + self._file_times = self._check_file_times() + self._timer = RepeatingTimer(interval, self._poll) + self._timer.start() + + def stop(self): + self._timer.stop() + + def _poll(self): + new_times = self._check_file_times() + changed = False + for file_path, file_time in six.iteritems(self._file_times): + if new_times.get(file_path) is not None and new_times.get(file_path) != file_time: + changed = True + break + self._file_times = new_times + if changed: + self._reloader() + + def _check_file_times(self): + ret = {} + for path in self._paths: + try: + ret[path] = os.path.getmtime(path) + except: + ret[path] = None + return ret diff --git a/ldclient/integrations.py b/ldclient/integrations.py index 5cfc468b..fcc89abc 100644 --- a/ldclient/integrations.py +++ b/ldclient/integrations.py @@ -2,6 +2,7 @@ from ldclient.feature_store_helpers import CachingStoreWrapper from ldclient.impl.integrations.consul.consul_feature_store import _ConsulFeatureStoreCore from ldclient.impl.integrations.dynamodb.dynamodb_feature_store import _DynamoDBFeatureStoreCore +from ldclient.impl.integrations.files.file_data_source import _FileDataSource from ldclient.impl.integrations.redis.redis_feature_store import _RedisFeatureStoreCore @@ -111,3 +112,107 @@ def new_feature_store(url='redis://localhost:6379/0', wrapper = CachingStoreWrapper(core, caching) wrapper.core = core # exposed for testing return wrapper + + +class Files(object): + """Provides factory methods for integrations with filesystem data. + """ + + @staticmethod + def new_data_source(paths, auto_update=False, poll_interval=1, force_polling=False): + """Provides a way to use local files as a source of feature flag state. This would typically be + used in a test environment, to operate using a predetermined feature flag state without an + actual LaunchDarkly connection. + + To use this component, call `new_data_source`, specifying the file path(s) of your data file(s) + in the `path` parameter; then put the value returned by this method into the `update_processor_class` + property of your LaunchDarkly client configuration (:class:ldclient.config.Config). + :: + + data_source = LaunchDarkly::Integrations::Files.new_data_source(paths=[ myFilePath ]) + config = Config(update_processor_class=data_source) + + This will cause the client not to connect to LaunchDarkly to get feature flags. The + client may still make network connections to send analytics events, unless you have disabled + this with Config.send_events or Config.offline. + + Flag data files can be either JSON or YAML (in order to use YAML, you must install the 'pyyaml' + package). They contain an object with three possible properties: + + * "flags": Feature flag definitions. + * "flagValues": Simplified feature flags that contain only a value. + * "segments": User segment definitions. + + The format of the data in "flags" and "segments" is defined by the LaunchDarkly application + and is subject to change. Rather than trying to construct these objects yourself, it is simpler + to request existing flags directly from the LaunchDarkly server in JSON format, and use this + output as the starting point for your file. 
In Linux you would do this: + :: + + curl -H "Authorization: {your sdk key}" https://app.launchdarkly.com/sdk/latest-all + + The output will look something like this (but with many more properties): + :: + + { + "flags": { + "flag-key-1": { + "key": "flag-key-1", + "on": true, + "variations": [ "a", "b" ] + } + }, + "segments": { + "segment-key-1": { + "key": "segment-key-1", + "includes": [ "user-key-1" ] + } + } + } + + Data in this format allows the SDK to exactly duplicate all the kinds of flag behavior supported + by LaunchDarkly. However, in many cases you will not need this complexity, but will just want to + set specific flag keys to specific values. For that, you can use a much simpler format: + :: + + { + "flagValues": { + "my-string-flag-key": "value-1", + "my-boolean-flag-key": true, + "my-integer-flag-key": 3 + } + } + + Or, in YAML: + :: + + flagValues: + my-string-flag-key: "value-1" + my-boolean-flag-key: true + my-integer-flag-key: 1 + + It is also possible to specify both "flags" and "flagValues", if you want some flags + to have simple values and others to have complex behavior. However, it is an error to use the + same flag key or segment key more than once, either in a single file or across multiple files. + + If the data source encounters any error in any file-- malformed content, a missing file, or a + duplicate key-- it will not load flags from any of the files. + + :param array paths: The paths of the source files for loading flag data. These may be absolute paths + or relative to the current working directory. Files will be parsed as JSON unless the 'pyyaml' + package is installed, in which case YAML is also allowed. + :param bool auto_update: (default: false) True if the data source should watch for changes to the source file(s) + and reload flags whenever there is a change. The default implementation of this feature is based on + polling the filesystem, which may not perform well; if you install the 'watchdog' package (not + included by default, to avoid adding unwanted dependencies to the SDK), its native file watching + mechanism will be used instead. Note that auto-updating will only work if all of the files you + specified have valid directory paths at startup time. + :param float poll_interval: (default: 1) The minimum interval, in seconds, between checks for file + modifications-- used only if `auto_update` is true, and if the native file-watching mechanism from + `watchdog` is not being used. + :param bool force_polling: (default: false) True if the data source should implement auto-update via + polling the filesystem even if a native mechanism is available. This is mainly for SDK testing. 
+ + :return: an object (actually a lambda) to be stored in the `update_processor_class` configuration property + """ + return lambda config, store, ready : _FileDataSource(store, ready, paths, auto_update, poll_interval, force_polling) diff --git a/testing/test_file_data_source.py b/testing/test_file_data_source.py index 68d1e5b7..2e232ec8 100644 --- a/testing/test_file_data_source.py +++ b/testing/test_file_data_source.py @@ -9,7 +9,7 @@ from ldclient.client import LDClient from ldclient.config import Config from ldclient.feature_store import InMemoryFeatureStore -from ldclient.file_data_source import FileDataSource +from ldclient.integrations import Files from ldclient.versioned_data_kind import FEATURES, SEGMENTS @@ -94,7 +94,7 @@ def teardown_function(): def make_data_source(**kwargs): global data_source - data_source = FileDataSource.factory(**kwargs)(Config(), store, ready) + data_source = Files.new_data_source(**kwargs)(Config(), store, ready) return data_source def make_temp_file(content): @@ -217,7 +217,7 @@ def test_reloads_modified_file_in_polling_mode(): def test_evaluates_full_flag_with_client_as_expected(): path = make_temp_file(all_properties_json) try: - factory = FileDataSource.factory(paths = path) + factory = Files.new_data_source(paths = path) client = LDClient(config=Config(update_processor_class = factory, send_events = False)) value = client.variation('flag1', { 'key': 'user' }, '') assert value == 'on' @@ -229,7 +229,7 @@ def test_evaluates_full_flag_with_client_as_expected(): def test_evaluates_simplified_flag_with_client_as_expected(): path = make_temp_file(all_properties_json) try: - factory = FileDataSource.factory(paths = path) + factory = Files.new_data_source(paths = path) client = LDClient(config=Config(update_processor_class = factory, send_events = False)) value = client.variation('flag2', { 'key': 'user' }, '') assert value == 'value2' From c8585baab7ee86b2087653451e47013c034b8cd6 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 28 Jan 2019 14:11:58 -0800 Subject: [PATCH 41/50] don't need future.with_statement in Python 2.6+ --- ldclient/client.py | 2 +- ldclient/util.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/ldclient/client.py b/ldclient/client.py index 30c37e53..9cab10b6 100644 --- a/ldclient/client.py +++ b/ldclient/client.py @@ -1,4 +1,4 @@ -from __future__ import division, with_statement, absolute_import +from __future__ import division, absolute_import import hashlib import hmac diff --git a/ldclient/util.py b/ldclient/util.py index 618a7d9e..4cfb0324 100644 --- a/ldclient/util.py +++ b/ldclient/util.py @@ -1,4 +1,4 @@ -from __future__ import division, with_statement, absolute_import +from __future__ import division, absolute_import import certifi import logging From 2a6d53be3c9e1e2e7df87d3f89a43227cb6d402e Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 28 Jan 2019 14:15:16 -0800 Subject: [PATCH 42/50] don't need future.absolute_import in Python 2.6+ --- ldclient/client.py | 2 +- ldclient/event_processor.py | 2 -- ldclient/feature_requester.py | 2 -- ldclient/sse_client.py | 2 -- ldclient/streaming.py | 1 - ldclient/util.py | 2 +- 6 files changed, 2 insertions(+), 9 deletions(-) diff --git a/ldclient/client.py b/ldclient/client.py index 9cab10b6..29d0c756 100644 --- a/ldclient/client.py +++ b/ldclient/client.py @@ -1,4 +1,4 @@ -from __future__ import division, absolute_import +from __future__ import division import hashlib import hmac diff --git a/ldclient/event_processor.py b/ldclient/event_processor.py index 
3b89420f..9a0cae83 100644 --- a/ldclient/event_processor.py +++ b/ldclient/event_processor.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import - from collections import namedtuple from email.utils import parsedate import errno diff --git a/ldclient/feature_requester.py b/ldclient/feature_requester.py index 786c1708..046c594f 100644 --- a/ldclient/feature_requester.py +++ b/ldclient/feature_requester.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import - from collections import namedtuple import json import urllib3 diff --git a/ldclient/sse_client.py b/ldclient/sse_client.py index c97eb2d4..7e792961 100644 --- a/ldclient/sse_client.py +++ b/ldclient/sse_client.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import - import re import time import warnings diff --git a/ldclient/streaming.py b/ldclient/streaming.py index bac83433..20599eb1 100644 --- a/ldclient/streaming.py +++ b/ldclient/streaming.py @@ -1,4 +1,3 @@ -from __future__ import absolute_import from collections import namedtuple import json diff --git a/ldclient/util.py b/ldclient/util.py index 4cfb0324..4612f871 100644 --- a/ldclient/util.py +++ b/ldclient/util.py @@ -1,4 +1,4 @@ -from __future__ import division, absolute_import +from __future__ import division import certifi import logging From c32793ade292b7f80b1b80fafbed0adbb76c44c2 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 28 Jan 2019 14:19:54 -0800 Subject: [PATCH 43/50] don't need future.print_function when you're printing a single string with parentheses --- demo/demo.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/demo/demo.py b/demo/demo.py index 987a05d4..8ac745f4 100644 --- a/demo/demo.py +++ b/demo/demo.py @@ -1,5 +1,3 @@ -from __future__ import print_function - import logging import sys From 4971d17eaa4b79528128a91910e1fe63b2afdfba Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 28 Jan 2019 14:24:21 -0800 Subject: [PATCH 44/50] don't need future.division since we're not using the / operator --- ldclient/client.py | 2 -- ldclient/util.py | 2 -- 2 files changed, 4 deletions(-) diff --git a/ldclient/client.py b/ldclient/client.py index 29d0c756..6d6b32c7 100644 --- a/ldclient/client.py +++ b/ldclient/client.py @@ -1,5 +1,3 @@ -from __future__ import division - import hashlib import hmac import threading diff --git a/ldclient/util.py b/ldclient/util.py index 4612f871..fbb2f11d 100644 --- a/ldclient/util.py +++ b/ldclient/util.py @@ -1,5 +1,3 @@ -from __future__ import division - import certifi import logging import sys From 0abadf1efab4637f48c251d7bceaed1d724030e5 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 28 Jan 2019 14:24:35 -0800 Subject: [PATCH 45/50] rm unused dependency --- requirements.txt | 1 - 1 file changed, 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 8787ac53..f86f3039 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,7 +1,6 @@ backoff>=1.4.3 certifi>=2018.4.16 expiringdict>=1.1.4 -future>=0.16.0 six>=1.10.0 pyRFC3339>=1.0 jsonpickle==0.9.3 From e228e90771c188893597b5e49c7559efac332a82 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 28 Jan 2019 15:17:12 -0800 Subject: [PATCH 46/50] Revert "rm unused dependency" This reverts commit 0abadf1efab4637f48c251d7bceaed1d724030e5. 
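
For reference, the convention that replaces `from __future__ import division` is to make each
division explicit, so it behaves identically on Python 2 and 3 (a sketch of the convention,
not code from this change):

    samples = 7
    buckets = 2
    floor_count = samples // buckets        # floor division on both 2.x and 3.x -> 3
    ratio = float(samples) / buckets        # true division via an explicit cast -> 3.5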
--- requirements.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/requirements.txt b/requirements.txt index f86f3039..8787ac53 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,6 +1,7 @@ backoff>=1.4.3 certifi>=2018.4.16 expiringdict>=1.1.4 +future>=0.16.0 six>=1.10.0 pyRFC3339>=1.0 jsonpickle==0.9.3 From 122d7a613b3e3228e98fa63a0b01b10b038e389f Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 28 Jan 2019 17:11:06 -0800 Subject: [PATCH 47/50] don't need builtins.object unless we're defining an iterator, and even then we don't need it --- ldclient/client.py | 2 -- ldclient/sse_client.py | 4 ++++ requirements.txt | 1 - testing/test_ldclient.py | 1 - testing/test_user_filter.py | 1 - 5 files changed, 4 insertions(+), 5 deletions(-) diff --git a/ldclient/client.py b/ldclient/client.py index 6d6b32c7..ff96475b 100644 --- a/ldclient/client.py +++ b/ldclient/client.py @@ -3,8 +3,6 @@ import threading import traceback -from builtins import object - from ldclient.config import Config as Config from ldclient.event_processor import NullEventProcessor from ldclient.feature_requester import FeatureRequesterImpl diff --git a/ldclient/sse_client.py b/ldclient/sse_client.py index 7e792961..5b41413b 100644 --- a/ldclient/sse_client.py +++ b/ldclient/sse_client.py @@ -109,6 +109,10 @@ def __next__(self): return msg + # The following two lines make our iterator class compatible with both Python 2.x and 3.x, + # even though they expect different magic method names. We could accomplish the same thing + # by importing builtins.object and deriving from that, but this way it's easier to see + # what we're doing. if six.PY2: next = __next__ diff --git a/requirements.txt b/requirements.txt index 8787ac53..f86f3039 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,7 +1,6 @@ backoff>=1.4.3 certifi>=2018.4.16 expiringdict>=1.1.4 -future>=0.16.0 six>=1.10.0 pyRFC3339>=1.0 jsonpickle==0.9.3 diff --git a/testing/test_ldclient.py b/testing/test_ldclient.py index a31d2324..0e6c33a2 100644 --- a/testing/test_ldclient.py +++ b/testing/test_ldclient.py @@ -1,4 +1,3 @@ -from builtins import object from ldclient.client import LDClient, Config from ldclient.event_processor import NullEventProcessor from ldclient.feature_store import InMemoryFeatureStore diff --git a/testing/test_user_filter.py b/testing/test_user_filter.py index 15550541..e1711ffb 100644 --- a/testing/test_user_filter.py +++ b/testing/test_user_filter.py @@ -1,4 +1,3 @@ -from builtins import object import json from ldclient.client import Config from ldclient.user_filter import UserFilter From 6a45e700f1cc7e12b4ad44b95c1a3b05208dc15b Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 29 Jan 2019 12:49:19 -0800 Subject: [PATCH 48/50] update docs with note on portability --- CONTRIBUTING.md | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 88668de9..fe972301 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -8,7 +8,7 @@ Development information (for developing this module itself) 1. One-time setup: - mkvirtualenv python-client + mkvirtualenv python-client 1. When working on the project be sure to activate the python-client virtualenv using the technique of your choosing. @@ -17,11 +17,15 @@ Development information (for developing this module itself) pip install -r requirements.txt pip install -r test-requirements.txt -1. Run tests: You'll need redis running locally on its default port of 6379. +1. 
When running unit tests, in order for `test_feature_store.py` to run, you'll need all of the supported databases (Redis, Consul, DynamoDB) running locally on their default ports. + 1. If you want integration tests to run, set the ```LD_SDK_KEY``` environment variable to a valid production SDK Key. + 1. ```$ py.test testing``` -Developing with different python versions +1. All code must be compatible with all supported Python versions as described in README. Most portability issues are addressed by using the `six` package. We are avoiding the use of `__future__` imports, since they can easily be omitted by mistake causing code in one file to behave differently from another; instead, whenever possible, use an explicit approach that makes it clear what the desired behavior is in all Python versions (e.g. if you want to do floor division, use `//`; if you want to divide as floats, explicitly cast to floats). + +Developing with different Python versions ----------------------------------------- Example for switching to python 3: From 858e001970ea0011a4ae5b84bba70050331aff38 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 29 Jan 2019 12:50:09 -0800 Subject: [PATCH 49/50] typo --- CONTRIBUTING.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index fe972301..af5083c2 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -28,6 +28,6 @@ Development information (for developing this module itself) Developing with different Python versions ----------------------------------------- -Example for switching to python 3: +Example for switching to Python 3: ```virtualenv -p `which python3` ~/.virtualenvs/python-client``` \ No newline at end of file From d4d4b8aa2b07e5328c43e90e3244e58a2006bdb6 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 30 Jan 2019 17:13:12 -0800 Subject: [PATCH 50/50] update package metadata prior to release --- setup.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/setup.py b/setup.py index bf59d9a3..8a075cf8 100644 --- a/setup.py +++ b/setup.py @@ -19,12 +19,16 @@ def parse_requirements(filename): install_reqs = parse_requirements('requirements.txt') test_reqs = parse_requirements('test-requirements.txt') redis_reqs = parse_requirements('redis-requirements.txt') +consul_reqs = parse_requirements('consul-requirements.txt') +dynamodb_reqs = parse_requirements('dynamodb-requirements.txt') # reqs is a list of requirement # e.g. ['django==1.5.1', 'mezzanine==1.4.6'] reqs = [ir for ir in install_reqs] testreqs = [ir for ir in test_reqs] redisreqs = [ir for ir in redis_reqs] +consulreqs = [ir for ir in consul_reqs] +dynamodbreqs = [ir for ir in dynamodb_reqs] class PyTest(Command): @@ -63,11 +67,14 @@ def run(self): 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', 'Topic :: Software Development', 'Topic :: Software Development :: Libraries', ], extras_require={ - "redis": redisreqs + "redis": redisreqs, + "consul": consulreqs, + "dynamodb": dynamodbreqs }, tests_require=testreqs, cmdclass={'test': PyTest},
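+    # With the extras above, the optional database dependencies can be installed together with
+    # the SDK, e.g. `pip install ldclient-py[consul]` or `pip install ldclient-py[redis,dynamodb]`
+    # (a sketch, assuming the package's published PyPI name, ldclient-py).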