From 280d1c40bac46790477b1b2defe5b4be7ebf0d7a Mon Sep 17 00:00:00 2001 From: Dan Richelson Date: Tue, 28 Jun 2016 13:55:53 -0700 Subject: [PATCH 01/42] [wip] Added polling support + indirect messages in streaming. Both polling and streaming restwrappers pass, but local tests do not pass. --- demo/demo.py | 24 ++- ldclient/client.py | 188 +++++++----------- ldclient/feature_store.py | 135 +++++++++++++ ldclient/interfaces.py | 38 +++- ldclient/polling.py | 38 ++++ ldclient/{requests.py => requester.py} | 140 ++++++------- ldclient/streaming.py | 60 ++++++ ldclient/twisted_impls.py | 13 +- ldclient/twisted_redis.py | 11 +- ldclient/util.py | 7 +- pytest.ini | 1 + test-requirements.txt | 3 +- testing/server_util.py | 4 +- testing/test_inmemoryfeaturestore.py | 2 +- ...gration.py => test_integration_polling.py} | 22 +- testing/test_integration_twisted.py | 2 +- testing/test_ldclient.py | 46 +++-- twisted-requirements.txt | 3 +- 18 files changed, 489 insertions(+), 248 deletions(-) create mode 100644 ldclient/feature_store.py create mode 100644 ldclient/polling.py rename ldclient/{requests.py => requester.py} (52%) create mode 100644 ldclient/streaming.py rename testing/{test_integration.py => test_integration_polling.py} (75%) diff --git a/demo/demo.py b/demo/demo.py index 4b57bd35..f590ea26 100644 --- a/demo/demo.py +++ b/demo/demo.py @@ -1,7 +1,25 @@ from __future__ import print_function -from ldclient import LDClient +from ldclient import LDClient, Config +import logging +import sys +import time + +root = logging.getLogger() +root.setLevel(logging.DEBUG) + +ch = logging.StreamHandler(sys.stdout) +ch.setLevel(logging.DEBUG) +formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s') +ch.setFormatter(formatter) +root.addHandler(ch) if __name__ == '__main__': - apiKey = 'feefifofum' - client = LDClient(apiKey) + apiKey = 'sdk-7c55610f-385f-46c5-a3a6-2fdc9ccf3034' + config = Config(stream=True) + client = LDClient(apiKey, config) + user = {u'key': 'userKey'} + time.sleep(5) + print(client.toggle("update-app", user, False)) print(client.api_key) + + client._stop_consumers() diff --git a/ldclient/client.py b/ldclient/client.py index 63daefe5..39af4b46 100644 --- a/ldclient/client.py +++ b/ldclient/client.py @@ -1,12 +1,17 @@ from __future__ import division, with_statement, absolute_import -from builtins import object + import time -from ldclient.interfaces import FeatureStore -from ldclient.requests import RequestsStreamProcessor, RequestsEventConsumer, RequestsFeatureRequester -from ldclient.util import check_uwsgi, _evaluate, log import requests +from builtins import object + +from ldclient.feature_store import InMemoryFeatureStore +from ldclient.interfaces import FeatureStore +from ldclient.polling import PollingUpdateProcessor +from ldclient.requester import RequestsEventConsumer, FeatureRequesterImpl +from ldclient.streaming import StreamingUpdateProcessor +from ldclient.util import check_uwsgi, _evaluate, log # noinspection PyBroadException try: @@ -18,11 +23,8 @@ from cachecontrol import CacheControl from threading import Lock -from ldclient.rwlock import ReadWriteLock - class Config(object): - def __init__(self, base_uri='https://app.launchdarkly.com', events_uri='https://events.launchdarkly.com', @@ -35,19 +37,21 @@ def __init__(self, verify=True, defaults=None, events=True, - stream_processor_class=None, - feature_store_class=None, - feature_requester_class=None, + update_processor_class=None, + poll_interval=1, + use_ldd=False, + 
feature_store=InMemoryFeatureStore(), + feature_requester_class=FeatureRequesterImpl, consumer_class=None): """ - :param stream_processor_class: A factory for a StreamProcessor implementation taking the api key, config, + :param update_processor_class: A factory for an UpdateProcessor implementation taking the api key, config, and FeatureStore implementation - :type stream_processor_class: (str, Config, FeatureStore) -> StreamProcessor - :param feature_store_class: A factory for a FeatureStore implementation - :type feature_store_class: () -> FeatureStore + :type update_processor_class: (str, Config, FeatureStore) -> UpdateProcessor + :param feature_store: A FeatureStore implementation + :type feature_store: FeatureStore :param feature_requester_class: A factory for a FeatureRequester implementation taking the api key and config - :type feature_requester_class: (str, Config) -> FeatureRequester + :type feature_requester_class: (str, Config, FeatureStore) -> FeatureRequester :param consumer_class: A factory for an EventConsumer implementation taking the event queue, api key, and config :type consumer_class: (queue.Queue, str, Config) -> EventConsumer """ @@ -55,16 +59,28 @@ def __init__(self, defaults = {} self.base_uri = base_uri.rstrip('\\') + self.get_latest_features_uri = self.base_uri + '/api/eval/latest-features' self.events_uri = events_uri.rstrip('\\') self.stream_uri = stream_uri.rstrip('\\') - self.stream = stream - self.stream_processor_class = RequestsStreamProcessor if not stream_processor_class else stream_processor_class - self.feature_store_class = InMemoryFeatureStore if not feature_store_class else feature_store_class + self.stream_features_uri = self.stream_uri + '/features' + + if update_processor_class: + self.update_processor_class = update_processor_class + else: + if stream: + self.update_processor_class = StreamingUpdateProcessor + else: + self.update_processor_class = PollingUpdateProcessor + + if poll_interval < 1: + poll_interval = 1 + self.poll_interval = poll_interval + self.use_ldd = use_ldd + self.feature_store = feature_store self.consumer_class = RequestsEventConsumer if not consumer_class else consumer_class - self.feature_requester_class = RequestsFeatureRequester if not feature_requester_class else \ - feature_requester_class + self.feature_requester_class = FeatureRequesterImpl if not feature_requester_class else feature_requester_class self.connect = connect_timeout - self.read = read_timeout + self.read_timeout = read_timeout self.upload_limit = upload_limit self.capacity = capacity self.verify = verify @@ -79,73 +95,7 @@ def default(cls): return cls() -class InMemoryFeatureStore(FeatureStore): - - def __init__(self): - self._lock = ReadWriteLock() - self._initialized = False - self._features = {} - - def get(self, key): - try: - self._lock.rlock() - f = self._features.get(key) - if f is None or 'deleted' in f and f['deleted']: - return None - return f - finally: - self._lock.runlock() - - def all(self): - try: - self._lock.rlock() - return dict((k, f) for k, f in self._features.items() if ('deleted' not in f) or not f['deleted']) - finally: - self._lock.runlock() - - def init(self, features): - try: - self._lock.lock() - self._features = dict(features) - self._initialized = True - finally: - self._lock.unlock() - - # noinspection PyShadowingNames - def delete(self, key, version): - try: - self._lock.lock() - f = self._features.get(key) - if f is not None and f['version'] < version: - f['deleted'] = True - f['version'] = version - elif f is None: - f 
= {'deleted': True, 'version': version} - self._features[key] = f - finally: - self._lock.unlock() - - def upsert(self, key, feature): - try: - self._lock.lock() - f = self._features.get(key) - if f is None or f['version'] < feature['version']: - self._features[key] = feature - log.debug("Updated feature {} to version {}".format(key, feature['version'])) - finally: - self._lock.unlock() - - @property - def initialized(self): - try: - self._lock.rlock() - return self._initialized - finally: - self._lock.runlock() - - class LDClient(object): - def __init__(self, api_key, config=None): check_uwsgi() self._api_key = api_key @@ -156,18 +106,20 @@ def __init__(self, api_key, config=None): self._offline = False self._lock = Lock() - self._store = self._config.feature_store_class() + self._store = self._config.feature_store """ :type: FeatureStore """ self._feature_requester = self._config.feature_requester_class( api_key, self._config) """ :type: FeatureRequester """ - self._stream_processor = None - if self._config.stream: - self._stream_processor = self._config.stream_processor_class( - api_key, self._config, self._store) - self._stream_processor.start() + self._update_processor = self._config.update_processor_class( + api_key, self._config, self._feature_requester, self._store) + """ :type: UpdateProcessor """ + + # TODO: block until intialized.. + self._update_processor.start() + log.info("Started LaunchDarkly Client") @property def api_key(self): @@ -183,8 +135,8 @@ def _check_consumer(self): def _stop_consumers(self): if self._consumer and self._consumer.is_alive(): self._consumer.stop() - if self._stream_processor and self._stream_processor.is_alive(): - self._stream_processor.stop() + if self._update_processor and self._update_processor.is_alive(): + self._update_processor.stop() def _send(self, event): if self._offline or not self._config.events: @@ -216,6 +168,9 @@ def set_online(self): def is_offline(self): return self._offline + def is_initialized(self): + return self._offline or self._config.use_ldd or self._update_processor.initialized + def flush(self): if self._offline: return @@ -226,36 +181,39 @@ def get_flag(self, key, user, default=False): return self.toggle(key, user, default) def toggle(self, key, user, default=False): - self._sanitize_user(user) + log.info("togglin..") default = self._config.get_default(key, default) + def send_event(value): + self._send({'kind': 'feature', 'key': key, + 'user': user, 'value': value, 'default': default}) + if self._offline: + # log warning? + send_event(default) return default - def cb(feature): - if feature is None: - val = default - else: - val = _evaluate(feature, user) - if val is None: - val = default - self._send({'kind': 'feature', 'key': key, - 'user': user, 'value': val, 'default': default}) - return val + self._sanitize_user(user) - if self._config.stream and self._store.initialized: - return cb(self._store.get(key)) + if 'key' in user and user['key']: + feature = self._store.get(key) else: - # noinspection PyBroadException - try: - return self._feature_requester.get(key, cb) - except Exception: - log.exception( - 'Unhandled exception. Returning default value for flag.') - return cb(None) + # log warning? + send_event(default) + return default + + val = _evaluate(feature, user) + if val is None: + # log warning? 
+ send_event(default) + return default + + send_event(val) + return val def _sanitize_user(self, user): if 'key' in user: user['key'] = str(user['key']) + __all__ = ['LDClient', 'Config'] diff --git a/ldclient/feature_store.py b/ldclient/feature_store.py new file mode 100644 index 00000000..d681d073 --- /dev/null +++ b/ldclient/feature_store.py @@ -0,0 +1,135 @@ +from ldclient.util import log +from ldclient.interfaces import FeatureStore +from ldclient.rwlock import ReadWriteLock + + +class InMemoryFeatureStore(FeatureStore): + + def __init__(self): + self._lock = ReadWriteLock() + self._initialized = False + self._features = {} + + def get(self, key): + try: + self._lock.rlock() + f = self._features.get(key) + if f is None or 'deleted' in f and f['deleted']: + return None + return f + finally: + self._lock.runlock() + + def all(self): + try: + self._lock.rlock() + return dict((k, f) for k, f in self._features.items() if ('deleted' not in f) or not f['deleted']) + finally: + self._lock.runlock() + + def init(self, features): + try: + self._lock.lock() + self._features = dict(features) + self._initialized = True + log.debug("Initialized feature store with " + str(len(features)) + " features") + finally: + self._lock.unlock() + + # noinspection PyShadowingNames + def delete(self, key, version): + try: + self._lock.lock() + f = self._features.get(key) + if f is not None and f['version'] < version: + f['deleted'] = True + f['version'] = version + elif f is None: + f = {'deleted': True, 'version': version} + self._features[key] = f + finally: + self._lock.unlock() + + def upsert(self, key, feature): + try: + self._lock.lock() + f = self._features.get(key) + if f is None or f['version'] < feature['version']: + self._features[key] = feature + log.debug("Updated feature {} to version {}".format(key, feature['version'])) + finally: + self._lock.unlock() + + + @property + def initialized(self): + try: + self._lock.rlock() + return self._initialized + finally: + self._lock.runlock() + + ### Old version: + # class InMemoryFeatureStore(FeatureStore): + # + # def __init__(self): + # self._lock = ReadWriteLock() + # self._initialized = False + # self._features = {} + # + # def get(self, key): + # try: + # self._lock.rlock() + # f = self._features.get(key) + # if f is None or 'deleted' in f and f['deleted']: + # return None + # return f + # finally: + # self._lock.runlock() + # + # def all(self): + # try: + # self._lock.rlock() + # return dict((k, f) for k, f in self._features.items() if ('deleted' not in f) or not f['deleted']) + # finally: + # self._lock.runlock() + # + # def init(self, features): + # try: + # self._lock.lock() + # self._features = dict(features) + # self._initialized = True + # finally: + # self._lock.unlock() + # + # # noinspection PyShadowingNames + # def delete(self, key, version): + # try: + # self._lock.lock() + # f = self._features.get(key) + # if f is not None and f['version'] < version: + # f['deleted'] = True + # f['version'] = version + # elif f is None: + # f = {'deleted': True, 'version': version} + # self._features[key] = f + # finally: + # self._lock.unlock() + # + # def upsert(self, key, feature): + # try: + # self._lock.lock() + # f = self._features.get(key) + # if f is None or f['version'] < feature['version']: + # self._features[key] = feature + # log.debug("Updated feature {} to version {}".format(key, feature['version'])) + # finally: + # self._lock.unlock() + # + # @property + # def initialized(self): + # try: + # self._lock.rlock() + # return self._initialized + # 
finally: + # self._lock.runlock() \ No newline at end of file diff --git a/ldclient/interfaces.py b/ldclient/interfaces.py index 5e919d1f..378e02fb 100644 --- a/ldclient/interfaces.py +++ b/ldclient/interfaces.py @@ -29,7 +29,7 @@ def all(self): @abstractmethod def init(self, features): """ - Initializes the store with a set of feature flags. Meant to be called by the optional StreamProcessor + Initializes the store with a set of feature flags. Meant to be called by the UpdateProcessor :param features: The features and their data as provided by LD :type features: dict[str, dict] @@ -93,11 +93,29 @@ def is_alive(self): return True -class StreamProcessor(BackgroundOperation): +class UpdateProcessor(BackgroundOperation): """ - Populates a store from an external data source + Responsible for retrieving Feature Flag updates from LaunchDarkly """ __metaclass__ = ABCMeta + # + # @abstractmethod + # def initialized(self): + # """ + # :rtype: Returns whether the processor has been initialized yet or not + # """ + # + # @abstractmethod + # def close(self): + # """ + # Closes the processor + # """ + # + # @abstractmethod + # def start(self): + # """ + # Starts the processor + # """ class EventConsumer(BackgroundOperation): @@ -120,13 +138,13 @@ class FeatureRequester(object): __metaclass__ = ABCMeta @abstractmethod - def get(self, key, callback): + def getAll(self): + """ + Gets all feature flags. """ - Gets a feature and calls the callback with the feature data to return the result - :param key: The feature key - :type key: str - :param callback: The function that accepts the feature data and returns the feature value - :type callback: function - :return: The feature value. None if not found + def getOne(self, key): + """ + Gets one Feature flag + :return: """ diff --git a/ldclient/polling.py b/ldclient/polling.py new file mode 100644 index 00000000..9982bc2f --- /dev/null +++ b/ldclient/polling.py @@ -0,0 +1,38 @@ +import threading + +from ldclient.interfaces import UpdateProcessor +from ldclient.util import log + +# TODO account for drift- now we're just pausing 1 second in between requests +class PollingUpdateProcessor(UpdateProcessor): + def __init__(self, api_key, config, requester, store): + self.daemon = True + self._api_key = api_key + self._config = config + self._requester = requester + self._store = store + self._running = False + self._timer = threading.Timer(self._config.poll_interval, self.poll) + + def start(self): + if not self._running: + log.debug("Starting PollingUpdateProcessor") + self._running = True + self.run() + + def run(self): + if self._running: + self._timer = threading.Timer(self._config.poll_interval, self.poll) + self._timer.start() + + def poll(self): + self._store.init(self._requester.getAll()) + self.run() + + def initialized(self): + return self._running and self._store.initialized + + def stop(self): + log.debug("Closing PollingUpdateProcessor") + self._running = False + self._timer.cancel() diff --git a/ldclient/requests.py b/ldclient/requester.py similarity index 52% rename from ldclient/requests.py rename to ldclient/requester.py index ee56296f..f47ae928 100644 --- a/ldclient/requests.py +++ b/ldclient/requester.py @@ -1,104 +1,88 @@ from __future__ import absolute_import + import errno import json from threading import Thread -from cachecontrol import CacheControl -from ldclient.util import log -from ldclient.interfaces import FeatureRequester, StreamProcessor, EventConsumer -from ldclient.util import _headers, _stream_headers + import requests +from 
cachecontrol import CacheControl from requests.packages.urllib3.exceptions import ProtocolError -from sseclient import SSEClient +from ldclient.interfaces import EventConsumer, FeatureRequester +from ldclient.util import _headers +from ldclient.util import log -class RequestsFeatureRequester(FeatureRequester): +class FeatureRequesterImpl(FeatureRequester): def __init__(self, api_key, config): self._api_key = api_key self._session = CacheControl(requests.Session()) self._config = config - def get(self, key, callback): - # return callback(do_toggle(key)) - - def do_toggle(should_retry): - # noinspection PyBroadException,PyUnresolvedReferences - try: - val = self._toggle(key) - return val - except ProtocolError as e: - inner = e.args[1] - if inner.errno == errno.ECONNRESET and should_retry: - log.warning( - 'ProtocolError exception caught while getting flag. Retrying.') - return do_toggle(False) - else: - log.exception( - 'Unhandled exception. Returning default value for flag.') - return None - except Exception: - log.exception( - 'Unhandled exception. Returning default value for flag.') - return None - - return callback(do_toggle(True)) + def getAll(self): + hdrs = _headers(self._api_key) + uri = self._config.get_latest_features_uri + r = self._session.get(uri, headers=hdrs, timeout=( + self._config.connect, self._config.read_timeout)) + r.raise_for_status() + features = r.json() + return features - def _toggle(self, key): + def get(self, key): hdrs = _headers(self._api_key) - uri = self._config.base_uri + '/api/eval/features/' + key + uri = self._config.get_latest_features_uri + '/' + key r = self._session.get(uri, headers=hdrs, timeout=( - self._config.connect, self._config.read)) + self._config.connect, self._config.read_timeout)) r.raise_for_status() feature = r.json() return feature -class RequestsStreamProcessor(Thread, StreamProcessor): - - def __init__(self, api_key, config, store): - Thread.__init__(self) - self.daemon = True - self._api_key = api_key - self._config = config - self._store = store - self._running = False - - def run(self): - log.debug("Starting stream processor") - self._running = True - hdrs = _stream_headers(self._api_key) - uri = self._config.stream_uri + "/features" - messages = SSEClient(uri, verify=self._config.verify, headers=hdrs) - for msg in messages: - if not self._running: - break - self.process_message(self._store, msg) - - def stop(self): - self._running = False - - @staticmethod - def process_message(store, msg): - payload = json.loads(msg.data) - log.debug("Recieved stream event {}".format(msg.event)) - if msg.event == 'put': - store.init(payload) - elif msg.event == 'patch': - key = payload['path'][1:] - feature = payload['data'] - log.debug("Updating feature {}".format(key)) - store.upsert(key, feature) - elif msg.event == 'delete': - key = payload['path'][1:] - # noinspection PyShadowingNames - version = payload['version'] - store.delete(key, version) - else: - log.warning('Unhandled event in stream processor: ' + msg.event) - +# class RequestsStreamProcessor(Thread, StreamProcessor): +# +# def __init__(self, api_key, config, store): +# Thread.__init__(self) +# self.daemon = True +# self._api_key = api_key +# self._config = config +# self._store = store +# self._running = False +# +# def run(self): +# log.debug("Starting stream processor") +# self._running = True +# hdrs = _stream_headers(self._api_key) +# uri = self._config.stream_uri + "/features" +# messages = SSEClient(uri, verify=self._config.verify, headers=hdrs) +# for msg in messages: +# if 
not self._running: +# break +# self.process_message(self._store, msg) +# +# def stop(self): +# self._running = False +# +# @staticmethod +# def process_message(store, msg): +# payload = json.loads(msg.data) +# log.debug("Recieved stream event {}".format(msg.event)) +# if msg.event == 'put': +# store.init(payload) +# elif msg.event == 'patch': +# key = payload['path'][1:] +# feature = payload['data'] +# log.debug("Updating feature {}".format(key)) +# store.upsert(key, feature) +# elif msg.event == 'delete': +# key = payload['path'][1:] +# # noinspection PyShadowingNames +# version = payload['version'] +# store.delete(key, version) +# else: +# log.warning('Unhandled event in stream processor: ' + msg.event) +# class RequestsEventConsumer(Thread, EventConsumer): - def __init__(self, event_queue, api_key, config): Thread.__init__(self) self._session = requests.Session() @@ -130,7 +114,7 @@ def do_send(should_retry): body = events hdrs = _headers(self._api_key) uri = self._config.events_uri + '/bulk' - r = self._session.post(uri, headers=hdrs, timeout=(self._config.connect, self._config.read), + r = self._session.post(uri, headers=hdrs, timeout=(self._config.connect, self._config.read_timeout), data=json.dumps(body)) r.raise_for_status() except ProtocolError as e: diff --git a/ldclient/streaming.py b/ldclient/streaming.py new file mode 100644 index 00000000..c493363e --- /dev/null +++ b/ldclient/streaming.py @@ -0,0 +1,60 @@ +import json +from threading import Thread + +from sseclient import SSEClient + +from ldclient.interfaces import UpdateProcessor +from ldclient.util import _stream_headers, log + + +class StreamingUpdateProcessor(Thread, UpdateProcessor): + + def __init__(self, api_key, config, requester, store): + Thread.__init__(self) + self.daemon = True + self._api_key = api_key + self._config = config + self._requester = requester + self._store = store + self._running = False + + def run(self): + log.debug("Starting StreamingUpdateProcessor") + self._running = True + hdrs = _stream_headers(self._api_key) + uri = self._config.stream_features_uri + messages = SSEClient(uri, verify=self._config.verify, headers=hdrs) + for msg in messages: + if not self._running: + break + self.process_message(self._store, self._requester, msg) + + def stop(self): + self._running = False + + def initialized(self): + return self._running + + @staticmethod + def process_message(store, requester, msg): + payload = json.loads(msg.data) + log.debug("Received stream event {}".format(msg.event)) + if msg.event == 'put': + store.init(payload) + elif msg.event == 'patch': + key = payload['path'][1:] + feature = payload['data'] + log.debug("Updating feature {}".format(key)) + store.upsert(key, feature) + # elif msg.event == "indirect/patch": + # key = payload['data'] + # store.upsert(key, requester.get(key)) + # elif msg.event == "indirect/put": + # store.init(requester.getAll()) + elif msg.event == 'delete': + key = payload['path'][1:] + # noinspection PyShadowingNames + version = payload['version'] + store.delete(key, version) + else: + log.warning('Unhandled event in stream processor: ' + msg.event) \ No newline at end of file diff --git a/ldclient/twisted_impls.py b/ldclient/twisted_impls.py index c1835c43..0af050e8 100644 --- a/ldclient/twisted_impls.py +++ b/ldclient/twisted_impls.py @@ -6,8 +6,8 @@ from cachecontrol import CacheControl from ldclient.client import Config, LDClient -from ldclient.interfaces import FeatureRequester, StreamProcessor, EventConsumer -from ldclient.requests import 
RequestsStreamProcessor +from ldclient.interfaces import FeatureRequester, EventConsumer, UpdateProcessor +from ldclient.streaming import StreamingUpdateProcessor from ldclient.twisted_sse import TwistedSSEClient from ldclient.util import _headers, _stream_headers, log from requests.packages.urllib3.exceptions import ProtocolError @@ -65,20 +65,23 @@ def _toggle(self, key): class TwistedConfig(Config): def __init__(self, *args, **kwargs): - self.stream_processor_class = TwistedStreamProcessor + self.update_processor_class = TwistedStreamProcessor self.consumer_class = TwistedEventConsumer self.feature_requester_class = TwistedHttpFeatureRequester super(TwistedConfig, self).__init__(*args, **kwargs) -class TwistedStreamProcessor(StreamProcessor): +class TwistedStreamProcessor(UpdateProcessor): + + def close(self): + self.sse_client.stop() def __init__(self, api_key, config, store): self._store = store self.sse_client = TwistedSSEClient(config.stream_uri + "/", headers=_stream_headers(api_key, "PythonTwistedClient"), verify=config.verify, - on_event=partial(RequestsStreamProcessor.process_message, self._store)) + on_event=partial(StreamingUpdateProcessor.process_message, self._store)) self.running = False def start(self): diff --git a/ldclient/twisted_redis.py b/ldclient/twisted_redis.py index c4558a59..c31c49dd 100644 --- a/ldclient/twisted_redis.py +++ b/ldclient/twisted_redis.py @@ -1,5 +1,5 @@ import json -from ldclient.interfaces import StreamProcessor +from ldclient.interfaces import StreamProcessor, UpdateProcessor from twisted.internet import task, defer, protocol, reactor from txredis.client import RedisClient @@ -9,7 +9,14 @@ def create_redis_ldd_processor(api_key, config, store, **kwargs): return TwistedRedisLDDStreamProcessor(store, **kwargs) -class TwistedRedisLDDStreamProcessor(StreamProcessor): +class TwistedRedisLDDStreamProcessor(UpdateProcessor): + def close(self): + pass + # TODO: implement + + def initialized(self): + pass + # TODO: implement def __init__(self, store, update_delay=15, redis_host='localhost', redis_port=6379, diff --git a/ldclient/util.py b/ldclient/util.py index 20e4c0af..d67a1f82 100644 --- a/ldclient/util.py +++ b/ldclient/util.py @@ -100,11 +100,8 @@ def check_uwsgi(): import uwsgi if not uwsgi.opt.get('enable-threads'): - log.warning('The LaunchDarkly client requires the enable-threads option ' - 'be passed to uWSGI. If enable-threads is not provided, no ' - 'threads will run and event data will not be sent to LaunchDarkly. ' - 'To learn more, see ' - 'http://docs.launchdarkly.com/v1.0/docs/python-sdk-reference#configuring-uwsgi') + log.error('The LaunchDarkly client requires the enable-threads option be passed to uWSGI. 
' + 'To learn more, see http://docs.launchdarkly.com/v1.0/docs/python-sdk-reference#configuring-uwsgi') def _evaluate(feature, user): diff --git a/pytest.ini b/pytest.ini index df0d38d0..b86adf8e 100644 --- a/pytest.ini +++ b/pytest.ini @@ -1,2 +1,3 @@ [pytest] +# enables pytest-twisted twisted = 1 \ No newline at end of file diff --git a/test-requirements.txt b/test-requirements.txt index 2b820b06..659418bc 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -1,2 +1,3 @@ -pytest==2.7.2 +pytest>=2.8 pytest-twisted==1.5 +pytest-timeout>=1.0 diff --git a/testing/server_util.py b/testing/server_util.py index 980a7759..a847b2a3 100644 --- a/testing/server_util.py +++ b/testing/server_util.py @@ -91,14 +91,14 @@ def do_nothing(handler): self.post_paths["/bulk"] = do_nothing return q - def add_feature(self, key, data): + def add_feature(self, data): def handle(handler): handler.send_response(200) handler.send_header('Content-type', 'application/json') handler.end_headers() handler.wfile.write(json.dumps(data).encode('utf-8')) - self.get("/api/eval/features/{}".format(key), handle) + self.get("/api/eval/latest-features", handle) def get(self, path, func): """ diff --git a/testing/test_inmemoryfeaturestore.py b/testing/test_inmemoryfeaturestore.py index d46dbbfa..14320a49 100644 --- a/testing/test_inmemoryfeaturestore.py +++ b/testing/test_inmemoryfeaturestore.py @@ -1,4 +1,4 @@ -from ldclient.client import InMemoryFeatureStore +from ldclient.feature_store import InMemoryFeatureStore import pytest def make_feature(key, ver): diff --git a/testing/test_integration.py b/testing/test_integration_polling.py similarity index 75% rename from testing/test_integration.py rename to testing/test_integration_polling.py index ca747ce9..472f2a92 100644 --- a/testing/test_integration.py +++ b/testing/test_integration_polling.py @@ -1,5 +1,6 @@ import logging from ldclient.client import Config, LDClient +from ldclient.feature_store import InMemoryFeatureStore from ldclient.twisted_sse import Event import pytest from testing.server_util import SSEServer, GenericServer @@ -31,16 +32,21 @@ def fin(): def test_toggle(server): - server.add_feature("foo", feature("foo", "jim")['foo']) - client = LDClient("apikey", Config(base_uri=server.url, events_uri=server.url)) - wait_until(lambda: client.toggle("foo", user('xyz'), "blah") == "jim") + feature_store = InMemoryFeatureStore() + server.add_feature( + feature("foo", "jim")['foo']) + server.post_events() -def test_sse_init(server, stream): - stream.queue.put(Event(event="put", data=feature("foo", "jim"))) - client = LDClient("apikey", Config( - stream=True, base_uri=server.url, events_uri=server.url, stream_uri=stream.url)) - wait_until(lambda: client.toggle("foo", user('xyz'), "blah") == "jim") + client = LDClient("apikey", Config(stream=False, base_uri=server.url, events_uri=server.url, feature_store=feature_store)) + wait_until(lambda: client.toggle("foo", user('xyz'), False) == True) + + +# def test_sse_init(server, stream): +# stream.queue.put(Event(event="put", data=feature("foo", "jim"))) +# client = LDClient("apikey", Config( +# stream=True, base_uri=server.url, events_uri=server.url, stream_uri=stream.url)) +# wait_until(lambda: client.toggle("foo", user('xyz'), False) == True) # Doesn't seem to handle disconnects? 
diff --git a/testing/test_integration_twisted.py b/testing/test_integration_twisted.py index a1b1107e..231e13b0 100644 --- a/testing/test_integration_twisted.py +++ b/testing/test_integration_twisted.py @@ -1,5 +1,5 @@ import logging -from ldclient import TwistedConfig, TwistedLDClient, LDClient +from ldclient import LDClient from ldclient.twisted_sse import Event import pytest from testing.server_util import SSEServer, GenericServer diff --git a/testing/test_ldclient.py b/testing/test_ldclient.py index 1c8c0b31..7d832ae3 100644 --- a/testing/test_ldclient.py +++ b/testing/test_ldclient.py @@ -1,6 +1,7 @@ from builtins import object from ldclient.client import LDClient, Config -from ldclient.interfaces import FeatureRequester +from ldclient.feature_store import InMemoryFeatureStore +from ldclient.interfaces import FeatureRequester, FeatureStore import pytest from testing.sync_util import wait_until @@ -10,14 +11,29 @@ import Queue as queue -class MockFeatureRequester(FeatureRequester): +class MockFeatureStore(FeatureStore): + def delete(self, key, version): + pass + + @property + def initialized(self): + pass + + def init(self, features): + pass + + def all(self): + pass + + def upsert(self, key, feature): + pass def __init__(self, *_): pass - def get(self, key, callback): + def get(self, key): if key == "feature.key": - return callback({ + return { u'key': u'feature.key', u'salt': u'abc', u'on': True, @@ -33,13 +49,12 @@ def get(self, key, callback): u'targets': [] } ] - }) + } else: - return callback(None) + return None -client = LDClient("API_KEY", Config("http://localhost:3000", - feature_requester_class=MockFeatureRequester)) +client = LDClient("API_KEY", Config("http://localhost:3000", feature_store=MockFeatureStore())) user = { u'key': u'xyz', @@ -59,7 +74,6 @@ def get(self, key, callback): class MockConsumer(object): - def __init__(self, *_): self._running = False @@ -135,10 +149,12 @@ def expected_event(e): assert expected_event(client._queue.get(False)) + def test_sanitize_user(): client._sanitize_user(numeric_key_user) assert numeric_key_user == sanitized_numeric_key_user + def test_toggle_event_numeric_user_key(): client.toggle('feature.key', numeric_key_user, default=None) @@ -191,7 +207,8 @@ def test_track_numeric_key_user(): client.track('my_event', numeric_key_user, 42) def expected_event(e): - return e['kind'] == 'custom' and e['key'] == 'my_event' and e['user'] == sanitized_numeric_key_user and e['data'] == 42 + return e['kind'] == 'custom' and e['key'] == 'my_event' and e['user'] == sanitized_numeric_key_user and e[ + 'data'] == 42 assert expected_event(client._queue.get(False)) @@ -211,25 +228,22 @@ def test_defaults(): def test_defaults_and_online(): client = LDClient("API_KEY", Config("http://localhost:3000", defaults={"foo": "bar"}, - feature_requester_class=MockFeatureRequester, consumer_class=MockConsumer)) assert "bar" == client.toggle('foo', user, default="jim") assert wait_for_event(client, lambda e: e['kind'] == 'feature' and e[ - 'key'] == u'foo' and e['user'] == user) + 'key'] == u'foo' and e['user'] == user) def test_defaults_and_online_no_default(): client = LDClient("API_KEY", Config("http://localhost:3000", defaults={"foo": "bar"}, - feature_requester_class=MockFeatureRequester, consumer_class=MockConsumer)) assert "jim" == client.toggle('baz', user, default="jim") assert wait_for_event(client, lambda e: e['kind'] == 'feature' and e[ - 'key'] == u'baz' and e['user'] == user) + 'key'] == u'baz' and e['user'] == user) def test_exception_in_retrieval(): 
class ExceptionFeatureRequester(FeatureRequester): - def __init__(self, *_): pass @@ -241,7 +255,7 @@ def get(self, key, callback): consumer_class=MockConsumer)) assert "bar" == client.toggle('foo', user, default="jim") assert wait_for_event(client, lambda e: e['kind'] == 'feature' and e[ - 'key'] == u'foo' and e['user'] == user) + 'key'] == u'foo' and e['user'] == user) def test_no_defaults(): diff --git a/twisted-requirements.txt b/twisted-requirements.txt index 96f39790..fd925dba 100644 --- a/twisted-requirements.txt +++ b/twisted-requirements.txt @@ -1,4 +1,5 @@ txrequests>=0.9 pyOpenSSL>=0.14 txredis>=2.3 -cryptography>=1.0 \ No newline at end of file +cryptography>=1.0 +service_identity>=16.0 \ No newline at end of file From 02b7f2d1fc3c15601a85648cce41436d0d3c13c8 Mon Sep 17 00:00:00 2001 From: Dan Richelson Date: Tue, 28 Jun 2016 17:17:26 -0700 Subject: [PATCH 02/42] [wip] Added polling support + indirect messages in streaming. Both polling and streaming restwrappers pass, All but some integration tests pass --- ...gration_polling.py => test_integration.py} | 22 +++++------- testing/test_ldclient.py | 36 +++++++++++++------ 2 files changed, 33 insertions(+), 25 deletions(-) rename testing/{test_integration_polling.py => test_integration.py} (75%) diff --git a/testing/test_integration_polling.py b/testing/test_integration.py similarity index 75% rename from testing/test_integration_polling.py rename to testing/test_integration.py index 472f2a92..4757d807 100644 --- a/testing/test_integration_polling.py +++ b/testing/test_integration.py @@ -1,6 +1,5 @@ import logging from ldclient.client import Config, LDClient -from ldclient.feature_store import InMemoryFeatureStore from ldclient.twisted_sse import Event import pytest from testing.server_util import SSEServer, GenericServer @@ -32,21 +31,16 @@ def fin(): def test_toggle(server): - feature_store = InMemoryFeatureStore() - server.add_feature( - feature("foo", "jim")['foo']) + server.add_feature(feature("foo", "jim")['foo']) + client = LDClient("apikey", Config(stream=True, base_uri=server.url, events_uri=server.url)) + wait_until(lambda: client.toggle("foo", user('xyz'), "blah") == "jim") - server.post_events() - client = LDClient("apikey", Config(stream=False, base_uri=server.url, events_uri=server.url, feature_store=feature_store)) - wait_until(lambda: client.toggle("foo", user('xyz'), False) == True) - - -# def test_sse_init(server, stream): -# stream.queue.put(Event(event="put", data=feature("foo", "jim"))) -# client = LDClient("apikey", Config( -# stream=True, base_uri=server.url, events_uri=server.url, stream_uri=stream.url)) -# wait_until(lambda: client.toggle("foo", user('xyz'), False) == True) +def test_sse_init(server, stream): + stream.queue.put(Event(event="put", data=feature("foo", "jim"))) + client = LDClient("apikey", Config( + stream=True, base_uri=server.url, events_uri=server.url, stream_uri=stream.url)) + wait_until(lambda: client.toggle("foo", user('xyz'), "blah") == "jim") # Doesn't seem to handle disconnects? 
diff --git a/testing/test_ldclient.py b/testing/test_ldclient.py index 7d832ae3..e94c7106 100644 --- a/testing/test_ldclient.py +++ b/testing/test_ldclient.py @@ -90,6 +90,14 @@ def flush(self): pass +class MockFeatureRequester(FeatureRequester): + def __init__(self, *_): + pass + + def getAll(self): + pass + + def mock_consumer(): return MockConsumer() @@ -145,7 +153,8 @@ def test_toggle_event(): client.toggle('feature.key', user, default=None) def expected_event(e): - return e['kind'] == 'feature' and e['key'] == 'feature.key' and e['user'] == user and e['value'] == True and e['default'] == None + return e['kind'] == 'feature' and e['key'] == 'feature.key' and e['user'] == user and e['value'] == True and e[ + 'default'] == None assert expected_event(client._queue.get(False)) @@ -159,7 +168,9 @@ def test_toggle_event_numeric_user_key(): client.toggle('feature.key', numeric_key_user, default=None) def expected_event(e): - return e['kind'] == 'feature' and e['key'] == 'feature.key' and e['user'] == sanitized_numeric_key_user and e['value'] == True and e['default'] == None + return e['kind'] == 'feature' and e['key'] == 'feature.key' and e['user'] == sanitized_numeric_key_user and e[ + 'value'] == True and \ + e['default'] == None assert expected_event(client._queue.get(False)) @@ -227,16 +238,19 @@ def test_defaults(): def test_defaults_and_online(): - client = LDClient("API_KEY", Config("http://localhost:3000", defaults={"foo": "bar"}, - consumer_class=MockConsumer)) - assert "bar" == client.toggle('foo', user, default="jim") - assert wait_for_event(client, lambda e: e['kind'] == 'feature' and e[ - 'key'] == u'foo' and e['user'] == user) + expected = "bar" + my_client = LDClient("API_KEY", Config("http://localhost:3000", defaults={"foo": expected}, + consumer_class=MockConsumer, feature_requester_class=MockFeatureRequester, + feature_store=InMemoryFeatureStore())) + actual = my_client.toggle('foo', user, default="originalDefault") + print(str(actual)) + assert actual == expected + assert wait_for_event(my_client, lambda e: e['kind'] == 'feature' and e['key'] == u'foo' and e['user'] == user) def test_defaults_and_online_no_default(): client = LDClient("API_KEY", Config("http://localhost:3000", defaults={"foo": "bar"}, - consumer_class=MockConsumer)) + consumer_class=MockConsumer, feature_requester_class=MockFeatureRequester)) assert "jim" == client.toggle('baz', user, default="jim") assert wait_for_event(client, lambda e: e['kind'] == 'feature' and e[ 'key'] == u'baz' and e['user'] == user) @@ -247,15 +261,15 @@ class ExceptionFeatureRequester(FeatureRequester): def __init__(self, *_): pass - def get(self, key, callback): + def getAll(self): raise Exception("blah") client = LDClient("API_KEY", Config("http://localhost:3000", defaults={"foo": "bar"}, + feature_store=InMemoryFeatureStore(), feature_requester_class=ExceptionFeatureRequester, consumer_class=MockConsumer)) assert "bar" == client.toggle('foo', user, default="jim") - assert wait_for_event(client, lambda e: e['kind'] == 'feature' and e[ - 'key'] == u'foo' and e['user'] == user) + assert wait_for_event(client, lambda e: e['kind'] == 'feature' and e['key'] == u'foo' and e['user'] == user) def test_no_defaults(): From c4501cdd0345394c0ce31c0eca7fa2c0e64fd877 Mon Sep 17 00:00:00 2001 From: Dan Richelson Date: Wed, 29 Jun 2016 17:45:48 -0700 Subject: [PATCH 03/42] cleanup --- ldclient/client.py | 6 +++-- ldclient/streaming.py | 10 ++++---- ldclient/twisted_impls.py | 36 +++++++++++------------------ testing/test_integration.py | 16 
++++--------- testing/test_integration_twisted.py | 10 +------- 5 files changed, 28 insertions(+), 50 deletions(-) diff --git a/ldclient/client.py b/ldclient/client.py index 39af4b46..82b1b0a3 100644 --- a/ldclient/client.py +++ b/ldclient/client.py @@ -23,6 +23,8 @@ from cachecontrol import CacheControl from threading import Lock +GET_LATEST_FEATURES_PATH = '/api/eval/latest-features' +STREAM_FEATURES_PATH = '/features' class Config(object): def __init__(self, @@ -59,10 +61,10 @@ def __init__(self, defaults = {} self.base_uri = base_uri.rstrip('\\') - self.get_latest_features_uri = self.base_uri + '/api/eval/latest-features' + self.get_latest_features_uri = self.base_uri + GET_LATEST_FEATURES_PATH self.events_uri = events_uri.rstrip('\\') self.stream_uri = stream_uri.rstrip('\\') - self.stream_features_uri = self.stream_uri + '/features' + self.stream_features_uri = self.stream_uri + STREAM_FEATURES_PATH if update_processor_class: self.update_processor_class = update_processor_class diff --git a/ldclient/streaming.py b/ldclient/streaming.py index c493363e..d51d6919 100644 --- a/ldclient/streaming.py +++ b/ldclient/streaming.py @@ -46,11 +46,11 @@ def process_message(store, requester, msg): feature = payload['data'] log.debug("Updating feature {}".format(key)) store.upsert(key, feature) - # elif msg.event == "indirect/patch": - # key = payload['data'] - # store.upsert(key, requester.get(key)) - # elif msg.event == "indirect/put": - # store.init(requester.getAll()) + elif msg.event == "indirect/patch": + key = payload['data'] + store.upsert(key, requester.get(key)) + elif msg.event == "indirect/put": + store.init(requester.getAll()) elif msg.event == 'delete': key = payload['path'][1:] # noinspection PyShadowingNames diff --git a/ldclient/twisted_impls.py b/ldclient/twisted_impls.py index 0af050e8..b6524037 100644 --- a/ldclient/twisted_impls.py +++ b/ldclient/twisted_impls.py @@ -22,40 +22,33 @@ def __init__(self, api_key, config): self._session = CacheControl(txrequests.Session()) self._config = config - def get(self, key, callback): - d = self.toggle(key) - d.addBoth(callback) - return d - - def toggle(self, key): + def getAll(self): @defer.inlineCallbacks def run(should_retry): # noinspection PyBroadException try: - val = yield self._toggle(key) + val = yield self._get_all(self) defer.returnValue(val) except ProtocolError as e: inner = e.args[1] if inner.errno == errno.ECONNRESET and should_retry: log.warning( - 'ProtocolError exception caught while getting flag. Retrying.') + 'ProtocolError exception caught while getting flags. Retrying.') d = yield run(False) defer.returnValue(d) else: - log.exception( - 'Unhandled exception. Returning default value for flag.') + log.exception('Unhandled exception.') defer.returnValue(None) except Exception: - log.exception( - 'Unhandled exception. 
Returning default value for flag.') + log.exception('Unhandled exception.') defer.returnValue(None) return run(True) @defer.inlineCallbacks - def _toggle(self, key): + def _get_all(self): hdrs = _headers(self._api_key) - uri = self._config.base_uri + '/api/eval/features/' + key + uri = self._config.get_latest_features_uri r = yield self._session.get(uri, headers=hdrs, timeout=(self._config.connect, self._config.read)) r.raise_for_status() feature = r.json() @@ -72,16 +65,18 @@ def __init__(self, *args, **kwargs): class TwistedStreamProcessor(UpdateProcessor): - def close(self): self.sse_client.stop() - def __init__(self, api_key, config, store): + def __init__(self, api_key, config, store, requester): self._store = store - self.sse_client = TwistedSSEClient(config.stream_uri + "/", headers=_stream_headers(api_key, - "PythonTwistedClient"), + self._requester = requester + self.sse_client = TwistedSSEClient(config.stream_features_uri, + headers=_stream_headers(api_key, "PythonTwistedClient"), verify=config.verify, - on_event=partial(StreamingUpdateProcessor.process_message, self._store)) + on_event=partial(StreamingUpdateProcessor.process_message, + self._store, + self._requester)) self.running = False def start(self): @@ -91,9 +86,6 @@ def start(self): def stop(self): self.sse_client.stop() - def get_feature(self, key): - return self._store.get(key) - def initialized(self): return self._store.initialized() diff --git a/testing/test_integration.py b/testing/test_integration.py index 4757d807..3dd7bbba 100644 --- a/testing/test_integration.py +++ b/testing/test_integration.py @@ -30,18 +30,10 @@ def fin(): return server -def test_toggle(server): - server.add_feature(feature("foo", "jim")['foo']) - client = LDClient("apikey", Config(stream=True, base_uri=server.url, events_uri=server.url)) - wait_until(lambda: client.toggle("foo", user('xyz'), "blah") == "jim") - - -def test_sse_init(server, stream): - stream.queue.put(Event(event="put", data=feature("foo", "jim"))) - client = LDClient("apikey", Config( - stream=True, base_uri=server.url, events_uri=server.url, stream_uri=stream.url)) - wait_until(lambda: client.toggle("foo", user('xyz'), "blah") == "jim") - +def test_toggle(server, stream): + stream.queue.put(Event(event="put", data=feature("foo", True))) + client = LDClient("apikey", Config(stream=True, base_uri=server.url, events_uri=server.url, stream_uri=stream.url)) + wait_until(lambda: client.toggle("foo", user('xyz'), False) is True) # Doesn't seem to handle disconnects? 
# def test_sse_reconnect(server, stream): diff --git a/testing/test_integration_twisted.py b/testing/test_integration_twisted.py index 231e13b0..8f7d3a7f 100644 --- a/testing/test_integration_twisted.py +++ b/testing/test_integration_twisted.py @@ -1,5 +1,5 @@ import logging -from ldclient import LDClient +from ldclient import LDClient, TwistedLDClient, TwistedConfig from ldclient.twisted_sse import Event import pytest from testing.server_util import SSEServer, GenericServer @@ -29,14 +29,6 @@ def fin(): request.addfinalizer(fin) return server - -@pytest.inlineCallbacks -def test_toggle(server): - server.add_feature("foo", feature("foo", "jim")['foo']) - client = TwistedLDClient("apikey", TwistedConfig(base_uri=server.url)) - yield wait_until(is_equal(lambda: client.toggle("foo", user('xyz'), "blah"), "jim")) - - @pytest.inlineCallbacks def test_sse_init(server, stream): stream.queue.put(Event(event="put", data=feature("foo", "jim"))) From ce052170ac157e27ae021974818ff299ec8852d7 Mon Sep 17 00:00:00 2001 From: Dan Richelson Date: Wed, 29 Jun 2016 18:06:26 -0700 Subject: [PATCH 04/42] polling client no longer drifts. --- demo/demo.py | 2 +- ldclient/client.py | 7 ------- ldclient/polling.py | 32 ++++++++++++++------------------ 3 files changed, 15 insertions(+), 26 deletions(-) diff --git a/demo/demo.py b/demo/demo.py index f590ea26..2b4e9c1a 100644 --- a/demo/demo.py +++ b/demo/demo.py @@ -15,7 +15,7 @@ if __name__ == '__main__': apiKey = 'sdk-7c55610f-385f-46c5-a3a6-2fdc9ccf3034' - config = Config(stream=True) + config = Config(stream=False) client = LDClient(apiKey, config) user = {u'key': 'userKey'} time.sleep(5) diff --git a/ldclient/client.py b/ldclient/client.py index 82b1b0a3..e72b415e 100644 --- a/ldclient/client.py +++ b/ldclient/client.py @@ -119,7 +119,6 @@ def __init__(self, api_key, config=None): api_key, self._config, self._feature_requester, self._store) """ :type: UpdateProcessor """ - # TODO: block until intialized.. self._update_processor.start() log.info("Started LaunchDarkly Client") @@ -170,9 +169,6 @@ def set_online(self): def is_offline(self): return self._offline - def is_initialized(self): - return self._offline or self._config.use_ldd or self._update_processor.initialized - def flush(self): if self._offline: return @@ -183,7 +179,6 @@ def get_flag(self, key, user, default=False): return self.toggle(key, user, default) def toggle(self, key, user, default=False): - log.info("togglin..") default = self._config.get_default(key, default) def send_event(value): @@ -191,7 +186,6 @@ def send_event(value): 'user': user, 'value': value, 'default': default}) if self._offline: - # log warning? 
send_event(default) return default @@ -217,5 +211,4 @@ def _sanitize_user(self, user): if 'key' in user: user['key'] = str(user['key']) - __all__ = ['LDClient', 'Config'] diff --git a/ldclient/polling.py b/ldclient/polling.py index 9982bc2f..d88fecf5 100644 --- a/ldclient/polling.py +++ b/ldclient/polling.py @@ -1,38 +1,34 @@ -import threading +from threading import Thread from ldclient.interfaces import UpdateProcessor from ldclient.util import log +import time -# TODO account for drift- now we're just pausing 1 second in between requests -class PollingUpdateProcessor(UpdateProcessor): + +class PollingUpdateProcessor(Thread, UpdateProcessor): def __init__(self, api_key, config, requester, store): + Thread.__init__(self) self.daemon = True self._api_key = api_key self._config = config self._requester = requester self._store = store self._running = False - self._timer = threading.Timer(self._config.poll_interval, self.poll) - def start(self): + def run(self): if not self._running: - log.debug("Starting PollingUpdateProcessor") + log.debug("Starting PollingUpdateProcessor with request interval: " + str(self._config.poll_interval)) self._running = True - self.run() - - def run(self): - if self._running: - self._timer = threading.Timer(self._config.poll_interval, self.poll) - self._timer.start() - - def poll(self): - self._store.init(self._requester.getAll()) - self.run() + while self._running: + start_time = time.time() + self._store.init(self._requester.getAll()) + elapsed = time.time() - start_time + if elapsed < self._config.poll_interval: + time.sleep(self._config.poll_interval - elapsed) def initialized(self): return self._running and self._store.initialized def stop(self): - log.debug("Closing PollingUpdateProcessor") + log.debug("Stopping PollingUpdateProcessor") self._running = False - self._timer.cancel() From 2b7441022e9bca8b3964b0f073665916f59a85ec Mon Sep 17 00:00:00 2001 From: Dan Richelson Date: Wed, 29 Jun 2016 18:20:19 -0700 Subject: [PATCH 05/42] cleanup --- demo/demo.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/demo/demo.py b/demo/demo.py index 2b4e9c1a..4bd5cffc 100644 --- a/demo/demo.py +++ b/demo/demo.py @@ -14,7 +14,7 @@ root.addHandler(ch) if __name__ == '__main__': - apiKey = 'sdk-7c55610f-385f-46c5-a3a6-2fdc9ccf3034' + apiKey = 'your api key' config = Config(stream=False) client = LDClient(apiKey, config) user = {u'key': 'userKey'} From 22382ed46a6dfe147004e0e5a09366b603af357d Mon Sep 17 00:00:00 2001 From: Dan Richelson Date: Wed, 29 Jun 2016 18:32:38 -0700 Subject: [PATCH 06/42] Remove commented code. Fix some documentation --- ldclient/client.py | 2 -- ldclient/feature_store.py | 67 +-------------------------------------- ldclient/interfaces.py | 22 ++----------- ldclient/requester.py | 44 ------------------------- testing/test_ldclient.py | 3 +- 5 files changed, 4 insertions(+), 134 deletions(-) diff --git a/ldclient/client.py b/ldclient/client.py index e72b415e..2598f172 100644 --- a/ldclient/client.py +++ b/ldclient/client.py @@ -194,13 +194,11 @@ def send_event(value): if 'key' in user and user['key']: feature = self._store.get(key) else: - # log warning? send_event(default) return default val = _evaluate(feature, user) if val is None: - # log warning? 
send_event(default) return default diff --git a/ldclient/feature_store.py b/ldclient/feature_store.py index d681d073..49790fda 100644 --- a/ldclient/feature_store.py +++ b/ldclient/feature_store.py @@ -67,69 +67,4 @@ def initialized(self): self._lock.rlock() return self._initialized finally: - self._lock.runlock() - - ### Old version: - # class InMemoryFeatureStore(FeatureStore): - # - # def __init__(self): - # self._lock = ReadWriteLock() - # self._initialized = False - # self._features = {} - # - # def get(self, key): - # try: - # self._lock.rlock() - # f = self._features.get(key) - # if f is None or 'deleted' in f and f['deleted']: - # return None - # return f - # finally: - # self._lock.runlock() - # - # def all(self): - # try: - # self._lock.rlock() - # return dict((k, f) for k, f in self._features.items() if ('deleted' not in f) or not f['deleted']) - # finally: - # self._lock.runlock() - # - # def init(self, features): - # try: - # self._lock.lock() - # self._features = dict(features) - # self._initialized = True - # finally: - # self._lock.unlock() - # - # # noinspection PyShadowingNames - # def delete(self, key, version): - # try: - # self._lock.lock() - # f = self._features.get(key) - # if f is not None and f['version'] < version: - # f['deleted'] = True - # f['version'] = version - # elif f is None: - # f = {'deleted': True, 'version': version} - # self._features[key] = f - # finally: - # self._lock.unlock() - # - # def upsert(self, key, feature): - # try: - # self._lock.lock() - # f = self._features.get(key) - # if f is None or f['version'] < feature['version']: - # self._features[key] = feature - # log.debug("Updated feature {} to version {}".format(key, feature['version'])) - # finally: - # self._lock.unlock() - # - # @property - # def initialized(self): - # try: - # self._lock.rlock() - # return self._initialized - # finally: - # self._lock.runlock() \ No newline at end of file + self._lock.runlock() \ No newline at end of file diff --git a/ldclient/interfaces.py b/ldclient/interfaces.py index 378e02fb..a1c2c09e 100644 --- a/ldclient/interfaces.py +++ b/ldclient/interfaces.py @@ -95,27 +95,9 @@ def is_alive(self): class UpdateProcessor(BackgroundOperation): """ - Responsible for retrieving Feature Flag updates from LaunchDarkly + Responsible for retrieving Feature Flag updates from LaunchDarkly and saving them to the feature store """ __metaclass__ = ABCMeta - # - # @abstractmethod - # def initialized(self): - # """ - # :rtype: Returns whether the processor has been initialized yet or not - # """ - # - # @abstractmethod - # def close(self): - # """ - # Closes the processor - # """ - # - # @abstractmethod - # def start(self): - # """ - # Starts the processor - # """ class EventConsumer(BackgroundOperation): @@ -133,7 +115,7 @@ def flush(self): class FeatureRequester(object): """ - Requests features if they aren't in the store + Requests features. 
""" __metaclass__ = ABCMeta diff --git a/ldclient/requester.py b/ldclient/requester.py index f47ae928..b1c6223f 100644 --- a/ldclient/requester.py +++ b/ldclient/requester.py @@ -38,50 +38,6 @@ def get(self, key): return feature -# class RequestsStreamProcessor(Thread, StreamProcessor): -# -# def __init__(self, api_key, config, store): -# Thread.__init__(self) -# self.daemon = True -# self._api_key = api_key -# self._config = config -# self._store = store -# self._running = False -# -# def run(self): -# log.debug("Starting stream processor") -# self._running = True -# hdrs = _stream_headers(self._api_key) -# uri = self._config.stream_uri + "/features" -# messages = SSEClient(uri, verify=self._config.verify, headers=hdrs) -# for msg in messages: -# if not self._running: -# break -# self.process_message(self._store, msg) -# -# def stop(self): -# self._running = False -# -# @staticmethod -# def process_message(store, msg): -# payload = json.loads(msg.data) -# log.debug("Recieved stream event {}".format(msg.event)) -# if msg.event == 'put': -# store.init(payload) -# elif msg.event == 'patch': -# key = payload['path'][1:] -# feature = payload['data'] -# log.debug("Updating feature {}".format(key)) -# store.upsert(key, feature) -# elif msg.event == 'delete': -# key = payload['path'][1:] -# # noinspection PyShadowingNames -# version = payload['version'] -# store.delete(key, version) -# else: -# log.warning('Unhandled event in stream processor: ' + msg.event) -# - class RequestsEventConsumer(Thread, EventConsumer): def __init__(self, event_queue, api_key, config): Thread.__init__(self) diff --git a/testing/test_ldclient.py b/testing/test_ldclient.py index e94c7106..510dc5f7 100644 --- a/testing/test_ldclient.py +++ b/testing/test_ldclient.py @@ -252,8 +252,7 @@ def test_defaults_and_online_no_default(): client = LDClient("API_KEY", Config("http://localhost:3000", defaults={"foo": "bar"}, consumer_class=MockConsumer, feature_requester_class=MockFeatureRequester)) assert "jim" == client.toggle('baz', user, default="jim") - assert wait_for_event(client, lambda e: e['kind'] == 'feature' and e[ - 'key'] == u'baz' and e['user'] == user) + assert wait_for_event(client, lambda e: e['kind'] == 'feature' and e['key'] == u'baz' and e['user'] == user) def test_exception_in_retrieval(): From 9e9fe5492e86d1d01276365fa2c9d5669a3c7e56 Mon Sep 17 00:00:00 2001 From: Dan Richelson Date: Thu, 30 Jun 2016 08:09:33 -0700 Subject: [PATCH 07/42] Move offline to config so it is set once during initialization and only read after that. 
--- ldclient/client.py | 27 ++++++++++------------- ldd/test_ldd_twisted.py | 2 +- testing/test_ldclient.py | 47 +++++++++------------------------------- 3 files changed, 23 insertions(+), 53 deletions(-) diff --git a/ldclient/client.py b/ldclient/client.py index 2598f172..eda51fa5 100644 --- a/ldclient/client.py +++ b/ldclient/client.py @@ -26,6 +26,7 @@ GET_LATEST_FEATURES_PATH = '/api/eval/latest-features' STREAM_FEATURES_PATH = '/features' + class Config(object): def __init__(self, base_uri='https://app.launchdarkly.com', @@ -44,7 +45,8 @@ def __init__(self, use_ldd=False, feature_store=InMemoryFeatureStore(), feature_requester_class=FeatureRequesterImpl, - consumer_class=None): + consumer_class=None, + offline=False): """ :param update_processor_class: A factory for an UpdateProcessor implementation taking the api key, config, @@ -88,6 +90,7 @@ def __init__(self, self.verify = verify self.defaults = defaults self.events = events + self.offline = offline def get_default(self, key, default): return default if key not in self.defaults else self.defaults[key] @@ -105,7 +108,6 @@ def __init__(self, api_key, config=None): self._session = CacheControl(requests.Session()) self._queue = queue.Queue(self._config.capacity) self._consumer = None - self._offline = False self._lock = Lock() self._store = self._config.feature_store @@ -119,6 +121,10 @@ def __init__(self, api_key, config=None): api_key, self._config, self._feature_requester, self._store) """ :type: UpdateProcessor """ + if self._config.offline: + log.info("Started LaunchDarkly Client in offline mode") + return + self._update_processor.start() log.info("Started LaunchDarkly Client") @@ -140,7 +146,7 @@ def _stop_consumers(self): self._update_processor.stop() def _send(self, event): - if self._offline or not self._config.events: + if self._config.offline or not self._config.events: return self._check_consumer() event['creationDate'] = int(time.time() * 1000) @@ -158,19 +164,11 @@ def identify(self, user): self._sanitize_user(user) self._send({'kind': 'identify', 'key': user['key'], 'user': user}) - def set_offline(self): - self._offline = True - self._stop_consumers() - - def set_online(self): - self._offline = False - self._check_consumer() - def is_offline(self): - return self._offline + return self._config.offline def flush(self): - if self._offline: + if self._config.offline: return self._check_consumer() return self._consumer.flush() @@ -185,8 +183,7 @@ def send_event(value): self._send({'kind': 'feature', 'key': key, 'user': user, 'value': value, 'default': default}) - if self._offline: - send_event(default) + if self._config.offline: return default self._sanitize_user(user) diff --git a/ldd/test_ldd_twisted.py b/ldd/test_ldd_twisted.py index 00253338..cb33a139 100644 --- a/ldd/test_ldd_twisted.py +++ b/ldd/test_ldd_twisted.py @@ -29,7 +29,7 @@ def fin(): @pytest.inlineCallbacks def test_sse_init(stream): stream.queue.put(Event(event="put", data=feature("foo", "jim"))) - client = LDClient("apikey", TwistedConfig(stream=True, stream_processor_class=create_redis_ldd_processor, + client = LDClient("apikey", TwistedConfig(stream=True, update_processor_class=create_redis_ldd_processor, feature_requester_class=NoOpFeatureRequester, events=False)) yield wait_until(is_equal(lambda: client.toggle("foo", user('xyz'), "blah"), "jim")) diff --git a/testing/test_ldclient.py b/testing/test_ldclient.py index 510dc5f7..2e9d58e3 100644 --- a/testing/test_ldclient.py +++ b/testing/test_ldclient.py @@ -55,6 +55,7 @@ def get(self, key): client = 
LDClient("API_KEY", Config("http://localhost:3000", feature_store=MockFeatureStore())) +offline_client = LDClient("API_KEY", Config("http://localhost:3000", feature_store=MockFeatureStore(), offline=True)) user = { u'key': u'xyz', @@ -114,7 +115,6 @@ def setup_function(function): u'bizzle': u'def' } } - client.set_online() client._queue = queue.Queue(10) client._consumer = mock_consumer() @@ -129,32 +129,19 @@ def wait_for_event(c, cb): return cb(e) -def test_set_offline(): - client.set_offline() - assert client.is_offline() == True - - -def test_set_online(): - client.set_offline() - client.set_online() - assert client.is_offline() == False - - def test_toggle(): assert client.toggle('feature.key', user, default=None) == True def test_toggle_offline(): - client.set_offline() - assert client.toggle('feature.key', user, default=None) == None + assert offline_client.toggle('feature.key', user, default=None) == None def test_toggle_event(): client.toggle('feature.key', user, default=None) def expected_event(e): - return e['kind'] == 'feature' and e['key'] == 'feature.key' and e['user'] == user and e['value'] == True and e[ - 'default'] == None + return e['kind'] == 'feature' and e['key'] == 'feature.key' and e['user'] == user and e['value'] == True and e['default'] == None assert expected_event(client._queue.get(False)) @@ -176,9 +163,8 @@ def expected_event(e): def test_toggle_event_offline(): - client.set_offline() - client.toggle('feature.key', user, default=None) - assert client._queue.empty() + offline_client.toggle('feature.key', user, default=None) + assert offline_client._queue.empty() def test_identify(): @@ -200,9 +186,7 @@ def expected_event(e): def test_identify_offline(): - client.set_offline() - client.identify(user) - assert client._queue.empty() + assert offline_client._queue.empty() def test_track(): @@ -225,15 +209,13 @@ def expected_event(e): def test_track_offline(): - client.set_offline() - client.track('my_event', user, 42) - assert client._queue.empty() + offline_client.track('my_event', user, 42) + assert offline_client._queue.empty() def test_defaults(): client = LDClient("API_KEY", Config( - "http://localhost:3000", defaults={"foo": "bar"})) - client.set_offline() + "http://localhost:3000", defaults={"foo": "bar"}, offline=True)) assert "bar" == client.toggle('foo', user, default=None) @@ -272,8 +254,7 @@ def getAll(self): def test_no_defaults(): - client.set_offline() - assert "bar" == client.toggle('foo', user, default="bar") + assert "bar" == offline_client.toggle('foo', user, default="bar") def drain(queue): @@ -289,11 +270,3 @@ def test_flush_empties_queue(): drain(client._queue) client.flush() assert client._queue.empty() - - -def test_flush_offline_does_not_empty_queue(): - client.track('my_event', user, 42) - client.track('my_event', user, 33) - client.set_offline() - client.flush() - assert not client._queue.empty() From 222f98115bdb9a7ae7a98fa84ea3639bd35f3c85 Mon Sep 17 00:00:00 2001 From: Dan Richelson Date: Thu, 30 Jun 2016 09:34:28 -0700 Subject: [PATCH 08/42] Add log statements when returning default --- ldclient/client.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/ldclient/client.py b/ldclient/client.py index eda51fa5..2d770aac 100644 --- a/ldclient/client.py +++ b/ldclient/client.py @@ -191,12 +191,20 @@ def send_event(value): if 'key' in user and user['key']: feature = self._store.get(key) else: + send_event(default) + log.warning("Missing or empty User key when evaluating Feature Flag key: " + key + ". 
Returning default.") + return default + + if feature: + val = _evaluate(feature, user) + else: + log.warning("Feature Flag key: " + key + " not found in Feature Store. Returning default.") send_event(default) return default - val = _evaluate(feature, user) if val is None: send_event(default) + log.warning("Feature Flag key: " + key + " evaluation returned None. Returning default.") return default send_event(val) From ea768734c91b5a1f7f768a72560065c3ba26619c Mon Sep 17 00:00:00 2001 From: Dan Richelson Date: Thu, 30 Jun 2016 18:31:48 -0700 Subject: [PATCH 09/42] Rename --- ldclient/client.py | 22 +++++++++++----------- ldclient/twisted_impls.py | 2 +- testing/test_ldclient.py | 8 ++++---- 3 files changed, 16 insertions(+), 16 deletions(-) diff --git a/ldclient/client.py b/ldclient/client.py index 2d770aac..71e82bc8 100644 --- a/ldclient/client.py +++ b/ldclient/client.py @@ -45,7 +45,7 @@ def __init__(self, use_ldd=False, feature_store=InMemoryFeatureStore(), feature_requester_class=FeatureRequesterImpl, - consumer_class=None, + event_consumer_class=None, offline=False): """ @@ -56,8 +56,8 @@ def __init__(self, :type feature_store: FeatureStore :param feature_requester_class: A factory for a FeatureRequester implementation taking the api key and config :type feature_requester_class: (str, Config, FeatureStore) -> FeatureRequester - :param consumer_class: A factory for an EventConsumer implementation taking the event queue, api key, and config - :type consumer_class: (queue.Queue, str, Config) -> EventConsumer + :param event_consumer_class: A factory for an EventConsumer implementation taking the event queue, api key, and config + :type event_consumer_class: (queue.Queue, str, Config) -> EventConsumer """ if defaults is None: defaults = {} @@ -81,7 +81,7 @@ def __init__(self, self.poll_interval = poll_interval self.use_ldd = use_ldd self.feature_store = feature_store - self.consumer_class = RequestsEventConsumer if not consumer_class else consumer_class + self.event_consumer_class = RequestsEventConsumer if not event_consumer_class else event_consumer_class self.feature_requester_class = FeatureRequesterImpl if not feature_requester_class else feature_requester_class self.connect = connect_timeout self.read_timeout = read_timeout @@ -107,7 +107,7 @@ def __init__(self, api_key, config=None): self._config = config or Config.default() self._session = CacheControl(requests.Session()) self._queue = queue.Queue(self._config.capacity) - self._consumer = None + self._event_consumer = None self._lock = Lock() self._store = self._config.feature_store @@ -134,14 +134,14 @@ def api_key(self): def _check_consumer(self): with self._lock: - if not self._consumer or not self._consumer.is_alive(): - self._consumer = self._config.consumer_class( + if not self._event_consumer or not self._event_consumer.is_alive(): + self._event_consumer = self._config.event_consumer_class( self._queue, self._api_key, self._config) - self._consumer.start() + self._event_consumer.start() def _stop_consumers(self): - if self._consumer and self._consumer.is_alive(): - self._consumer.stop() + if self._event_consumer and self._event_consumer.is_alive(): + self._event_consumer.stop() if self._update_processor and self._update_processor.is_alive(): self._update_processor.stop() @@ -171,7 +171,7 @@ def flush(self): if self._config.offline: return self._check_consumer() - return self._consumer.flush() + return self._event_consumer.flush() def get_flag(self, key, user, default=False): return self.toggle(key, user, default) diff --git 
a/ldclient/twisted_impls.py b/ldclient/twisted_impls.py index b6524037..f5efe100 100644 --- a/ldclient/twisted_impls.py +++ b/ldclient/twisted_impls.py @@ -59,7 +59,7 @@ class TwistedConfig(Config): def __init__(self, *args, **kwargs): self.update_processor_class = TwistedStreamProcessor - self.consumer_class = TwistedEventConsumer + self.event_consumer_class = TwistedEventConsumer self.feature_requester_class = TwistedHttpFeatureRequester super(TwistedConfig, self).__init__(*args, **kwargs) diff --git a/testing/test_ldclient.py b/testing/test_ldclient.py index 2e9d58e3..7c9bc6b6 100644 --- a/testing/test_ldclient.py +++ b/testing/test_ldclient.py @@ -116,7 +116,7 @@ def setup_function(function): } } client._queue = queue.Queue(10) - client._consumer = mock_consumer() + client._event_consumer = mock_consumer() @pytest.fixture(autouse=True) @@ -222,7 +222,7 @@ def test_defaults(): def test_defaults_and_online(): expected = "bar" my_client = LDClient("API_KEY", Config("http://localhost:3000", defaults={"foo": expected}, - consumer_class=MockConsumer, feature_requester_class=MockFeatureRequester, + event_consumer_class=MockConsumer, feature_requester_class=MockFeatureRequester, feature_store=InMemoryFeatureStore())) actual = my_client.toggle('foo', user, default="originalDefault") print(str(actual)) @@ -232,7 +232,7 @@ def test_defaults_and_online(): def test_defaults_and_online_no_default(): client = LDClient("API_KEY", Config("http://localhost:3000", defaults={"foo": "bar"}, - consumer_class=MockConsumer, feature_requester_class=MockFeatureRequester)) + event_consumer_class=MockConsumer, feature_requester_class=MockFeatureRequester)) assert "jim" == client.toggle('baz', user, default="jim") assert wait_for_event(client, lambda e: e['kind'] == 'feature' and e['key'] == u'baz' and e['user'] == user) @@ -248,7 +248,7 @@ def getAll(self): client = LDClient("API_KEY", Config("http://localhost:3000", defaults={"foo": "bar"}, feature_store=InMemoryFeatureStore(), feature_requester_class=ExceptionFeatureRequester, - consumer_class=MockConsumer)) + event_consumer_class=MockConsumer)) assert "bar" == client.toggle('foo', user, default="jim") assert wait_for_event(client, lambda e: e['kind'] == 'feature' and e['key'] == u'foo' and e['user'] == user) From f29e2a096fd6f26b5085f27b98420f9ed3352445 Mon Sep 17 00:00:00 2001 From: Dan Richelson Date: Fri, 1 Jul 2016 10:26:11 -0700 Subject: [PATCH 10/42] Rename and move some things. 
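For illustration only (this class and its canned flag data are hypothetical, not part of the SDK): with getAll/getOne renamed to get_all/get_one, a custom requester supplied via feature_requester_class would look roughly like this.

    from ldclient.interfaces import FeatureRequester

    class CannedFeatureRequester(FeatureRequester):
        # Hypothetical stand-in that serves a fixed flag map instead of calling LaunchDarkly.
        def __init__(self, *_):
            self._flags = {'my.flag': {'key': 'my.flag', 'version': 1, 'on': False, 'variations': []}}

        def get_all(self):
            # Used by PollingUpdateProcessor and by 'indirect/put' stream events.
            return self._flags

        def get_one(self, key):
            # Declared on the renamed interface for single-flag lookups.
            return self._flags.get(key)
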
--- ldclient/client.py | 6 +-- ldclient/event_consumer.py | 100 +++++++++++++++++++++++++++++++++++++ ldclient/interfaces.py | 4 +- ldclient/polling.py | 2 +- ldclient/requester.py | 98 +----------------------------------- ldclient/streaming.py | 2 +- ldclient/twisted_impls.py | 2 +- testing/test_ldclient.py | 4 +- 8 files changed, 112 insertions(+), 106 deletions(-) create mode 100644 ldclient/event_consumer.py diff --git a/ldclient/client.py b/ldclient/client.py index 71e82bc8..11851c25 100644 --- a/ldclient/client.py +++ b/ldclient/client.py @@ -5,11 +5,11 @@ import requests from builtins import object +from ldclient.event_consumer import EventConsumerImpl from ldclient.feature_store import InMemoryFeatureStore from ldclient.interfaces import FeatureStore from ldclient.polling import PollingUpdateProcessor -from ldclient.requester import RequestsEventConsumer, FeatureRequesterImpl - +from ldclient.requester import FeatureRequesterImpl from ldclient.streaming import StreamingUpdateProcessor from ldclient.util import check_uwsgi, _evaluate, log @@ -81,7 +81,7 @@ def __init__(self, self.poll_interval = poll_interval self.use_ldd = use_ldd self.feature_store = feature_store - self.event_consumer_class = RequestsEventConsumer if not event_consumer_class else event_consumer_class + self.event_consumer_class = EventConsumerImpl if not event_consumer_class else event_consumer_class self.feature_requester_class = FeatureRequesterImpl if not feature_requester_class else feature_requester_class self.connect = connect_timeout self.read_timeout = read_timeout diff --git a/ldclient/event_consumer.py b/ldclient/event_consumer.py new file mode 100644 index 00000000..11a5ee5f --- /dev/null +++ b/ldclient/event_consumer.py @@ -0,0 +1,100 @@ +from __future__ import absolute_import + +import errno +import json +from threading import Thread + +import requests +from requests.packages.urllib3.exceptions import ProtocolError + +from ldclient.interfaces import EventConsumer +from ldclient.util import _headers +from ldclient.util import log + + +class EventConsumerImpl(Thread, EventConsumer): + def __init__(self, event_queue, api_key, config): + Thread.__init__(self) + self._session = requests.Session() + self.daemon = True + self._api_key = api_key + self._config = config + self._queue = event_queue + self._running = False + + def run(self): + log.debug("Starting event consumer") + self._running = True + while self._running: + self.send() + + def stop(self): + self._running = False + + def flush(self): + self._queue.join() + + def send_batch(self, events): + def do_send(should_retry): + # noinspection PyBroadException + try: + if isinstance(events, dict): + body = [events] + else: + body = events + hdrs = _headers(self._api_key) + uri = self._config.events_uri + '/bulk' + r = self._session.post(uri, headers=hdrs, timeout=(self._config.connect, self._config.read_timeout), + data=json.dumps(body)) + r.raise_for_status() + except ProtocolError as e: + inner = e.args[1] + if inner.errno == errno.ECONNRESET and should_retry: + log.warning( + 'ProtocolError exception caught while sending events. Retrying.') + do_send(False) + else: + log.exception( + 'Unhandled exception in event consumer. Analytics events were not processed.') + except: + log.exception( + 'Unhandled exception in event consumer. 
Analytics events were not processed.') + + try: + do_send(True) + finally: + for _ in events: + self._queue.task_done() + + def send(self): + events = self.next() + + if len(events) == 0: + return + else: + self.send_batch(events) + + def next(self): + q = self._queue + items = [] + + item = self.next_item() + if item is None: + return items + + items.append(item) + while len(items) < self._config.upload_limit and not q.empty(): + item = self.next_item() + if item: + items.append(item) + + return items + + def next_item(self): + q = self._queue + # noinspection PyBroadException + try: + item = q.get(block=True, timeout=5) + return item + except Exception: + return None diff --git a/ldclient/interfaces.py b/ldclient/interfaces.py index a1c2c09e..26a991ca 100644 --- a/ldclient/interfaces.py +++ b/ldclient/interfaces.py @@ -120,12 +120,12 @@ class FeatureRequester(object): __metaclass__ = ABCMeta @abstractmethod - def getAll(self): + def get_all(self): """ Gets all feature flags. """ - def getOne(self, key): + def get_one(self, key): """ Gets one Feature flag :return: diff --git a/ldclient/polling.py b/ldclient/polling.py index d88fecf5..ad120a75 100644 --- a/ldclient/polling.py +++ b/ldclient/polling.py @@ -21,7 +21,7 @@ def run(self): self._running = True while self._running: start_time = time.time() - self._store.init(self._requester.getAll()) + self._store.init(self._requester.get_all()) elapsed = time.time() - start_time if elapsed < self._config.poll_interval: time.sleep(self._config.poll_interval - elapsed) diff --git a/ldclient/requester.py b/ldclient/requester.py index b1c6223f..c30178ad 100644 --- a/ldclient/requester.py +++ b/ldclient/requester.py @@ -1,16 +1,10 @@ from __future__ import absolute_import -import errno -import json -from threading import Thread - import requests from cachecontrol import CacheControl -from requests.packages.urllib3.exceptions import ProtocolError -from ldclient.interfaces import EventConsumer, FeatureRequester +from ldclient.interfaces import FeatureRequester from ldclient.util import _headers -from ldclient.util import log class FeatureRequesterImpl(FeatureRequester): @@ -19,7 +13,7 @@ def __init__(self, api_key, config): self._session = CacheControl(requests.Session()) self._config = config - def getAll(self): + def get_all(self): hdrs = _headers(self._api_key) uri = self._config.get_latest_features_uri r = self._session.get(uri, headers=hdrs, timeout=( @@ -36,91 +30,3 @@ def get(self, key): r.raise_for_status() feature = r.json() return feature - - -class RequestsEventConsumer(Thread, EventConsumer): - def __init__(self, event_queue, api_key, config): - Thread.__init__(self) - self._session = requests.Session() - self.daemon = True - self._api_key = api_key - self._config = config - self._queue = event_queue - self._running = False - - def run(self): - log.debug("Starting event consumer") - self._running = True - while self._running: - self.send() - - def stop(self): - self._running = False - - def flush(self): - self._queue.join() - - def send_batch(self, events): - def do_send(should_retry): - # noinspection PyBroadException - try: - if isinstance(events, dict): - body = [events] - else: - body = events - hdrs = _headers(self._api_key) - uri = self._config.events_uri + '/bulk' - r = self._session.post(uri, headers=hdrs, timeout=(self._config.connect, self._config.read_timeout), - data=json.dumps(body)) - r.raise_for_status() - except ProtocolError as e: - inner = e.args[1] - if inner.errno == errno.ECONNRESET and should_retry: - log.warning( - 
'ProtocolError exception caught while sending events. Retrying.') - do_send(False) - else: - log.exception( - 'Unhandled exception in event consumer. Analytics events were not processed.') - except: - log.exception( - 'Unhandled exception in event consumer. Analytics events were not processed.') - - try: - do_send(True) - finally: - for _ in events: - self._queue.task_done() - - def send(self): - events = self.next() - - if len(events) == 0: - return - else: - self.send_batch(events) - - def next(self): - q = self._queue - items = [] - - item = self.next_item() - if item is None: - return items - - items.append(item) - while len(items) < self._config.upload_limit and not q.empty(): - item = self.next_item() - if item: - items.append(item) - - return items - - def next_item(self): - q = self._queue - # noinspection PyBroadException - try: - item = q.get(block=True, timeout=5) - return item - except Exception: - return None diff --git a/ldclient/streaming.py b/ldclient/streaming.py index d51d6919..5b8a6208 100644 --- a/ldclient/streaming.py +++ b/ldclient/streaming.py @@ -50,7 +50,7 @@ def process_message(store, requester, msg): key = payload['data'] store.upsert(key, requester.get(key)) elif msg.event == "indirect/put": - store.init(requester.getAll()) + store.init(requester.get_all()) elif msg.event == 'delete': key = payload['path'][1:] # noinspection PyShadowingNames diff --git a/ldclient/twisted_impls.py b/ldclient/twisted_impls.py index f5efe100..e030b0a7 100644 --- a/ldclient/twisted_impls.py +++ b/ldclient/twisted_impls.py @@ -22,7 +22,7 @@ def __init__(self, api_key, config): self._session = CacheControl(txrequests.Session()) self._config = config - def getAll(self): + def get_all(self): @defer.inlineCallbacks def run(should_retry): # noinspection PyBroadException diff --git a/testing/test_ldclient.py b/testing/test_ldclient.py index 7c9bc6b6..c9a9686e 100644 --- a/testing/test_ldclient.py +++ b/testing/test_ldclient.py @@ -95,7 +95,7 @@ class MockFeatureRequester(FeatureRequester): def __init__(self, *_): pass - def getAll(self): + def get_all(self): pass @@ -242,7 +242,7 @@ class ExceptionFeatureRequester(FeatureRequester): def __init__(self, *_): pass - def getAll(self): + def get_all(self): raise Exception("blah") client = LDClient("API_KEY", Config("http://localhost:3000", defaults={"foo": "bar"}, From c34a29fa38ad7227fb88dcce8615ca79c5fc848b Mon Sep 17 00:00:00 2001 From: Dan Richelson Date: Fri, 1 Jul 2016 17:44:09 -0700 Subject: [PATCH 11/42] Rename some config params. 
Add Redis feature store + tests --- ldclient/client.py | 24 ++-- .../{requester.py => feature_requester.py} | 0 ldclient/redis_feature_store.py | 118 +++++++++++++++++ ldclient/streaming.py | 2 +- ldclient/twisted_impls.py | 2 +- ldclient/twisted_redis.py | 4 +- ldclient/twisted_sse.py | 6 +- ldd/test_ldd.py | 2 +- requirements.txt | 3 +- testing/test_feature_store.py | 119 ++++++++++++++++++ testing/test_inmemoryfeaturestore.py | 82 ------------ twisted-requirements.txt | 1 - 12 files changed, 260 insertions(+), 103 deletions(-) rename ldclient/{requester.py => feature_requester.py} (100%) create mode 100644 ldclient/redis_feature_store.py create mode 100644 testing/test_feature_store.py delete mode 100644 testing/test_inmemoryfeaturestore.py diff --git a/ldclient/client.py b/ldclient/client.py index 11851c25..ee64b719 100644 --- a/ldclient/client.py +++ b/ldclient/client.py @@ -6,10 +6,10 @@ from builtins import object from ldclient.event_consumer import EventConsumerImpl +from ldclient.feature_requester import FeatureRequesterImpl from ldclient.feature_store import InMemoryFeatureStore from ldclient.interfaces import FeatureStore from ldclient.polling import PollingUpdateProcessor -from ldclient.requester import FeatureRequesterImpl from ldclient.streaming import StreamingUpdateProcessor from ldclient.util import check_uwsgi, _evaluate, log @@ -33,13 +33,13 @@ def __init__(self, events_uri='https://events.launchdarkly.com', connect_timeout=2, read_timeout=10, - upload_limit=100, - capacity=10000, + events_upload_max_batch_size=100, + events_max_pending=10000, stream_uri='https://stream.launchdarkly.com', stream=True, - verify=True, + verify_ssl=True, defaults=None, - events=True, + events_enabled=True, update_processor_class=None, poll_interval=1, use_ldd=False, @@ -83,13 +83,13 @@ def __init__(self, self.feature_store = feature_store self.event_consumer_class = EventConsumerImpl if not event_consumer_class else event_consumer_class self.feature_requester_class = FeatureRequesterImpl if not feature_requester_class else feature_requester_class - self.connect = connect_timeout + self.connect_timeout = connect_timeout self.read_timeout = read_timeout - self.upload_limit = upload_limit - self.capacity = capacity - self.verify = verify + self.events_enabled = events_enabled + self.events_upload_max_batch_size = events_upload_max_batch_size + self.events_max_pending = events_max_pending + self.verify_ssl = verify_ssl self.defaults = defaults - self.events = events self.offline = offline def get_default(self, key, default): @@ -106,7 +106,7 @@ def __init__(self, api_key, config=None): self._api_key = api_key self._config = config or Config.default() self._session = CacheControl(requests.Session()) - self._queue = queue.Queue(self._config.capacity) + self._queue = queue.Queue(self._config.events_max_pending) self._event_consumer = None self._lock = Lock() @@ -146,7 +146,7 @@ def _stop_consumers(self): self._update_processor.stop() def _send(self, event): - if self._config.offline or not self._config.events: + if self._config.offline or not self._config.events_enabled: return self._check_consumer() event['creationDate'] = int(time.time() * 1000) diff --git a/ldclient/requester.py b/ldclient/feature_requester.py similarity index 100% rename from ldclient/requester.py rename to ldclient/feature_requester.py diff --git a/ldclient/redis_feature_store.py b/ldclient/redis_feature_store.py new file mode 100644 index 00000000..d7b254db --- /dev/null +++ b/ldclient/redis_feature_store.py @@ -0,0 +1,118 
@@ +import json + +import redis + +from ldclient.expiringdict import ExpiringDict +from ldclient.interfaces import FeatureStore + +INIT_KEY = "$initialized$" + + +class ForgetfulDict(dict): + def __setitem__(self, key, value): + pass + + +class RedisFeatureStore(FeatureStore): + def __init__(self, + host='localhost', + port=6379, + prefix='launchdarkly', + max_connections=16, + expiration=15, + capacity=1000): + + self._features_key = "{}:features".format(prefix) + self._cache = ForgetfulDict() if expiration == 0 else ExpiringDict(max_len=capacity, + max_age_seconds=expiration) + self._pool = redis.ConnectionPool(max_connections=max_connections, + host=host, + port=port, + db=0) + + def init(self, features): + pipe = redis.Redis(connection_pool=self._pool).pipeline() + pipe.delete(self._features_key) + + self._cache.clear() + for k, f in features.iteritems(): + f_json = json.dumps(f, encoding='utf-8') + pipe.hset(self._features_key, k, f_json) + self._cache[k] = f + pipe.execute() + + def all(self): + r = redis.Redis(connection_pool=self._pool) + all_features = r.hgetall(self._features_key) + results = {} + for f_json in all_features: + f = json.loads(f_json.decode('utf-8')) + if 'deleted' in f and f['deleted'] is False: + results[f['key']] = f + return results + + def get(self, key): + f = self._cache.get(key) + if f: + # reset ttl + self._cache[key] = f + if 'deleted' in f and f['deleted']: + return None + return f + + r = redis.Redis(connection_pool=self._pool) + f_json = r.hget(self._features_key, key) + if f_json: + f = json.loads(f_json.decode('utf-8')) + if f: + if 'deleted' in f and f['deleted']: + return None + self._cache[key] = f + return f + + return None + + def delete(self, key, version): + r = redis.Redis(connection_pool=self._pool) + r.watch(self._features_key) + f_json = r.hget(self._features_key, key) + if f_json: + f = json.loads(f_json.decode('utf-8')) + if f is not None and f['version'] < version: + f['deleted'] = True + f['version'] = version + elif f is None: + f = {'deleted': True, 'version': version} + f_json = json.dumps(f, encoding='utf-8') + r.hset(self._features_key, key, f_json) + self._cache[key] = f + r.unwatch() + + @property + def initialized(self): + initialized = self._cache.get(INIT_KEY) + if initialized: + # reset ttl + self._cache[INIT_KEY] = True + return True + + r = redis.Redis(connection_pool=self._pool) + if r.exists(self._features_key): + self._cache[INIT_KEY] = True + return True + return False + + def upsert(self, key, feature): + r = redis.Redis(connection_pool=self._pool) + r.watch(self._features_key) + old = self.get(key) + if old: + if old['version'] >= feature['version']: + r.unwatch() + return + + feature_json = json.dumps(feature, encoding='utf-8') + r.hset(self._features_key, key, feature_json) + self._cache[key] = feature + r.unwatch() + diff --git a/ldclient/streaming.py b/ldclient/streaming.py index 5b8a6208..dcdc4e5d 100644 --- a/ldclient/streaming.py +++ b/ldclient/streaming.py @@ -23,7 +23,7 @@ def run(self): self._running = True hdrs = _stream_headers(self._api_key) uri = self._config.stream_features_uri - messages = SSEClient(uri, verify=self._config.verify, headers=hdrs) + messages = SSEClient(uri, verify=self._config.verify_ssl, headers=hdrs) for msg in messages: if not self._running: break diff --git a/ldclient/twisted_impls.py b/ldclient/twisted_impls.py index e030b0a7..fefcda39 100644 --- a/ldclient/twisted_impls.py +++ b/ldclient/twisted_impls.py @@ -73,7 +73,7 @@ def __init__(self, api_key, config, store, requester): 
self._requester = requester self.sse_client = TwistedSSEClient(config.stream_features_uri, headers=_stream_headers(api_key, "PythonTwistedClient"), - verify=config.verify, + verify_ssl=config.verify_ssl, on_event=partial(StreamingUpdateProcessor.process_message, self._store, self._requester)) diff --git a/ldclient/twisted_redis.py b/ldclient/twisted_redis.py index c31c49dd..a0e81957 100644 --- a/ldclient/twisted_redis.py +++ b/ldclient/twisted_redis.py @@ -1,8 +1,10 @@ import json -from ldclient.interfaces import StreamProcessor, UpdateProcessor + from twisted.internet import task, defer, protocol, reactor from txredis.client import RedisClient +from ldclient.interfaces import UpdateProcessor + # noinspection PyUnusedLocal def create_redis_ldd_processor(api_key, config, store, **kwargs): diff --git a/ldclient/twisted_sse.py b/ldclient/twisted_sse.py index 745d7f20..f76bf3f8 100644 --- a/ldclient/twisted_sse.py +++ b/ldclient/twisted_sse.py @@ -17,9 +17,9 @@ def getContext(self, *_): class TwistedSSEClient(object): - def __init__(self, url, headers, verify, on_event): + def __init__(self, url, headers, verify_ssl, on_event): self.url = url + "/features" - self.verify = verify + self.verify_ssl = verify_ssl self.headers = headers self.on_event = on_event self.on_error_retry = 30 @@ -53,7 +53,7 @@ def connect(self, last_id=None): headers = dict([(x, [y.encode('utf-8')]) for x, y in headers.items()]) url = self.url.encode('utf-8') from twisted.internet import reactor - if self.verify: + if self.verify_ssl: agent = Agent(reactor) else: agent = Agent(reactor, NoValidationContextFactory()) diff --git a/ldd/test_ldd.py b/ldd/test_ldd.py index 46bb9e44..060f760b 100644 --- a/ldd/test_ldd.py +++ b/ldd/test_ldd.py @@ -28,7 +28,7 @@ def fin(): def test_sse_init(stream): stream.queue.put(Event(event="put", data=feature("foo", "jim"))) client = LDClient("apikey", Config(feature_requester_class=partial(create_redis_ldd_requester, expiration=0), - events=False)) + events_enabled=False)) wait_until(lambda: client.toggle( "foo", user('xyz'), "blah") == "jim", timeout=10) diff --git a/requirements.txt b/requirements.txt index 5295651d..86e4473d 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,5 @@ CacheControl>=0.10.2 requests>=2.4.0 future>=0.14.3 -sseclient>=0.0.9 \ No newline at end of file +sseclient>=0.0.9 +redis>=2.10.5 \ No newline at end of file diff --git a/testing/test_feature_store.py b/testing/test_feature_store.py new file mode 100644 index 00000000..e0c47173 --- /dev/null +++ b/testing/test_feature_store.py @@ -0,0 +1,119 @@ +import redis + +from ldclient.feature_store import InMemoryFeatureStore +import pytest + +from ldclient.redis_feature_store import RedisFeatureStore, INIT_KEY + + +class TestFeatureStore: + params = [InMemoryFeatureStore()] + test_redis = True + redis_host = 'localhost' + redis_port = 6379 + + def in_memory(self): + return InMemoryFeatureStore() + + def redis_with_local_cache(self): + r = redis.StrictRedis(host=self.redis_host, port=self.redis_port, db=0) + r.delete("launchdarkly:features") + return RedisFeatureStore(host=self.redis_host, port=self.redis_port) + + def redis_no_local_cache(self): + r = redis.StrictRedis(host=self.redis_host, port=self.redis_port, db=0) + r.delete("launchdarkly:features") + return RedisFeatureStore(host=self.redis_host, port=self.redis_port, expiration=0) + + params = [in_memory, redis_with_local_cache, redis_no_local_cache] + + # @classmethod + # def setup_class(cls): + # # test_redis = True + # # if test_redis: + # 
cls.redis_host = 'localhost' + # cls.redis_port = 6379 + # cls.params = [InMemoryFeatureStore(), RedisFeatureStore(host=cls.redis_host, port=cls.redis_port)] + + @pytest.fixture(params=params) + def store(self, request): + return request.param(self) + + @staticmethod + def make_feature(key, ver): + return { + u'key': key, + u'version': ver, + u'salt': u'abc', + u'on': True, + u'variations': [ + { + u'value': True, + u'weight': 100, + u'targets': [] + }, + { + u'value': False, + u'weight': 0, + u'targets': [] + } + ] + } + + def base_initialized_store(self, store): + store.init({ + 'foo': self.make_feature('foo', 10), + 'bar': self.make_feature('bar', 10), + }) + return store + + def test_not_initially_initialized(self, store): + assert store.initialized is False + + def test_initialized(self, store): + store = self.base_initialized_store(store) + assert store.initialized is True + + def test_get_existing_feature(self, store): + store = self.base_initialized_store(store) + expected = self.make_feature('foo', 10) + assert store.get('foo') == expected + + def test_get_nonexisting_feature(self, store): + store = self.base_initialized_store(store) + assert store.get('biz') is None + + def test_upsert_with_newer_version(self, store): + store = self.base_initialized_store(store) + new_ver = self.make_feature('foo', 11) + store.upsert('foo', new_ver) + assert store.get('foo') == new_ver + + def test_upsert_with_older_version(self, store): + store = self.base_initialized_store(store) + new_ver = self.make_feature('foo', 9) + expected = self.make_feature('foo', 10) + store.upsert('foo', new_ver) + assert store.get('foo') == expected + + def test_upsert_with_new_feature(self, store): + store = self.base_initialized_store(store) + new_ver = self.make_feature('biz', 1) + store.upsert('biz', new_ver) + assert store.get('biz') == new_ver + + def test_delete_with_newer_version(self, store): + store = self.base_initialized_store(store) + store.delete('foo', 11) + assert store.get('foo') is None + + def test_delete_unknown_feature(self, store): + store = self.base_initialized_store(store) + store.delete('biz', 11) + assert store.get('biz') is None + + def test_delete_with_older_version(self, store): + store = self.base_initialized_store(store) + store.delete('foo', 9) + expected = self.make_feature('foo', 10) + assert store.get('foo') == expected diff --git a/testing/test_inmemoryfeaturestore.py b/testing/test_inmemoryfeaturestore.py deleted file mode 100644 index 14320a49..00000000 --- a/testing/test_inmemoryfeaturestore.py +++ /dev/null @@ -1,82 +0,0 @@ -from ldclient.feature_store import InMemoryFeatureStore -import pytest - -def make_feature(key, ver): - return { - u'key': key, - u'version': ver, - u'salt': u'abc', - u'on': True, - u'variations': [ - { - u'value': True, - u'weight': 100, - u'targets': [] - }, - { - u'value': False, - u'weight': 0, - u'targets': [] - } - ] - } - -def base_initialized_store(): - store = InMemoryFeatureStore() - store.init({ - 'foo': make_feature('foo', 10), - 'bar': make_feature('bar', 10), - }) - return store - -def test_not_initially_initialized(): - store = InMemoryFeatureStore() - assert store.initialized == False - -def test_initialized(): - store = base_initialized_store() - assert store.initialized == True - -def test_get_existing_feature(): - store = base_initialized_store() - expected = make_feature('foo', 10) - assert store.get('foo') == expected - -def test_get_nonexisting_feature(): - store = base_initialized_store() - assert store.get('biz') is None - -def 
test_upsert_with_newer_version(): - store = base_initialized_store() - new_ver = make_feature('foo', 11) - store.upsert('foo', new_ver) - assert store.get('foo') == new_ver - -def test_upsert_with_older_version(): - store = base_initialized_store() - new_ver = make_feature('foo', 9) - expected = make_feature('foo', 10) - store.upsert('foo', new_ver) - assert store.get('foo') == expected - -def test_upsert_with_new_feature(): - store = base_initialized_store() - new_ver = make_feature('biz', 1) - store.upsert('biz', new_ver) - assert store.get('biz') == new_ver - -def test_delete_with_newer_version(): - store = base_initialized_store() - store.delete('foo', 11) - assert store.get('foo') is None - -def test_delete_unknown_feature(): - store = base_initialized_store() - store.delete('biz', 11) - assert store.get('biz') is None - -def test_delete_with_older_version(): - store = base_initialized_store() - store.delete('foo', 9) - expected = make_feature('foo', 10) - assert store.get('foo') == expected \ No newline at end of file diff --git a/twisted-requirements.txt b/twisted-requirements.txt index fd925dba..787ab140 100644 --- a/twisted-requirements.txt +++ b/twisted-requirements.txt @@ -1,5 +1,4 @@ txrequests>=0.9 pyOpenSSL>=0.14 -txredis>=2.3 cryptography>=1.0 service_identity>=16.0 \ No newline at end of file From d8ae8aa4c6bab0699031f057b30673d9c2f68da0 Mon Sep 17 00:00:00 2001 From: Dan Richelson Date: Fri, 1 Jul 2016 17:48:51 -0700 Subject: [PATCH 12/42] update some redis-related things --- CONTRIBUTING.md | 2 +- circle.yml | 3 +++ testing/test_feature_store.py | 15 ++------------- 3 files changed, 6 insertions(+), 14 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 437ea54d..b564861d 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -18,6 +18,6 @@ Development information (for developing this module itself) pip install -r test-requirements.txt pip install -r twisted-requirements.txt -1. Run tests: +1. Run tests: You'll need redis running locally on its default port of 6379. 
$ py.test testing diff --git a/circle.yml b/circle.yml index 393d32cf..7250c793 100644 --- a/circle.yml +++ b/circle.yml @@ -1,3 +1,6 @@ +machine: + services: + - redis dependencies: pre: - pyenv shell 2.7.10; $(pyenv which pip) install --upgrade pip setuptools diff --git a/testing/test_feature_store.py b/testing/test_feature_store.py index e0c47173..338e6373 100644 --- a/testing/test_feature_store.py +++ b/testing/test_feature_store.py @@ -1,14 +1,11 @@ +import pytest import redis from ldclient.feature_store import InMemoryFeatureStore -import pytest - -from ldclient.redis_feature_store import RedisFeatureStore, INIT_KEY +from ldclient.redis_feature_store import RedisFeatureStore class TestFeatureStore: - params = [InMemoryFeatureStore()] - test_redis = True redis_host = 'localhost' redis_port = 6379 @@ -27,14 +24,6 @@ def redis_no_local_cache(self): params = [in_memory, redis_with_local_cache, redis_no_local_cache] - # @classmethod - # def setup_class(cls): - # # test_redis = True - # # if test_redis: - # cls.redis_host = 'localhost' - # cls.redis_port = 6379 - # cls.params = [InMemoryFeatureStore(), RedisFeatureStore(host=cls.redis_host, port=cls.redis_port)] - @pytest.fixture(params=params) def store(self, request): return request.param(self) From b6b415158837b976eebe07658745ba06f22f0775 Mon Sep 17 00:00:00 2001 From: Dan Richelson Date: Fri, 1 Jul 2016 18:00:27 -0700 Subject: [PATCH 13/42] Add wait loop at start --- demo/demo.py | 1 - ldclient/client.py | 9 ++++++++- ldclient/feature_requester.py | 2 +- 3 files changed, 9 insertions(+), 3 deletions(-) diff --git a/demo/demo.py b/demo/demo.py index 4bd5cffc..09bf6908 100644 --- a/demo/demo.py +++ b/demo/demo.py @@ -18,7 +18,6 @@ config = Config(stream=False) client = LDClient(apiKey, config) user = {u'key': 'userKey'} - time.sleep(5) print(client.toggle("update-app", user, False)) print(client.api_key) diff --git a/ldclient/client.py b/ldclient/client.py index ee64b719..90a129c3 100644 --- a/ldclient/client.py +++ b/ldclient/client.py @@ -101,7 +101,7 @@ def default(cls): class LDClient(object): - def __init__(self, api_key, config=None): + def __init__(self, api_key, config=None, start_wait=5): check_uwsgi() self._api_key = api_key self._config = config or Config.default() @@ -125,7 +125,14 @@ def __init__(self, api_key, config=None): log.info("Started LaunchDarkly Client in offline mode") return + start_time = time.time() self._update_processor.start() + while not self._update_processor.initialized(): + if time.time() - start_time > start_wait: + log.warn("Timeout encountered waiting for LaunchDarkly Client initialization") + return + time.sleep(0.1) + log.info("Started LaunchDarkly Client") @property diff --git a/ldclient/feature_requester.py b/ldclient/feature_requester.py index c30178ad..203be2fe 100644 --- a/ldclient/feature_requester.py +++ b/ldclient/feature_requester.py @@ -17,7 +17,7 @@ def get_all(self): hdrs = _headers(self._api_key) uri = self._config.get_latest_features_uri r = self._session.get(uri, headers=hdrs, timeout=( - self._config.connect, self._config.read_timeout)) + self._config.connect_timeout, self._config.read_timeout)) r.raise_for_status() features = r.json() return features From 40f5443cefdb14b61df3c6abdad8bb4d1cd68d84 Mon Sep 17 00:00:00 2001 From: Dan Richelson Date: Fri, 1 Jul 2016 18:15:15 -0700 Subject: [PATCH 14/42] clean up uris, etc --- ldclient/client.py | 5 ++--- ldclient/event_consumer.py | 2 +- ldclient/streaming.py | 2 +- ldclient/twisted_impls.py | 7 ++++--- redis-requirements.txt | 2 +- 
requirements.txt | 3 +-- 6 files changed, 10 insertions(+), 11 deletions(-) diff --git a/ldclient/client.py b/ldclient/client.py index 90a129c3..aa3516b6 100644 --- a/ldclient/client.py +++ b/ldclient/client.py @@ -64,9 +64,8 @@ def __init__(self, self.base_uri = base_uri.rstrip('\\') self.get_latest_features_uri = self.base_uri + GET_LATEST_FEATURES_PATH - self.events_uri = events_uri.rstrip('\\') - self.stream_uri = stream_uri.rstrip('\\') - self.stream_features_uri = self.stream_uri + STREAM_FEATURES_PATH + self.events_uri = events_uri.rstrip('\\') + '/bulk' + self.stream_uri = stream_uri.rstrip('\\') + STREAM_FEATURES_PATH if update_processor_class: self.update_processor_class = update_processor_class diff --git a/ldclient/event_consumer.py b/ldclient/event_consumer.py index 11a5ee5f..b4c5ac72 100644 --- a/ldclient/event_consumer.py +++ b/ldclient/event_consumer.py @@ -43,7 +43,7 @@ def do_send(should_retry): else: body = events hdrs = _headers(self._api_key) - uri = self._config.events_uri + '/bulk' + uri = self._config.events_uri r = self._session.post(uri, headers=hdrs, timeout=(self._config.connect, self._config.read_timeout), data=json.dumps(body)) r.raise_for_status() diff --git a/ldclient/streaming.py b/ldclient/streaming.py index dcdc4e5d..93a8a6db 100644 --- a/ldclient/streaming.py +++ b/ldclient/streaming.py @@ -22,7 +22,7 @@ def run(self): log.debug("Starting StreamingUpdateProcessor") self._running = True hdrs = _stream_headers(self._api_key) - uri = self._config.stream_features_uri + uri = self._config.stream_uri messages = SSEClient(uri, verify=self._config.verify_ssl, headers=hdrs) for msg in messages: if not self._running: diff --git a/ldclient/twisted_impls.py b/ldclient/twisted_impls.py index fefcda39..018724c0 100644 --- a/ldclient/twisted_impls.py +++ b/ldclient/twisted_impls.py @@ -71,7 +71,7 @@ def close(self): def __init__(self, api_key, config, store, requester): self._store = store self._requester = requester - self.sse_client = TwistedSSEClient(config.stream_features_uri, + self.sse_client = TwistedSSEClient(config.stream_uri, headers=_stream_headers(api_key, "PythonTwistedClient"), verify_ssl=config.verify_ssl, on_event=partial(StreamingUpdateProcessor.process_message, @@ -144,8 +144,9 @@ def do_send(should_retry): else: body = events hdrs = _headers(self._api_key) - uri = self._config.events_uri + '/bulk' - r = yield self._session.post(uri, headers=hdrs, timeout=(self._config.connect, self._config.read), + r = yield self._session.post(self._config.events_uri, + headers=hdrs, + timeout=(self._config.connect, self._config.read), data=json.dumps(body)) r.raise_for_status() except ProtocolError as e: diff --git a/redis-requirements.txt b/redis-requirements.txt index dc4f9bfd..e3fc618b 100644 --- a/redis-requirements.txt +++ b/redis-requirements.txt @@ -1 +1 @@ -redis>=2.10 +redis>=2.10.5 \ No newline at end of file diff --git a/requirements.txt b/requirements.txt index 86e4473d..5295651d 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,4 @@ CacheControl>=0.10.2 requests>=2.4.0 future>=0.14.3 -sseclient>=0.0.9 -redis>=2.10.5 \ No newline at end of file +sseclient>=0.0.9 \ No newline at end of file From 110499fb6c2029c2302548ffc46e3cf22aea30d9 Mon Sep 17 00:00:00 2001 From: Dan Richelson Date: Fri, 1 Jul 2016 19:41:41 -0700 Subject: [PATCH 15/42] Rename more things. Remove some outdated twisted impls. 
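A condensed sketch of the resulting lifecycle (placeholders taken from the updated demo; not part of the diff below): shutdown now goes through the public close() instead of the private _stop_consumers().

    from ldclient import LDClient

    client = LDClient('your api key')  # blocks up to start_wait seconds while the update processor initializes
    user = {u'key': 'userKey'}
    print(client.toggle("update-app", user, False))
    client.close()  # stops the event consumer and update processor threads
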
--- demo/demo.py | 9 ++--- ldclient/client.py | 12 ++++++- ldclient/event_consumer.py | 6 ++-- ldclient/feature_requester.py | 6 ++-- ldclient/redis_requester.py | 57 ----------------------------- ldclient/twisted_redis.py | 67 ----------------------------------- ldclient/twisted_sse.py | 2 +- testing/server_util.py | 1 - testing/test_ldclient.py | 1 - 9 files changed, 25 insertions(+), 136 deletions(-) delete mode 100644 ldclient/redis_requester.py delete mode 100644 ldclient/twisted_redis.py diff --git a/demo/demo.py b/demo/demo.py index 09bf6908..442ff5fa 100644 --- a/demo/demo.py +++ b/demo/demo.py @@ -15,10 +15,11 @@ if __name__ == '__main__': apiKey = 'your api key' - config = Config(stream=False) - client = LDClient(apiKey, config) + client = LDClient(apiKey) + print(client.api_key) + user = {u'key': 'userKey'} print(client.toggle("update-app", user, False)) - print(client.api_key) - client._stop_consumers() + time.sleep(10) + client.close() diff --git a/ldclient/client.py b/ldclient/client.py index aa3516b6..9b5825f8 100644 --- a/ldclient/client.py +++ b/ldclient/client.py @@ -124,6 +124,13 @@ def __init__(self, api_key, config=None, start_wait=5): log.info("Started LaunchDarkly Client in offline mode") return + if self._config.use_ldd: + if self._store.__class__ == "RedisFeatureStore": + log.info("Started LaunchDarkly Client in LDD mode") + return + log.error("LDD mode requires a RedisFeatureStore.") + return + start_time = time.time() self._update_processor.start() while not self._update_processor.initialized(): @@ -145,7 +152,10 @@ def _check_consumer(self): self._queue, self._api_key, self._config) self._event_consumer.start() - def _stop_consumers(self): + def close(self): + log.info("Closing LaunchDarkly client..") + if self.is_offline(): + return if self._event_consumer and self._event_consumer.is_alive(): self._event_consumer.stop() if self._update_processor and self._update_processor.is_alive(): diff --git a/ldclient/event_consumer.py b/ldclient/event_consumer.py index b4c5ac72..1990285e 100644 --- a/ldclient/event_consumer.py +++ b/ldclient/event_consumer.py @@ -44,7 +44,9 @@ def do_send(should_retry): body = events hdrs = _headers(self._api_key) uri = self._config.events_uri - r = self._session.post(uri, headers=hdrs, timeout=(self._config.connect, self._config.read_timeout), + r = self._session.post(uri, + headers=hdrs, + timeout=(self._config.connect_timeout, self._config.read_timeout), data=json.dumps(body)) r.raise_for_status() except ProtocolError as e: @@ -83,7 +85,7 @@ def next(self): return items items.append(item) - while len(items) < self._config.upload_limit and not q.empty(): + while len(items) < self._config.events_upload_max_batch_size and not q.empty(): item = self.next_item() if item: items.append(item) diff --git a/ldclient/feature_requester.py b/ldclient/feature_requester.py index 203be2fe..1a31cb54 100644 --- a/ldclient/feature_requester.py +++ b/ldclient/feature_requester.py @@ -25,8 +25,10 @@ def get_all(self): def get(self, key): hdrs = _headers(self._api_key) uri = self._config.get_latest_features_uri + '/' + key - r = self._session.get(uri, headers=hdrs, timeout=( - self._config.connect, self._config.read_timeout)) + r = self._session.get(uri, + headers=hdrs, + timeout=(self._config.connect_timeout, + self._config.read_timeout)) r.raise_for_status() feature = r.json() return feature diff --git a/ldclient/redis_requester.py b/ldclient/redis_requester.py deleted file mode 100644 index 74a2a352..00000000 --- a/ldclient/redis_requester.py +++ 
/dev/null @@ -1,57 +0,0 @@ -import json -from ldclient.expiringdict import ExpiringDict -from ldclient.interfaces import FeatureRequester -import redis - - -# noinspection PyUnusedLocal -def create_redis_ldd_requester(api_key, config, **kwargs): - return RedisLDDRequester(config, **kwargs) - - -class ForgetfulDict(dict): - - def __setitem__(self, key, value): - pass - - -class RedisLDDRequester(FeatureRequester): - """ - Requests features from redis, usually stored via the LaunchDarkly Daemon (LDD). Recommended to be combined - with the ExpiringInMemoryFeatureStore - """ - - def __init__(self, config, - expiration=15, - redis_host='localhost', - redis_port=6379, - redis_prefix='launchdarkly'): - """ - :type config: Config - """ - self._redis_host = redis_host - self._redis_port = redis_port - self._features_key = "{}:features".format(redis_prefix) - self._cache = ForgetfulDict() if expiration == 0 else ExpiringDict(max_len=config.capacity, - max_age_seconds=expiration) - self._pool = None - - def _get_connection(self): - if self._pool is None: - self._pool = redis.ConnectionPool( - host=self._redis_host, port=self._redis_port) - return redis.Redis(connection_pool=self._pool) - - def get(self, key, callback): - cached = self._cache.get(key) - if cached is not None: - return callback(cached) - else: - rd = self._get_connection() - raw = rd.hget(self._features_key, key) - if raw: - val = json.loads(raw.decode('utf-8')) - else: - val = None - self._cache[key] = val - return callback(val) diff --git a/ldclient/twisted_redis.py b/ldclient/twisted_redis.py deleted file mode 100644 index a0e81957..00000000 --- a/ldclient/twisted_redis.py +++ /dev/null @@ -1,67 +0,0 @@ -import json - -from twisted.internet import task, defer, protocol, reactor -from txredis.client import RedisClient - -from ldclient.interfaces import UpdateProcessor - - -# noinspection PyUnusedLocal -def create_redis_ldd_processor(api_key, config, store, **kwargs): - return TwistedRedisLDDStreamProcessor(store, **kwargs) - - -class TwistedRedisLDDStreamProcessor(UpdateProcessor): - def close(self): - pass - # TODO: implement - - def initialized(self): - pass - # TODO: implement - - def __init__(self, store, update_delay=15, redis_host='localhost', - redis_port=6379, - redis_prefix='launchdarkly'): - self._running = False - - if update_delay == 0: - update_delay = .5 - self._update_delay = update_delay - - self._store = store - """ :type: ldclient.interfaces.FeatureStore """ - - self._features_key = "{}:features".format(redis_prefix) - self._redis_host = redis_host - self._redis_port = redis_port - self._looping_call = None - - def start(self): - self._running = True - self._looping_call = task.LoopingCall(self._refresh) - self._looping_call.start(self._update_delay) - - def stop(self): - self._looping_call.stop() - - def is_alive(self): - return self._looping_call is not None and self._looping_call.running - - def _get_connection(self): - client_creator = protocol.ClientCreator(reactor, RedisClient) - return client_creator.connectTCP(self._redis_host, self._redis_port) - - @defer.inlineCallbacks - def _refresh(self): - redis = yield self._get_connection() - """ :type: RedisClient """ - result = yield redis.hgetall(self._features_key) - if result: - data = {} - for key, value in result.items(): - if value: - data[key] = json.loads(value.decode('utf-8')) - self._store.init(data) - else: - self._store.init({}) diff --git a/ldclient/twisted_sse.py b/ldclient/twisted_sse.py index f76bf3f8..b78c98ef 100644 --- a/ldclient/twisted_sse.py 
+++ b/ldclient/twisted_sse.py @@ -18,7 +18,7 @@ def getContext(self, *_): class TwistedSSEClient(object): def __init__(self, url, headers, verify_ssl, on_event): - self.url = url + "/features" + self.url = url self.verify_ssl = verify_ssl self.headers = headers self.on_event = on_event diff --git a/testing/server_util.py b/testing/server_util.py index a847b2a3..b2d3e629 100644 --- a/testing/server_util.py +++ b/testing/server_util.py @@ -150,7 +150,6 @@ def feed_forever(handler): if event: lines = "event: {event}\ndata: {data}\n\n".format(event=event.event, data=json.dumps(event.data)) - print("returning {}".format(lines)) handler.wfile.write(lines.encode('utf-8')) except Empty: pass diff --git a/testing/test_ldclient.py b/testing/test_ldclient.py index c9a9686e..e263692c 100644 --- a/testing/test_ldclient.py +++ b/testing/test_ldclient.py @@ -225,7 +225,6 @@ def test_defaults_and_online(): event_consumer_class=MockConsumer, feature_requester_class=MockFeatureRequester, feature_store=InMemoryFeatureStore())) actual = my_client.toggle('foo', user, default="originalDefault") - print(str(actual)) assert actual == expected assert wait_for_event(my_client, lambda e: e['kind'] == 'feature' and e['key'] == u'foo' and e['user'] == user) From 52073d880bd7d92286acd69705f7cfba77019716 Mon Sep 17 00:00:00 2001 From: Dan Richelson Date: Sun, 3 Jul 2016 13:42:44 -0700 Subject: [PATCH 16/42] Change client init logic to handle LDD mode. Make other things more consistent --- ldclient/client.py | 43 ++++++++++++++++++++++----------------- ldclient/interfaces.py | 9 +++++++- ldclient/noop.py | 10 --------- ldclient/polling.py | 4 ++-- ldclient/streaming.py | 5 +++-- ldclient/twisted_impls.py | 2 +- ldclient/version.py | 2 +- ldd/test_ldd.py | 7 +++++-- test-requirements.txt | 1 + 9 files changed, 45 insertions(+), 38 deletions(-) delete mode 100644 ldclient/noop.py diff --git a/ldclient/client.py b/ldclient/client.py index 9b5825f8..d13bb3f5 100644 --- a/ldclient/client.py +++ b/ldclient/client.py @@ -44,7 +44,7 @@ def __init__(self, poll_interval=1, use_ldd=False, feature_store=InMemoryFeatureStore(), - feature_requester_class=FeatureRequesterImpl, + feature_requester_class=None, event_consumer_class=None, offline=False): """ @@ -66,22 +66,15 @@ def __init__(self, self.get_latest_features_uri = self.base_uri + GET_LATEST_FEATURES_PATH self.events_uri = events_uri.rstrip('\\') + '/bulk' self.stream_uri = stream_uri.rstrip('\\') + STREAM_FEATURES_PATH - - if update_processor_class: - self.update_processor_class = update_processor_class - else: - if stream: - self.update_processor_class = StreamingUpdateProcessor - else: - self.update_processor_class = PollingUpdateProcessor - + self.update_processor_class = update_processor_class + self.stream = stream if poll_interval < 1: poll_interval = 1 self.poll_interval = poll_interval self.use_ldd = use_ldd self.feature_store = feature_store self.event_consumer_class = EventConsumerImpl if not event_consumer_class else event_consumer_class - self.feature_requester_class = FeatureRequesterImpl if not feature_requester_class else feature_requester_class + self.feature_requester_class = feature_requester_class self.connect_timeout = connect_timeout self.read_timeout = read_timeout self.events_enabled = events_enabled @@ -112,14 +105,6 @@ def __init__(self, api_key, config=None, start_wait=5): self._store = self._config.feature_store """ :type: FeatureStore """ - self._feature_requester = self._config.feature_requester_class( - api_key, self._config) - """ :type: 
FeatureRequester """ - - self._update_processor = self._config.update_processor_class( - api_key, self._config, self._feature_requester, self._store) - """ :type: UpdateProcessor """ - if self._config.offline: log.info("Started LaunchDarkly Client in offline mode") return @@ -131,6 +116,25 @@ def __init__(self, api_key, config=None, start_wait=5): log.error("LDD mode requires a RedisFeatureStore.") return + if self._config.feature_requester_class: + self._feature_requester = self._config.feature_requester_class( + api_key, self._config) + else: + self._feature_requester = FeatureRequesterImpl(api_key, self._config) + """ :type: FeatureRequester """ + + if self._config.update_processor_class: + self._update_processor = self._config.update_processor_class( + api_key, self._config, self._feature_requester, self._store) + else: + if self._config.stream: + self._update_processor = StreamingUpdateProcessor( + api_key, self._config, self._feature_requester, self._store) + else: + self._update_processor = PollingUpdateProcessor( + api_key, self._config, self._feature_requester, self._store) + """ :type: UpdateProcessor """ + start_time = time.time() self._update_processor.start() while not self._update_processor.initialized(): @@ -230,4 +234,5 @@ def _sanitize_user(self, user): if 'key' in user: user['key'] = str(user['key']) + __all__ = ['LDClient', 'Config'] diff --git a/ldclient/interfaces.py b/ldclient/interfaces.py index 26a991ca..d6504503 100644 --- a/ldclient/interfaces.py +++ b/ldclient/interfaces.py @@ -99,6 +99,12 @@ class UpdateProcessor(BackgroundOperation): """ __metaclass__ = ABCMeta + def initialized(self): + """ + Returns whether the update processor has received feature flags and has initialized its feature store. + :rtype: bool + """ + class EventConsumer(BackgroundOperation): """ @@ -119,14 +125,15 @@ class FeatureRequester(object): """ __metaclass__ = ABCMeta - @abstractmethod def get_all(self): """ Gets all feature flags. 
""" + pass def get_one(self, key): """ Gets one Feature flag :return: """ + pass diff --git a/ldclient/noop.py b/ldclient/noop.py deleted file mode 100644 index 4b497088..00000000 --- a/ldclient/noop.py +++ /dev/null @@ -1,10 +0,0 @@ -from ldclient.interfaces import FeatureRequester - - -class NoOpFeatureRequester(FeatureRequester): - - def __init__(self, *_): - pass - - def get(self, key, callback): - return None diff --git a/ldclient/polling.py b/ldclient/polling.py index ad120a75..3dd0712e 100644 --- a/ldclient/polling.py +++ b/ldclient/polling.py @@ -17,7 +17,7 @@ def __init__(self, api_key, config, requester, store): def run(self): if not self._running: - log.debug("Starting PollingUpdateProcessor with request interval: " + str(self._config.poll_interval)) + log.info("Starting PollingUpdateProcessor with request interval: " + str(self._config.poll_interval)) self._running = True while self._running: start_time = time.time() @@ -30,5 +30,5 @@ def initialized(self): return self._running and self._store.initialized def stop(self): - log.debug("Stopping PollingUpdateProcessor") + log.info("Stopping PollingUpdateProcessor") self._running = False diff --git a/ldclient/streaming.py b/ldclient/streaming.py index 93a8a6db..6f978931 100644 --- a/ldclient/streaming.py +++ b/ldclient/streaming.py @@ -19,7 +19,7 @@ def __init__(self, api_key, config, requester, store): self._running = False def run(self): - log.debug("Starting StreamingUpdateProcessor") + log.info("Starting StreamingUpdateProcessor") self._running = True hdrs = _stream_headers(self._api_key) uri = self._config.stream_uri @@ -30,10 +30,11 @@ def run(self): self.process_message(self._store, self._requester, msg) def stop(self): + log.info("Stopping StreamingUpdateProcessor") self._running = False def initialized(self): - return self._running + return self._running and self._store.initialized @staticmethod def process_message(store, requester, msg): diff --git a/ldclient/twisted_impls.py b/ldclient/twisted_impls.py index 018724c0..d377454d 100644 --- a/ldclient/twisted_impls.py +++ b/ldclient/twisted_impls.py @@ -90,7 +90,7 @@ def initialized(self): return self._store.initialized() def is_alive(self): - return self.running + return self.running and self._store.initialized() class TwistedEventConsumer(EventConsumer): diff --git a/ldclient/version.py b/ldclient/version.py index c95652e6..3277f64c 100644 --- a/ldclient/version.py +++ b/ldclient/version.py @@ -1 +1 @@ -VERSION = "0.20.3" +VERSION = "1.0.0" diff --git a/ldd/test_ldd.py b/ldd/test_ldd.py index 060f760b..e661d88d 100644 --- a/ldd/test_ldd.py +++ b/ldd/test_ldd.py @@ -1,10 +1,12 @@ from functools import partial import sys + +from ldclient.redis_feature_store import RedisFeatureStore + sys.path.append("..") sys.path.append("../testing") from ldclient.util import Event -from ldclient.redis_requester import create_redis_ldd_requester import logging from ldclient.client import Config, LDClient import pytest @@ -27,7 +29,8 @@ def fin(): def test_sse_init(stream): stream.queue.put(Event(event="put", data=feature("foo", "jim"))) - client = LDClient("apikey", Config(feature_requester_class=partial(create_redis_ldd_requester, expiration=0), + client = LDClient("apikey", Config(use_ldd=True, + feature_store=RedisFeatureStore(), events_enabled=False)) wait_until(lambda: client.toggle( "foo", user('xyz'), "blah") == "jim", timeout=10) diff --git a/test-requirements.txt b/test-requirements.txt index 659418bc..1e455c0c 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -1,3 
+1,4 @@ pytest>=2.8 pytest-twisted==1.5 pytest-timeout>=1.0 +redis>=2.10.5 From 328a9eb45b12ed2b53b785a0c589161f8f5e0b2c Mon Sep 17 00:00:00 2001 From: Dan Richelson Date: Sun, 3 Jul 2016 15:18:08 -0700 Subject: [PATCH 17/42] Add debug logging --- ldclient/feature_store.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/ldclient/feature_store.py b/ldclient/feature_store.py index 49790fda..6d1fc2ca 100644 --- a/ldclient/feature_store.py +++ b/ldclient/feature_store.py @@ -14,7 +14,11 @@ def get(self, key): try: self._lock.rlock() f = self._features.get(key) - if f is None or 'deleted' in f and f['deleted']: + if f is None: + log.debug("Attempted to get missing feature: " + str(key) + ". Returning None") + return None + if 'deleted' in f and f['deleted']: + log.debug("Attempted to retrieve deleted feature: " + str(key) + ". Returning None") return None return f finally: From b694fe6c5bc558fc53b586c7d409b442fcae7599 Mon Sep 17 00:00:00 2001 From: Dan Richelson Date: Sun, 3 Jul 2016 15:24:13 -0700 Subject: [PATCH 18/42] Add debug logging --- ldclient/feature_store.py | 1 + 1 file changed, 1 insertion(+) diff --git a/ldclient/feature_store.py b/ldclient/feature_store.py index 6d1fc2ca..8bb274f6 100644 --- a/ldclient/feature_store.py +++ b/ldclient/feature_store.py @@ -13,6 +13,7 @@ def __init__(self): def get(self, key): try: self._lock.rlock() + log.debug("All keys in feature store: " + str(self._features.keys())) f = self._features.get(key) if f is None: log.debug("Attempted to get missing feature: " + str(key) + ". Returning None") From e0e9faedc706db6ec5428dce53995eb68451598c Mon Sep 17 00:00:00 2001 From: Dan Richelson Date: Sun, 3 Jul 2016 15:31:24 -0700 Subject: [PATCH 19/42] Update deps --- requirements.txt | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements.txt b/requirements.txt index 5295651d..7b7372bc 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ -CacheControl>=0.10.2 -requests>=2.4.0 -future>=0.14.3 -sseclient>=0.0.9 \ No newline at end of file +CacheControl>=0.11.6 +requests>=2.10.0 +future>=0.15.2 +sseclient>=0.0.12 \ No newline at end of file From 249162fbd5d0ec1a5198f9af13ad655826c988f6 Mon Sep 17 00:00:00 2001 From: Dan Richelson Date: Sun, 3 Jul 2016 15:50:34 -0700 Subject: [PATCH 20/42] bump version. Add more debug logging. --- ldclient/feature_store.py | 3 ++- setup.py | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/ldclient/feature_store.py b/ldclient/feature_store.py index 8bb274f6..4b0ebefe 100644 --- a/ldclient/feature_store.py +++ b/ldclient/feature_store.py @@ -13,7 +13,7 @@ def __init__(self): def get(self, key): try: self._lock.rlock() - log.debug("All keys in feature store: " + str(self._features.keys())) + log.debug("Feature store contents: " + str(self._features)) f = self._features.get(key) if f is None: log.debug("Attempted to get missing feature: " + str(key) + ". 
Returning None") @@ -38,6 +38,7 @@ def init(self, features): self._features = dict(features) self._initialized = True log.debug("Initialized feature store with " + str(len(features)) + " features") + log.debug("Feature store contents: " + str(self._features)) finally: self._lock.unlock() diff --git a/setup.py b/setup.py index 0f700076..49b2f794 100644 --- a/setup.py +++ b/setup.py @@ -40,7 +40,7 @@ def run(self): setup( name='ldclient-py', - version='0.20.3', + version='1.0.0', author='Catamorphic Co.', author_email='team@catamorphic.com', packages=['ldclient'], From 567baed853bde701dab3d4e8dec5d5d1c7c3b0df Mon Sep 17 00:00:00 2001 From: Dan Richelson Date: Sun, 3 Jul 2016 16:13:54 -0700 Subject: [PATCH 21/42] temporary- make inmem feature store look like master --- ldclient/client.py | 67 ++++++++++++++++++ ldclient/feature_store.py | 144 +++++++++++++++++++------------------- 2 files changed, 139 insertions(+), 72 deletions(-) diff --git a/ldclient/client.py b/ldclient/client.py index d13bb3f5..881620fe 100644 --- a/ldclient/client.py +++ b/ldclient/client.py @@ -12,6 +12,8 @@ from ldclient.polling import PollingUpdateProcessor from ldclient.streaming import StreamingUpdateProcessor from ldclient.util import check_uwsgi, _evaluate, log +from ldclient.interfaces import FeatureStore +from ldclient.rwlock import ReadWriteLock # noinspection PyBroadException try: @@ -92,6 +94,71 @@ def default(cls): return cls() +class InMemoryFeatureStore(FeatureStore): + + def __init__(self): + self._lock = ReadWriteLock() + self._initialized = False + self._features = {} + + def get(self, key): + try: + self._lock.rlock() + f = self._features.get(key) + if f is None or 'deleted' in f and f['deleted']: + return None + return f + finally: + self._lock.runlock() + + def all(self): + try: + self._lock.rlock() + return dict((k, f) for k, f in self._features.items() if ('deleted' not in f) or not f['deleted']) + finally: + self._lock.runlock() + + def init(self, features): + try: + self._lock.lock() + self._features = dict(features) + self._initialized = True + finally: + self._lock.unlock() + + # noinspection PyShadowingNames + def delete(self, key, version): + try: + self._lock.lock() + f = self._features.get(key) + if f is not None and f['version'] < version: + f['deleted'] = True + f['version'] = version + elif f is None: + f = {'deleted': True, 'version': version} + self._features[key] = f + finally: + self._lock.unlock() + + def upsert(self, key, feature): + try: + self._lock.lock() + f = self._features.get(key) + if f is None or f['version'] < feature['version']: + self._features[key] = feature + log.debug("Updated feature {} to version {}".format(key, feature['version'])) + finally: + self._lock.unlock() + + @property + def initialized(self): + try: + self._lock.rlock() + return self._initialized + finally: + self._lock.runlock() + + class LDClient(object): def __init__(self, api_key, config=None, start_wait=5): check_uwsgi() diff --git a/ldclient/feature_store.py b/ldclient/feature_store.py index 4b0ebefe..5aecb2e2 100644 --- a/ldclient/feature_store.py +++ b/ldclient/feature_store.py @@ -2,75 +2,75 @@ from ldclient.interfaces import FeatureStore from ldclient.rwlock import ReadWriteLock - -class InMemoryFeatureStore(FeatureStore): - - def __init__(self): - self._lock = ReadWriteLock() - self._initialized = False - self._features = {} - - def get(self, key): - try: - self._lock.rlock() - log.debug("Feature store contents: " + str(self._features)) - f = self._features.get(key) - if f is None: - 
log.debug("Attempted to get missing feature: " + str(key) + ". Returning None") - return None - if 'deleted' in f and f['deleted']: - log.debug("Attempted to retrieve deleted feature: " + str(key) + ". Returning None") - return None - return f - finally: - self._lock.runlock() - - def all(self): - try: - self._lock.rlock() - return dict((k, f) for k, f in self._features.items() if ('deleted' not in f) or not f['deleted']) - finally: - self._lock.runlock() - - def init(self, features): - try: - self._lock.lock() - self._features = dict(features) - self._initialized = True - log.debug("Initialized feature store with " + str(len(features)) + " features") - log.debug("Feature store contents: " + str(self._features)) - finally: - self._lock.unlock() - - # noinspection PyShadowingNames - def delete(self, key, version): - try: - self._lock.lock() - f = self._features.get(key) - if f is not None and f['version'] < version: - f['deleted'] = True - f['version'] = version - elif f is None: - f = {'deleted': True, 'version': version} - self._features[key] = f - finally: - self._lock.unlock() - - def upsert(self, key, feature): - try: - self._lock.lock() - f = self._features.get(key) - if f is None or f['version'] < feature['version']: - self._features[key] = feature - log.debug("Updated feature {} to version {}".format(key, feature['version'])) - finally: - self._lock.unlock() - - - @property - def initialized(self): - try: - self._lock.rlock() - return self._initialized - finally: - self._lock.runlock() \ No newline at end of file +# +# class InMemoryFeatureStore(FeatureStore): +# +# def __init__(self): +# self._lock = ReadWriteLock() +# self._initialized = False +# self._features = {} +# +# def get(self, key): +# try: +# self._lock.rlock() +# log.debug("Feature store contents: " + str(self._features)) +# f = self._features.get(key) +# if f is None: +# log.debug("Attempted to get missing feature: " + str(key) + ". Returning None") +# return None +# if 'deleted' in f and f['deleted']: +# log.debug("Attempted to retrieve deleted feature: " + str(key) + ". 
Returning None") +# return None +# return f +# finally: +# self._lock.runlock() +# +# def all(self): +# try: +# self._lock.rlock() +# return dict((k, f) for k, f in self._features.items() if ('deleted' not in f) or not f['deleted']) +# finally: +# self._lock.runlock() +# +# def init(self, features): +# try: +# self._lock.lock() +# self._features = dict(features) +# self._initialized = True +# log.debug("Initialized feature store with " + str(len(features)) + " features") +# log.debug("Feature store contents: " + str(self._features)) +# finally: +# self._lock.unlock() +# +# # noinspection PyShadowingNames +# def delete(self, key, version): +# try: +# self._lock.lock() +# f = self._features.get(key) +# if f is not None and f['version'] < version: +# f['deleted'] = True +# f['version'] = version +# elif f is None: +# f = {'deleted': True, 'version': version} +# self._features[key] = f +# finally: +# self._lock.unlock() +# +# def upsert(self, key, feature): +# try: +# self._lock.lock() +# f = self._features.get(key) +# if f is None or f['version'] < feature['version']: +# self._features[key] = feature +# log.debug("Updated feature {} to version {}".format(key, feature['version'])) +# finally: +# self._lock.unlock() +# +# +# @property +# def initialized(self): +# try: +# self._lock.rlock() +# return self._initialized +# finally: +# self._lock.runlock() \ No newline at end of file From 5fb3f5983878222cba7ce37b85f6f06b1af27c9d Mon Sep 17 00:00:00 2001 From: Dan Richelson Date: Sun, 3 Jul 2016 16:16:50 -0700 Subject: [PATCH 22/42] fix bad import --- ldclient/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ldclient/client.py b/ldclient/client.py index 881620fe..90875208 100644 --- a/ldclient/client.py +++ b/ldclient/client.py @@ -7,7 +7,7 @@ from ldclient.event_consumer import EventConsumerImpl from ldclient.feature_requester import FeatureRequesterImpl -from ldclient.feature_store import InMemoryFeatureStore +# from ldclient.feature_store import InMemoryFeatureStore from ldclient.interfaces import FeatureStore from ldclient.polling import PollingUpdateProcessor from ldclient.streaming import StreamingUpdateProcessor From 79a6b1ca59a9606429854d496d5acce5afbde8d9 Mon Sep 17 00:00:00 2001 From: Dan Richelson Date: Sun, 3 Jul 2016 16:21:38 -0700 Subject: [PATCH 23/42] Revert --- ldclient/client.py | 69 +----------------- ldclient/feature_store.py | 144 +++++++++++++++++++------------------- 2 files changed, 73 insertions(+), 140 deletions(-) diff --git a/ldclient/client.py b/ldclient/client.py index 90875208..d13bb3f5 100644 --- a/ldclient/client.py +++ b/ldclient/client.py @@ -7,13 +7,11 @@ from ldclient.event_consumer import EventConsumerImpl from ldclient.feature_requester import FeatureRequesterImpl -# from ldclient.feature_store import InMemoryFeatureStore +from ldclient.feature_store import InMemoryFeatureStore from ldclient.interfaces import FeatureStore from ldclient.polling import PollingUpdateProcessor from ldclient.streaming import StreamingUpdateProcessor from ldclient.util import check_uwsgi, _evaluate, log -from ldclient.interfaces import FeatureStore -from ldclient.rwlock import ReadWriteLock # noinspection PyBroadException try: @@ -94,71 +92,6 @@ def default(cls): return cls() -class InMemoryFeatureStore(FeatureStore): - - def __init__(self): - self._lock = ReadWriteLock() - self._initialized = False - self._features = {} - - def get(self, key): - try: - self._lock.rlock() - f = self._features.get(key) - if f is None or 'deleted' in f and f['deleted']: - 
return None - return f - finally: - self._lock.runlock() - - def all(self): - try: - self._lock.rlock() - return dict((k, f) for k, f in self._features.items() if ('deleted' not in f) or not f['deleted']) - finally: - self._lock.runlock() - - def init(self, features): - try: - self._lock.lock() - self._features = dict(features) - self._initialized = True - finally: - self._lock.unlock() - - # noinspection PyShadowingNames - def delete(self, key, version): - try: - self._lock.lock() - f = self._features.get(key) - if f is not None and f['version'] < version: - f['deleted'] = True - f['version'] = version - elif f is None: - f = {'deleted': True, 'version': version} - self._features[key] = f - finally: - self._lock.unlock() - - def upsert(self, key, feature): - try: - self._lock.lock() - f = self._features.get(key) - if f is None or f['version'] < feature['version']: - self._features[key] = feature - log.debug("Updated feature {} to version {}".format(key, feature['version'])) - finally: - self._lock.unlock() - - @property - def initialized(self): - try: - self._lock.rlock() - return self._initialized - finally: - self._lock.runlock() - - class LDClient(object): def __init__(self, api_key, config=None, start_wait=5): check_uwsgi() diff --git a/ldclient/feature_store.py b/ldclient/feature_store.py index 5aecb2e2..4b0ebefe 100644 --- a/ldclient/feature_store.py +++ b/ldclient/feature_store.py @@ -2,75 +2,75 @@ from ldclient.interfaces import FeatureStore from ldclient.rwlock import ReadWriteLock -# -# class InMemoryFeatureStore(FeatureStore): -# -# def __init__(self): -# self._lock = ReadWriteLock() -# self._initialized = False -# self._features = {} -# -# def get(self, key): -# try: -# self._lock.rlock() -# log.debug("Feature store contents: " + str(self._features)) -# f = self._features.get(key) -# if f is None: -# log.debug("Attempted to get missing feature: " + str(key) + ". Returning None") -# return None -# if 'deleted' in f and f['deleted']: -# log.debug("Attempted to retrieve deleted feature: " + str(key) + ". 
Returning None") -# return None -# return f -# finally: -# self._lock.runlock() -# -# def all(self): -# try: -# self._lock.rlock() -# return dict((k, f) for k, f in self._features.items() if ('deleted' not in f) or not f['deleted']) -# finally: -# self._lock.runlock() -# -# def init(self, features): -# try: -# self._lock.lock() -# self._features = dict(features) -# self._initialized = True -# log.debug("Initialized feature store with " + str(len(features)) + " features") -# log.debug("Feature store contents: " + str(self._features)) -# finally: -# self._lock.unlock() -# -# # noinspection PyShadowingNames -# def delete(self, key, version): -# try: -# self._lock.lock() -# f = self._features.get(key) -# if f is not None and f['version'] < version: -# f['deleted'] = True -# f['version'] = version -# elif f is None: -# f = {'deleted': True, 'version': version} -# self._features[key] = f -# finally: -# self._lock.unlock() -# -# def upsert(self, key, feature): -# try: -# self._lock.lock() -# f = self._features.get(key) -# if f is None or f['version'] < feature['version']: -# self._features[key] = feature -# log.debug("Updated feature {} to version {}".format(key, feature['version'])) -# finally: -# self._lock.unlock() -# -# -# @property -# def initialized(self): -# try: -# self._lock.rlock() -# return self._initialized -# finally: -# self._lock.runlock() \ No newline at end of file + +class InMemoryFeatureStore(FeatureStore): + + def __init__(self): + self._lock = ReadWriteLock() + self._initialized = False + self._features = {} + + def get(self, key): + try: + self._lock.rlock() + log.debug("Feature store contents: " + str(self._features)) + f = self._features.get(key) + if f is None: + log.debug("Attempted to get missing feature: " + str(key) + ". Returning None") + return None + if 'deleted' in f and f['deleted']: + log.debug("Attempted to retrieve deleted feature: " + str(key) + ". 
Returning None") + return None + return f + finally: + self._lock.runlock() + + def all(self): + try: + self._lock.rlock() + return dict((k, f) for k, f in self._features.items() if ('deleted' not in f) or not f['deleted']) + finally: + self._lock.runlock() + + def init(self, features): + try: + self._lock.lock() + self._features = dict(features) + self._initialized = True + log.debug("Initialized feature store with " + str(len(features)) + " features") + log.debug("Feature store contents: " + str(self._features)) + finally: + self._lock.unlock() + + # noinspection PyShadowingNames + def delete(self, key, version): + try: + self._lock.lock() + f = self._features.get(key) + if f is not None and f['version'] < version: + f['deleted'] = True + f['version'] = version + elif f is None: + f = {'deleted': True, 'version': version} + self._features[key] = f + finally: + self._lock.unlock() + + def upsert(self, key, feature): + try: + self._lock.lock() + f = self._features.get(key) + if f is None or f['version'] < feature['version']: + self._features[key] = feature + log.debug("Updated feature {} to version {}".format(key, feature['version'])) + finally: + self._lock.unlock() + + + @property + def initialized(self): + try: + self._lock.rlock() + return self._initialized + finally: + self._lock.runlock() \ No newline at end of file From 40c1e26240c76bb9e0057ab2a268d2b03d901794 Mon Sep 17 00:00:00 2001 From: Dan Richelson Date: Sun, 3 Jul 2016 16:38:28 -0700 Subject: [PATCH 24/42] Change feature store so it lives in config --- ldclient/client.py | 13 +++++-------- ldclient/polling.py | 7 +++---- ldclient/streaming.py | 9 ++++----- 3 files changed, 12 insertions(+), 17 deletions(-) diff --git a/ldclient/client.py b/ldclient/client.py index d13bb3f5..f564094a 100644 --- a/ldclient/client.py +++ b/ldclient/client.py @@ -102,15 +102,12 @@ def __init__(self, api_key, config=None, start_wait=5): self._event_consumer = None self._lock = Lock() - self._store = self._config.feature_store - """ :type: FeatureStore """ - if self._config.offline: log.info("Started LaunchDarkly Client in offline mode") return if self._config.use_ldd: - if self._store.__class__ == "RedisFeatureStore": + if self._config.feature_store.__class__ == "RedisFeatureStore": log.info("Started LaunchDarkly Client in LDD mode") return log.error("LDD mode requires a RedisFeatureStore.") @@ -125,14 +122,14 @@ def __init__(self, api_key, config=None, start_wait=5): if self._config.update_processor_class: self._update_processor = self._config.update_processor_class( - api_key, self._config, self._feature_requester, self._store) + api_key, self._config, self._feature_requester) else: if self._config.stream: self._update_processor = StreamingUpdateProcessor( - api_key, self._config, self._feature_requester, self._store) + api_key, self._config, self._feature_requester) else: self._update_processor = PollingUpdateProcessor( - api_key, self._config, self._feature_requester, self._store) + api_key, self._config, self._feature_requester) """ :type: UpdateProcessor """ start_time = time.time() @@ -209,7 +206,7 @@ def send_event(value): self._sanitize_user(user) if 'key' in user and user['key']: - feature = self._store.get(key) + feature = self._config.feature_store.get(key) else: send_event(default) log.warning("Missing or empty User key when evaluating Feature Flag key: " + key + ". 
Returning default.") diff --git a/ldclient/polling.py b/ldclient/polling.py index 3dd0712e..41d49bb3 100644 --- a/ldclient/polling.py +++ b/ldclient/polling.py @@ -6,13 +6,12 @@ class PollingUpdateProcessor(Thread, UpdateProcessor): - def __init__(self, api_key, config, requester, store): + def __init__(self, api_key, config, requester): Thread.__init__(self) self.daemon = True self._api_key = api_key self._config = config self._requester = requester - self._store = store self._running = False def run(self): @@ -21,13 +20,13 @@ def run(self): self._running = True while self._running: start_time = time.time() - self._store.init(self._requester.get_all()) + self._config.feature_store.init(self._requester.get_all()) elapsed = time.time() - start_time if elapsed < self._config.poll_interval: time.sleep(self._config.poll_interval - elapsed) def initialized(self): - return self._running and self._store.initialized + return self._running and self._config.feature_store.initialized def stop(self): log.info("Stopping PollingUpdateProcessor") diff --git a/ldclient/streaming.py b/ldclient/streaming.py index 6f978931..529dc040 100644 --- a/ldclient/streaming.py +++ b/ldclient/streaming.py @@ -9,13 +9,12 @@ class StreamingUpdateProcessor(Thread, UpdateProcessor): - def __init__(self, api_key, config, requester, store): + def __init__(self, api_key, config, requester): Thread.__init__(self) self.daemon = True self._api_key = api_key self._config = config self._requester = requester - self._store = store self._running = False def run(self): @@ -27,14 +26,14 @@ def run(self): for msg in messages: if not self._running: break - self.process_message(self._store, self._requester, msg) + self.process_message(self._config.feature_store, self._requester, msg) def stop(self): log.info("Stopping StreamingUpdateProcessor") self._running = False def initialized(self): - return self._running and self._store.initialized + return self._running and self._config.feature_store.initialized @staticmethod def process_message(store, requester, msg): @@ -58,4 +57,4 @@ def process_message(store, requester, msg): version = payload['version'] store.delete(key, version) else: - log.warning('Unhandled event in stream processor: ' + msg.event) \ No newline at end of file + log.warning('Unhandled event in stream processor: ' + msg.event) From 851083795c9eac714be4e7bd80ac2641a534439c Mon Sep 17 00:00:00 2001 From: Dan Richelson Date: Sun, 3 Jul 2016 16:41:13 -0700 Subject: [PATCH 25/42] Remove startup wait. 
--- ldclient/client.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/ldclient/client.py b/ldclient/client.py index f564094a..3cd206bd 100644 --- a/ldclient/client.py +++ b/ldclient/client.py @@ -134,11 +134,11 @@ def __init__(self, api_key, config=None, start_wait=5): start_time = time.time() self._update_processor.start() - while not self._update_processor.initialized(): - if time.time() - start_time > start_wait: - log.warn("Timeout encountered waiting for LaunchDarkly Client initialization") - return - time.sleep(0.1) + # while not self._update_processor.initialized(): + # if time.time() - start_time > start_wait: + # log.warn("Timeout encountered waiting for LaunchDarkly Client initialization") + # return + # time.sleep(0.1) log.info("Started LaunchDarkly Client") From a429301adbce29256ed85cff0bba54b9a422c0dc Mon Sep 17 00:00:00 2001 From: Dan Richelson Date: Sun, 3 Jul 2016 16:50:55 -0700 Subject: [PATCH 26/42] add log statements --- ldclient/client.py | 3 +++ ldclient/streaming.py | 2 ++ 2 files changed, 5 insertions(+) diff --git a/ldclient/client.py b/ldclient/client.py index 3cd206bd..0a75ec94 100644 --- a/ldclient/client.py +++ b/ldclient/client.py @@ -102,6 +102,8 @@ def __init__(self, api_key, config=None, start_wait=5): self._event_consumer = None self._lock = Lock() + log.debug("store id: " + str(id(self._config.feature_store))) + if self._config.offline: log.info("Started LaunchDarkly Client in offline mode") return @@ -206,6 +208,7 @@ def send_event(value): self._sanitize_user(user) if 'key' in user and user['key']: + log.debug("store id: " + str(id(self._config.feature_store))) feature = self._config.feature_store.get(key) else: send_event(default) diff --git a/ldclient/streaming.py b/ldclient/streaming.py index 529dc040..967011ce 100644 --- a/ldclient/streaming.py +++ b/ldclient/streaming.py @@ -26,6 +26,7 @@ def run(self): for msg in messages: if not self._running: break + log.debug("store id: " + str(id(self._config.feature_store))) self.process_message(self._config.feature_store, self._requester, msg) def stop(self): @@ -37,6 +38,7 @@ def initialized(self): @staticmethod def process_message(store, requester, msg): + log.debug("store id: " + str(id(store))) payload = json.loads(msg.data) log.debug("Received stream event {}".format(msg.event)) if msg.event == 'put': From 778ec2ec8ba6cadb04a887929f442581acfeeea0 Mon Sep 17 00:00:00 2001 From: Dan Richelson Date: Sun, 3 Jul 2016 17:27:20 -0700 Subject: [PATCH 27/42] move feature store init --- ldclient/client.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/ldclient/client.py b/ldclient/client.py index 0a75ec94..619ee3b9 100644 --- a/ldclient/client.py +++ b/ldclient/client.py @@ -43,7 +43,7 @@ def __init__(self, update_processor_class=None, poll_interval=1, use_ldd=False, - feature_store=InMemoryFeatureStore(), + feature_store=None, feature_requester_class=None, event_consumer_class=None, offline=False): @@ -72,7 +72,7 @@ def __init__(self, poll_interval = 1 self.poll_interval = poll_interval self.use_ldd = use_ldd - self.feature_store = feature_store + self.feature_store = InMemoryFeatureStore() self.event_consumer_class = EventConsumerImpl if not event_consumer_class else event_consumer_class self.feature_requester_class = feature_requester_class self.connect_timeout = connect_timeout @@ -209,6 +209,8 @@ def send_event(value): if 'key' in user and user['key']: log.debug("store id: " + str(id(self._config.feature_store))) + log.debug("Feature store contents: " + 
str(self._config.feature_store._features)) + feature = self._config.feature_store.get(key) else: send_event(default) From 50b1e738c51186f8927e761fd28066b8e3045f89 Mon Sep 17 00:00:00 2001 From: Dan Richelson Date: Sun, 3 Jul 2016 17:34:25 -0700 Subject: [PATCH 28/42] more logging --- ldclient/feature_store.py | 1 + 1 file changed, 1 insertion(+) diff --git a/ldclient/feature_store.py b/ldclient/feature_store.py index 4b0ebefe..209649d3 100644 --- a/ldclient/feature_store.py +++ b/ldclient/feature_store.py @@ -63,6 +63,7 @@ def upsert(self, key, feature): if f is None or f['version'] < feature['version']: self._features[key] = feature log.debug("Updated feature {} to version {}".format(key, feature['version'])) + log.debug("Feature store contents: " + str(self._features)) finally: self._lock.unlock() From d85ccbace0832f2e027a09d5791a8b1abecf53e3 Mon Sep 17 00:00:00 2001 From: Dan Richelson Date: Sun, 3 Jul 2016 17:40:06 -0700 Subject: [PATCH 29/42] maybe --- ldclient/client.py | 4 ++-- ldclient/interfaces.py | 2 ++ ldclient/streaming.py | 3 +++ 3 files changed, 7 insertions(+), 2 deletions(-) diff --git a/ldclient/client.py b/ldclient/client.py index 619ee3b9..cf2244a3 100644 --- a/ldclient/client.py +++ b/ldclient/client.py @@ -209,9 +209,9 @@ def send_event(value): if 'key' in user and user['key']: log.debug("store id: " + str(id(self._config.feature_store))) - log.debug("Feature store contents: " + str(self._config.feature_store._features)) + # log.debug("Feature store contents: " + str(self._config.feature_store._features)) - feature = self._config.feature_store.get(key) + feature = self._update_processor.get(key) else: send_event(default) log.warning("Missing or empty User key when evaluating Feature Flag key: " + key + ". Returning default.") diff --git a/ldclient/interfaces.py b/ldclient/interfaces.py index d6504503..1fd5d7a4 100644 --- a/ldclient/interfaces.py +++ b/ldclient/interfaces.py @@ -104,6 +104,8 @@ def initialized(self): Returns whether the update processor has received feature flags and has initialized its feature store. :rtype: bool """ + def get(self, key): + pass class EventConsumer(BackgroundOperation): diff --git a/ldclient/streaming.py b/ldclient/streaming.py index 967011ce..0e35199a 100644 --- a/ldclient/streaming.py +++ b/ldclient/streaming.py @@ -36,6 +36,9 @@ def stop(self): def initialized(self): return self._running and self._config.feature_store.initialized + def get(self, key): + return self._config.feature_store.get(key) + @staticmethod def process_message(store, requester, msg): log.debug("store id: " + str(id(store))) From 23547c1f1d200adbf479e71b710ae51cced55849 Mon Sep 17 00:00:00 2001 From: Dan Richelson Date: Sun, 3 Jul 2016 20:36:40 -0700 Subject: [PATCH 30/42] Revert some things that didn't work. 
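This backs out the experiments from the last few commits: the update processors take the feature store in their constructors again instead of reaching through config, the startup wait loop in LDClient.__init__ is restored, the store-id debug logging is dropped, and requirements.txt and setup.py return to their previous values. When no custom update_processor_class is configured, the restored wiring looks roughly like this (a sketch of the client internals, not a new API):

    from ldclient.feature_requester import FeatureRequesterImpl
    from ldclient.polling import PollingUpdateProcessor
    from ldclient.streaming import StreamingUpdateProcessor

    def build_update_processor(api_key, config):
        # Mirrors what LDClient.__init__ does after this revert.
        store = config.feature_store          # InMemoryFeatureStore by default
        requester = FeatureRequesterImpl(api_key, config)
        if config.stream:
            return StreamingUpdateProcessor(api_key, config, requester, store)
        return PollingUpdateProcessor(api_key, config, requester, store)
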
--- ldclient/client.py | 30 ++++++++++++++---------------- ldclient/feature_store.py | 3 --- ldclient/interfaces.py | 2 -- ldclient/polling.py | 7 ++++--- ldclient/streaming.py | 14 +++++--------- requirements.txt | 8 ++++---- setup.py | 2 +- 7 files changed, 28 insertions(+), 38 deletions(-) diff --git a/ldclient/client.py b/ldclient/client.py index cf2244a3..d13bb3f5 100644 --- a/ldclient/client.py +++ b/ldclient/client.py @@ -43,7 +43,7 @@ def __init__(self, update_processor_class=None, poll_interval=1, use_ldd=False, - feature_store=None, + feature_store=InMemoryFeatureStore(), feature_requester_class=None, event_consumer_class=None, offline=False): @@ -72,7 +72,7 @@ def __init__(self, poll_interval = 1 self.poll_interval = poll_interval self.use_ldd = use_ldd - self.feature_store = InMemoryFeatureStore() + self.feature_store = feature_store self.event_consumer_class = EventConsumerImpl if not event_consumer_class else event_consumer_class self.feature_requester_class = feature_requester_class self.connect_timeout = connect_timeout @@ -102,14 +102,15 @@ def __init__(self, api_key, config=None, start_wait=5): self._event_consumer = None self._lock = Lock() - log.debug("store id: " + str(id(self._config.feature_store))) + self._store = self._config.feature_store + """ :type: FeatureStore """ if self._config.offline: log.info("Started LaunchDarkly Client in offline mode") return if self._config.use_ldd: - if self._config.feature_store.__class__ == "RedisFeatureStore": + if self._store.__class__ == "RedisFeatureStore": log.info("Started LaunchDarkly Client in LDD mode") return log.error("LDD mode requires a RedisFeatureStore.") @@ -124,23 +125,23 @@ def __init__(self, api_key, config=None, start_wait=5): if self._config.update_processor_class: self._update_processor = self._config.update_processor_class( - api_key, self._config, self._feature_requester) + api_key, self._config, self._feature_requester, self._store) else: if self._config.stream: self._update_processor = StreamingUpdateProcessor( - api_key, self._config, self._feature_requester) + api_key, self._config, self._feature_requester, self._store) else: self._update_processor = PollingUpdateProcessor( - api_key, self._config, self._feature_requester) + api_key, self._config, self._feature_requester, self._store) """ :type: UpdateProcessor """ start_time = time.time() self._update_processor.start() - # while not self._update_processor.initialized(): - # if time.time() - start_time > start_wait: - # log.warn("Timeout encountered waiting for LaunchDarkly Client initialization") - # return - # time.sleep(0.1) + while not self._update_processor.initialized(): + if time.time() - start_time > start_wait: + log.warn("Timeout encountered waiting for LaunchDarkly Client initialization") + return + time.sleep(0.1) log.info("Started LaunchDarkly Client") @@ -208,10 +209,7 @@ def send_event(value): self._sanitize_user(user) if 'key' in user and user['key']: - log.debug("store id: " + str(id(self._config.feature_store))) - # log.debug("Feature store contents: " + str(self._config.feature_store._features)) - - feature = self._update_processor.get(key) + feature = self._store.get(key) else: send_event(default) log.warning("Missing or empty User key when evaluating Feature Flag key: " + key + ". 
Returning default.") diff --git a/ldclient/feature_store.py b/ldclient/feature_store.py index 209649d3..6d1fc2ca 100644 --- a/ldclient/feature_store.py +++ b/ldclient/feature_store.py @@ -13,7 +13,6 @@ def __init__(self): def get(self, key): try: self._lock.rlock() - log.debug("Feature store contents: " + str(self._features)) f = self._features.get(key) if f is None: log.debug("Attempted to get missing feature: " + str(key) + ". Returning None") @@ -38,7 +37,6 @@ def init(self, features): self._features = dict(features) self._initialized = True log.debug("Initialized feature store with " + str(len(features)) + " features") - log.debug("Feature store contents: " + str(self._features)) finally: self._lock.unlock() @@ -63,7 +61,6 @@ def upsert(self, key, feature): if f is None or f['version'] < feature['version']: self._features[key] = feature log.debug("Updated feature {} to version {}".format(key, feature['version'])) - log.debug("Feature store contents: " + str(self._features)) finally: self._lock.unlock() diff --git a/ldclient/interfaces.py b/ldclient/interfaces.py index 1fd5d7a4..d6504503 100644 --- a/ldclient/interfaces.py +++ b/ldclient/interfaces.py @@ -104,8 +104,6 @@ def initialized(self): Returns whether the update processor has received feature flags and has initialized its feature store. :rtype: bool """ - def get(self, key): - pass class EventConsumer(BackgroundOperation): diff --git a/ldclient/polling.py b/ldclient/polling.py index 41d49bb3..3dd0712e 100644 --- a/ldclient/polling.py +++ b/ldclient/polling.py @@ -6,12 +6,13 @@ class PollingUpdateProcessor(Thread, UpdateProcessor): - def __init__(self, api_key, config, requester): + def __init__(self, api_key, config, requester, store): Thread.__init__(self) self.daemon = True self._api_key = api_key self._config = config self._requester = requester + self._store = store self._running = False def run(self): @@ -20,13 +21,13 @@ def run(self): self._running = True while self._running: start_time = time.time() - self._config.feature_store.init(self._requester.get_all()) + self._store.init(self._requester.get_all()) elapsed = time.time() - start_time if elapsed < self._config.poll_interval: time.sleep(self._config.poll_interval - elapsed) def initialized(self): - return self._running and self._config.feature_store.initialized + return self._running and self._store.initialized def stop(self): log.info("Stopping PollingUpdateProcessor") diff --git a/ldclient/streaming.py b/ldclient/streaming.py index 0e35199a..6f978931 100644 --- a/ldclient/streaming.py +++ b/ldclient/streaming.py @@ -9,12 +9,13 @@ class StreamingUpdateProcessor(Thread, UpdateProcessor): - def __init__(self, api_key, config, requester): + def __init__(self, api_key, config, requester, store): Thread.__init__(self) self.daemon = True self._api_key = api_key self._config = config self._requester = requester + self._store = store self._running = False def run(self): @@ -26,22 +27,17 @@ def run(self): for msg in messages: if not self._running: break - log.debug("store id: " + str(id(self._config.feature_store))) - self.process_message(self._config.feature_store, self._requester, msg) + self.process_message(self._store, self._requester, msg) def stop(self): log.info("Stopping StreamingUpdateProcessor") self._running = False def initialized(self): - return self._running and self._config.feature_store.initialized - - def get(self, key): - return self._config.feature_store.get(key) + return self._running and self._store.initialized @staticmethod def process_message(store, 
requester, msg): - log.debug("store id: " + str(id(store))) payload = json.loads(msg.data) log.debug("Received stream event {}".format(msg.event)) if msg.event == 'put': @@ -62,4 +58,4 @@ def process_message(store, requester, msg): version = payload['version'] store.delete(key, version) else: - log.warning('Unhandled event in stream processor: ' + msg.event) + log.warning('Unhandled event in stream processor: ' + msg.event) \ No newline at end of file diff --git a/requirements.txt b/requirements.txt index 7b7372bc..5295651d 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ -CacheControl>=0.11.6 -requests>=2.10.0 -future>=0.15.2 -sseclient>=0.0.12 \ No newline at end of file +CacheControl>=0.10.2 +requests>=2.4.0 +future>=0.14.3 +sseclient>=0.0.9 \ No newline at end of file diff --git a/setup.py b/setup.py index 49b2f794..0f700076 100644 --- a/setup.py +++ b/setup.py @@ -40,7 +40,7 @@ def run(self): setup( name='ldclient-py', - version='1.0.0', + version='0.20.3', author='Catamorphic Co.', author_email='team@catamorphic.com', packages=['ldclient'], From 711073050f753c6160644f42de42f5d5f8a2f136 Mon Sep 17 00:00:00 2001 From: Dan Richelson Date: Mon, 4 Jul 2016 09:59:41 -0700 Subject: [PATCH 31/42] Attempt to make event consumer behavior more consistent. temporarily disable waiting for startup. --- ldclient/client.py | 35 ++++++++++++++++------------------- testing/test_ldclient.py | 5 ----- 2 files changed, 16 insertions(+), 24 deletions(-) diff --git a/ldclient/client.py b/ldclient/client.py index d13bb3f5..0eecec03 100644 --- a/ldclient/client.py +++ b/ldclient/client.py @@ -109,6 +109,11 @@ def __init__(self, api_key, config=None, start_wait=5): log.info("Started LaunchDarkly Client in offline mode") return + if self._config.events_enabled: + self._event_consumer = self._config.event_consumer_class( + self._queue, self._api_key, self._config) + self._event_consumer.start() + if self._config.use_ldd: if self._store.__class__ == "RedisFeatureStore": log.info("Started LaunchDarkly Client in LDD mode") @@ -137,11 +142,12 @@ def __init__(self, api_key, config=None, start_wait=5): start_time = time.time() self._update_processor.start() - while not self._update_processor.initialized(): - if time.time() - start_time > start_wait: - log.warn("Timeout encountered waiting for LaunchDarkly Client initialization") - return - time.sleep(0.1) + #TODO: fix- it seems to always time out. 
+ # while not self._update_processor.initialized(): + # if time.time() - start_time > start_wait: + # log.warn("Timeout encountered waiting for LaunchDarkly Client initialization") + # return + # time.sleep(0.5) log.info("Started LaunchDarkly Client") @@ -149,13 +155,6 @@ def __init__(self, api_key, config=None, start_wait=5): def api_key(self): return self._api_key - def _check_consumer(self): - with self._lock: - if not self._event_consumer or not self._event_consumer.is_alive(): - self._event_consumer = self._config.event_consumer_class( - self._queue, self._api_key, self._config) - self._event_consumer.start() - def close(self): log.info("Closing LaunchDarkly client..") if self.is_offline(): @@ -165,10 +164,9 @@ def close(self): if self._update_processor and self._update_processor.is_alive(): self._update_processor.stop() - def _send(self, event): + def _send_event(self, event): if self._config.offline or not self._config.events_enabled: return - self._check_consumer() event['creationDate'] = int(time.time() * 1000) if self._queue.full(): log.warning("Event queue is full-- dropped an event") @@ -177,20 +175,19 @@ def _send(self, event): def track(self, event_name, user, data=None): self._sanitize_user(user) - self._send({'kind': 'custom', 'key': event_name, + self._send_event({'kind': 'custom', 'key': event_name, 'user': user, 'data': data}) def identify(self, user): self._sanitize_user(user) - self._send({'kind': 'identify', 'key': user['key'], 'user': user}) + self._send_event({'kind': 'identify', 'key': user['key'], 'user': user}) def is_offline(self): return self._config.offline def flush(self): - if self._config.offline: + if self._config.offline or not self._config.events_enabled: return - self._check_consumer() return self._event_consumer.flush() def get_flag(self, key, user, default=False): @@ -200,7 +197,7 @@ def toggle(self, key, user, default=False): default = self._config.get_default(key, default) def send_event(value): - self._send({'kind': 'feature', 'key': key, + self._send_event({'kind': 'feature', 'key': key, 'user': user, 'value': value, 'default': default}) if self._config.offline: diff --git a/testing/test_ldclient.py b/testing/test_ldclient.py index e263692c..4d517d2a 100644 --- a/testing/test_ldclient.py +++ b/testing/test_ldclient.py @@ -119,11 +119,6 @@ def setup_function(function): client._event_consumer = mock_consumer() -@pytest.fixture(autouse=True) -def noop_check_consumer(monkeypatch): - monkeypatch.setattr(client, '_check_consumer', noop_consumer) - - def wait_for_event(c, cb): e = c._queue.get(False) return cb(e) From 4c819c53591a4c6941ac43d17e43136619444781 Mon Sep 17 00:00:00 2001 From: Dan Richelson Date: Tue, 5 Jul 2016 09:39:37 -0700 Subject: [PATCH 32/42] Bump version. Rename some things. 
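The feature requester's single-flag fetch is renamed from get to get_one so it lines up with get_all, and the streaming processor's indirect/patch handler now calls get_one. A rough sketch of the requester surface after the rename (the api key and flag key are placeholders):

    from ldclient.client import Config
    from ldclient.feature_requester import FeatureRequesterImpl

    config = Config()
    requester = FeatureRequesterImpl('your api key', config)

    all_flags = requester.get_all()            # full flag payload
    one_flag = requester.get_one('my-flag')    # previously requester.get('my-flag')
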
--- ldclient/feature_requester.py | 2 +- ldclient/feature_store.py | 4 ++-- ldclient/streaming.py | 2 +- setup.py | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/ldclient/feature_requester.py b/ldclient/feature_requester.py index 1a31cb54..1c72c34a 100644 --- a/ldclient/feature_requester.py +++ b/ldclient/feature_requester.py @@ -22,7 +22,7 @@ def get_all(self): features = r.json() return features - def get(self, key): + def get_one(self, key): hdrs = _headers(self._api_key) uri = self._config.get_latest_features_uri + '/' + key r = self._session.get(uri, diff --git a/ldclient/feature_store.py b/ldclient/feature_store.py index 6d1fc2ca..9420da09 100644 --- a/ldclient/feature_store.py +++ b/ldclient/feature_store.py @@ -15,10 +15,10 @@ def get(self, key): self._lock.rlock() f = self._features.get(key) if f is None: - log.debug("Attempted to get missing feature: " + str(key) + ". Returning None") + log.debug("Attempted to get missing feature: " + str(key) + " Returning None") return None if 'deleted' in f and f['deleted']: - log.debug("Attempted to retrieve deleted feature: " + str(key) + ". Returning None") + log.debug("Attempted to get deleted feature: " + str(key) + " Returning None") return None return f finally: diff --git a/ldclient/streaming.py b/ldclient/streaming.py index 6f978931..dd0f7561 100644 --- a/ldclient/streaming.py +++ b/ldclient/streaming.py @@ -49,7 +49,7 @@ def process_message(store, requester, msg): store.upsert(key, feature) elif msg.event == "indirect/patch": key = payload['data'] - store.upsert(key, requester.get(key)) + store.upsert(key, requester.get_one(key)) elif msg.event == "indirect/put": store.init(requester.get_all()) elif msg.event == 'delete': diff --git a/setup.py b/setup.py index 0f700076..49b2f794 100644 --- a/setup.py +++ b/setup.py @@ -40,7 +40,7 @@ def run(self): setup( name='ldclient-py', - version='0.20.3', + version='1.0.0', author='Catamorphic Co.', author_email='team@catamorphic.com', packages=['ldclient'], From ab4929cead57c2bbeb077f689066674f900bdddc Mon Sep 17 00:00:00 2001 From: Dan Richelson Date: Wed, 6 Jul 2016 11:48:41 -0700 Subject: [PATCH 33/42] Add start_wait. 
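Client construction now hands the update processor a threading.Event and blocks for up to start_wait seconds waiting for it to be set, instead of sleeping in a poll loop. A module-level singleton accessor is also added, so intended usage looks roughly like this (a sketch based on the new demo; the api key value is a placeholder):

    import ldclient

    # Module-level settings are read the first time get() builds the client.
    ldclient.api_key = 'your api key'     # placeholder
    ldclient.start_wait = 10              # seconds to wait for the first flag payload

    client = ldclient.get()               # builds and caches a single LDClient
    user = {u'key': 'userKey'}
    print(client.toggle('update-app', user, False))
    client.close()
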
--- demo/demo.py | 10 +++++----- ldclient/__init__.py | 23 +++++++++++++++++++++++ ldclient/client.py | 25 +++++++++++++------------ ldclient/event_consumer.py | 2 +- ldclient/feature_store.py | 7 +++---- ldclient/polling.py | 10 +++++++--- ldclient/streaming.py | 19 +++++++++++++------ ldclient/twisted_impls.py | 8 +++++--- requirements.txt | 5 ++--- 9 files changed, 72 insertions(+), 37 deletions(-) diff --git a/demo/demo.py b/demo/demo.py index 442ff5fa..a91c51d4 100644 --- a/demo/demo.py +++ b/demo/demo.py @@ -1,8 +1,9 @@ from __future__ import print_function -from ldclient import LDClient, Config + import logging import sys -import time + +from ldclient import LDClient root = logging.getLogger() root.setLevel(logging.DEBUG) @@ -14,12 +15,11 @@ root.addHandler(ch) if __name__ == '__main__': - apiKey = 'your api key' - client = LDClient(apiKey) + api_key = 'api_key' + client = LDClient(api_key, start_wait=10) print(client.api_key) user = {u'key': 'userKey'} print(client.toggle("update-app", user, False)) - time.sleep(10) client.close() diff --git a/ldclient/__init__.py b/ldclient/__init__.py index 97ebd5ec..d297ede7 100644 --- a/ldclient/__init__.py +++ b/ldclient/__init__.py @@ -1,3 +1,5 @@ +import threading + from .client import * from ldclient.version import VERSION from .util import log @@ -11,6 +13,27 @@ "firstName", "lastName", "avatar", "name", "anonymous"] +"""Settings.""" +client = None +api_key = None +start_wait = 5 +config = Config() + +_lock = threading.Lock() + + +def get(): + try: + _lock.acquire() + global client + if not client: + log.debug("Initializing LaunchDarkly Client") + client = LDClient(api_key, config, start_wait) + return client + finally: + _lock.release() + + # Add a NullHandler for Python < 2.7 compatibility class NullHandler(logging.Handler): diff --git a/ldclient/client.py b/ldclient/client.py index 0eecec03..dab03c45 100644 --- a/ldclient/client.py +++ b/ldclient/client.py @@ -1,5 +1,6 @@ from __future__ import division, with_statement, absolute_import +import threading import time import requests @@ -128,28 +129,28 @@ def __init__(self, api_key, config=None, start_wait=5): self._feature_requester = FeatureRequesterImpl(api_key, self._config) """ :type: FeatureRequester """ + update_processor_ready = threading.Event() + if self._config.update_processor_class: self._update_processor = self._config.update_processor_class( - api_key, self._config, self._feature_requester, self._store) + api_key, self._config, self._feature_requester, self._store, update_processor_ready) else: if self._config.stream: self._update_processor = StreamingUpdateProcessor( - api_key, self._config, self._feature_requester, self._store) + api_key, self._config, self._feature_requester, self._store, update_processor_ready) else: self._update_processor = PollingUpdateProcessor( - api_key, self._config, self._feature_requester, self._store) + api_key, self._config, self._feature_requester, self._store, update_processor_ready) """ :type: UpdateProcessor """ - start_time = time.time() self._update_processor.start() - #TODO: fix- it seems to always time out. 
- # while not self._update_processor.initialized(): - # if time.time() - start_time > start_wait: - # log.warn("Timeout encountered waiting for LaunchDarkly Client initialization") - # return - # time.sleep(0.5) - - log.info("Started LaunchDarkly Client") + log.info("Waiting up to " + str(start_wait) + " seconds for LaunchDarkly client to initialize...") + update_processor_ready.wait(start_wait) + + if self._update_processor.initialized: + log.info("Started LaunchDarkly Client: OK") + else: + log.info("Initialization timeout exceeded for LaunchDarkly Client. Feature Flags may not yet be available.") @property def api_key(self): diff --git a/ldclient/event_consumer.py b/ldclient/event_consumer.py index 1990285e..98e94dc9 100644 --- a/ldclient/event_consumer.py +++ b/ldclient/event_consumer.py @@ -20,7 +20,7 @@ def __init__(self, event_queue, api_key, config): self._api_key = api_key self._config = config self._queue = event_queue - self._running = False + self._running = True def run(self): log.debug("Starting event consumer") diff --git a/ldclient/feature_store.py b/ldclient/feature_store.py index 9420da09..04e5a3a4 100644 --- a/ldclient/feature_store.py +++ b/ldclient/feature_store.py @@ -15,10 +15,10 @@ def get(self, key): self._lock.rlock() f = self._features.get(key) if f is None: - log.debug("Attempted to get missing feature: " + str(key) + " Returning None") + log.warn("Attempted to get missing feature: " + str(key) + " Returning None") return None if 'deleted' in f and f['deleted']: - log.debug("Attempted to get deleted feature: " + str(key) + " Returning None") + log.warn("Attempted to get deleted feature: " + str(key) + " Returning None") return None return f finally: @@ -64,11 +64,10 @@ def upsert(self, key, feature): finally: self._lock.unlock() - @property def initialized(self): try: self._lock.rlock() return self._initialized finally: - self._lock.runlock() \ No newline at end of file + self._lock.runlock() diff --git a/ldclient/polling.py b/ldclient/polling.py index 3dd0712e..0c6ef11d 100644 --- a/ldclient/polling.py +++ b/ldclient/polling.py @@ -6,14 +6,15 @@ class PollingUpdateProcessor(Thread, UpdateProcessor): - def __init__(self, api_key, config, requester, store): + def __init__(self, api_key, config, requester, store, ready): Thread.__init__(self) self.daemon = True self._api_key = api_key self._config = config self._requester = requester self._store = store - self._running = False + self._running = True + self._ready = ready def run(self): if not self._running: @@ -22,12 +23,15 @@ def run(self): while self._running: start_time = time.time() self._store.init(self._requester.get_all()) + if not self._ready.is_set() and self._store.initialized: + self._ready.set() + log.info("StreamingUpdateProcessor initialized ok") elapsed = time.time() - start_time if elapsed < self._config.poll_interval: time.sleep(self._config.poll_interval - elapsed) def initialized(self): - return self._running and self._store.initialized + return self._running and self._ready.is_set() and self._store.initialized def stop(self): log.info("Stopping PollingUpdateProcessor") diff --git a/ldclient/streaming.py b/ldclient/streaming.py index dd0f7561..3fb7c6b2 100644 --- a/ldclient/streaming.py +++ b/ldclient/streaming.py @@ -9,17 +9,18 @@ class StreamingUpdateProcessor(Thread, UpdateProcessor): - def __init__(self, api_key, config, requester, store): + def __init__(self, api_key, config, requester, store, ready): Thread.__init__(self) self.daemon = True self._api_key = api_key self._config = config 
self._requester = requester self._store = store - self._running = False + self._running = True + self._ready = ready def run(self): - log.info("Starting StreamingUpdateProcessor") + log.info("Starting StreamingUpdateProcessor connecting to uri: " + self._config.stream_uri) self._running = True hdrs = _stream_headers(self._api_key) uri = self._config.stream_uri @@ -27,21 +28,24 @@ def run(self): for msg in messages: if not self._running: break - self.process_message(self._store, self._requester, msg) + self.process_message(self._store, self._requester, msg, self._ready) def stop(self): log.info("Stopping StreamingUpdateProcessor") self._running = False def initialized(self): - return self._running and self._store.initialized + return self._running and self._ready.is_set() and self._store.initialized @staticmethod - def process_message(store, requester, msg): + def process_message(store, requester, msg, ready): payload = json.loads(msg.data) log.debug("Received stream event {}".format(msg.event)) if msg.event == 'put': store.init(payload) + if not ready.is_set() and store.initialized: + ready.set() + log.info("StreamingUpdateProcessor initialized ok") elif msg.event == 'patch': key = payload['path'][1:] feature = payload['data'] @@ -52,6 +56,9 @@ def process_message(store, requester, msg): store.upsert(key, requester.get_one(key)) elif msg.event == "indirect/put": store.init(requester.get_all()) + if not ready.is_set() and store.initialized: + ready.set() + log.info("StreamingUpdateProcessor initialized ok") elif msg.event == 'delete': key = payload['path'][1:] # noinspection PyShadowingNames diff --git a/ldclient/twisted_impls.py b/ldclient/twisted_impls.py index d377454d..acf299d2 100644 --- a/ldclient/twisted_impls.py +++ b/ldclient/twisted_impls.py @@ -68,15 +68,17 @@ class TwistedStreamProcessor(UpdateProcessor): def close(self): self.sse_client.stop() - def __init__(self, api_key, config, store, requester): + def __init__(self, api_key, config, store, requester, ready): self._store = store self._requester = requester + self._ready = ready self.sse_client = TwistedSSEClient(config.stream_uri, headers=_stream_headers(api_key, "PythonTwistedClient"), verify_ssl=config.verify_ssl, on_event=partial(StreamingUpdateProcessor.process_message, self._store, - self._requester)) + self._requester, + self._ready)) self.running = False def start(self): @@ -87,7 +89,7 @@ def stop(self): self.sse_client.stop() def initialized(self): - return self._store.initialized() + return self._ready.is_set() and self._store.initialized() def is_alive(self): return self.running and self._store.initialized() diff --git a/requirements.txt b/requirements.txt index 5295651d..f56b29bd 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,3 @@ CacheControl>=0.10.2 -requests>=2.4.0 -future>=0.14.3 -sseclient>=0.0.9 \ No newline at end of file +requests>=2.10.0 +sseclient>=0.0.12 \ No newline at end of file From 4e32f002b36efc1a92c8463b03dd4894383c2154 Mon Sep 17 00:00:00 2001 From: Dan Richelson Date: Wed, 6 Jul 2016 11:56:50 -0700 Subject: [PATCH 34/42] add future --- requirements.txt | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index f56b29bd..4cdeaa9a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,4 @@ CacheControl>=0.10.2 requests>=2.10.0 -sseclient>=0.0.12 \ No newline at end of file +sseclient>=0.0.12 +future>=0.15.2 \ No newline at end of file From 62ceb43968ba372873f8199115019b2a13ad1788 Mon Sep 17 00:00:00 2001 From: Dan 
Richelson Date: Wed, 6 Jul 2016 12:04:26 -0700 Subject: [PATCH 35/42] cahnge log statement. --- ldclient/feature_store.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ldclient/feature_store.py b/ldclient/feature_store.py index 04e5a3a4..f24335d2 100644 --- a/ldclient/feature_store.py +++ b/ldclient/feature_store.py @@ -15,10 +15,10 @@ def get(self, key): self._lock.rlock() f = self._features.get(key) if f is None: - log.warn("Attempted to get missing feature: " + str(key) + " Returning None") + log.debug("Attempted to get missing feature: " + str(key) + " Returning None") return None if 'deleted' in f and f['deleted']: - log.warn("Attempted to get deleted feature: " + str(key) + " Returning None") + log.debug("Attempted to get deleted feature: " + str(key) + " Returning None") return None return f finally: From 739ccb1875f763cdb653e47e4db397e21959b6ac Mon Sep 17 00:00:00 2001 From: Dan Richelson Date: Wed, 6 Jul 2016 12:42:48 -0700 Subject: [PATCH 36/42] Make python 3 happy. --- ldclient/redis_feature_store.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/ldclient/redis_feature_store.py b/ldclient/redis_feature_store.py index d7b254db..2765181c 100644 --- a/ldclient/redis_feature_store.py +++ b/ldclient/redis_feature_store.py @@ -35,7 +35,8 @@ def init(self, features): pipe.delete(self._features_key) self._cache.clear() - for k, f in features.iteritems(): + + for k, f in features.items(): f_json = json.dumps(f, encoding='utf-8') pipe.hset(self._features_key, k, f_json) self._cache[k] = f From 0596d504a2a72f3808e3d0217166bb8be9833ac6 Mon Sep 17 00:00:00 2001 From: Dan Richelson Date: Wed, 6 Jul 2016 12:55:55 -0700 Subject: [PATCH 37/42] Make python 3 happy. --- ldclient/redis_feature_store.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/ldclient/redis_feature_store.py b/ldclient/redis_feature_store.py index 2765181c..995a559a 100644 --- a/ldclient/redis_feature_store.py +++ b/ldclient/redis_feature_store.py @@ -37,7 +37,7 @@ def init(self, features): self._cache.clear() for k, f in features.items(): - f_json = json.dumps(f, encoding='utf-8') + f_json = json.dumps(f) pipe.hset(self._features_key, k, f_json) self._cache[k] = f pipe.execute() @@ -84,7 +84,7 @@ def delete(self, key, version): f['version'] = version elif f is None: f = {'deleted': True, 'version': version} - f_json = json.dumps(f, encoding='utf-8') + f_json = json.dumps(f) r.hset(self._features_key, key, f_json) self._cache[key] = f r.unwatch() @@ -112,7 +112,7 @@ def upsert(self, key, feature): r.unwatch() return - feature_json = json.dumps(feature, encoding='utf-8') + feature_json = json.dumps(feature) r.hset(self._features_key, key, feature_json) self._cache[key] = feature r.unwatch() From 1350833c125ee99dd98ab9a71f35794849ef38cf Mon Sep 17 00:00:00 2001 From: Dan Richelson Date: Wed, 6 Jul 2016 13:10:21 -0700 Subject: [PATCH 38/42] Start polling processor for real this time --- ldclient/polling.py | 4 ++-- ldclient/streaming.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/ldclient/polling.py b/ldclient/polling.py index 0c6ef11d..ff09f0c8 100644 --- a/ldclient/polling.py +++ b/ldclient/polling.py @@ -13,7 +13,7 @@ def __init__(self, api_key, config, requester, store, ready): self._config = config self._requester = requester self._store = store - self._running = True + self._running = False self._ready = ready def run(self): @@ -24,8 +24,8 @@ def run(self): start_time = time.time() self._store.init(self._requester.get_all()) if 
not self._ready.is_set() and self._store.initialized: - self._ready.set() log.info("StreamingUpdateProcessor initialized ok") + self._ready.set() elapsed = time.time() - start_time if elapsed < self._config.poll_interval: time.sleep(self._config.poll_interval - elapsed) diff --git a/ldclient/streaming.py b/ldclient/streaming.py index 3fb7c6b2..f7e66632 100644 --- a/ldclient/streaming.py +++ b/ldclient/streaming.py @@ -16,7 +16,7 @@ def __init__(self, api_key, config, requester, store, ready): self._config = config self._requester = requester self._store = store - self._running = True + self._running = False self._ready = ready def run(self): From f2cb7a3317706c815756fda4d41b09cbe23a0a21 Mon Sep 17 00:00:00 2001 From: Dan Richelson Date: Wed, 6 Jul 2016 13:46:51 -0700 Subject: [PATCH 39/42] Update readme --- README.md | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 75b51ead..1e36ff20 100644 --- a/README.md +++ b/README.md @@ -13,9 +13,14 @@ Quick setup pip install ldclient-py -2. Create a new LDClient with your API key: +2. Configure the library with your api key: - client = LDClient("your_api_key") + import ldclient + ldclient.api_key = "your api key" + +3. Get the client: + + client = ldclient.get() Your first feature flag ----------------------- From 9c23fc40f88c394caff650949ad99b1884c64f20 Mon Sep 17 00:00:00 2001 From: Dan Richelson Date: Wed, 6 Jul 2016 14:15:10 -0700 Subject: [PATCH 40/42] Change redis feature store to take url --- ldclient/redis_feature_store.py | 9 ++------- testing/test_feature_store.py | 4 ++-- 2 files changed, 4 insertions(+), 9 deletions(-) diff --git a/ldclient/redis_feature_store.py b/ldclient/redis_feature_store.py index 995a559a..ddd615ed 100644 --- a/ldclient/redis_feature_store.py +++ b/ldclient/redis_feature_store.py @@ -15,8 +15,7 @@ def __setitem__(self, key, value): class RedisFeatureStore(FeatureStore): def __init__(self, - host='localhost', - port=6379, + url='redis://localhost:6379/0', prefix='launchdarkly', max_connections=16, expiration=15, @@ -25,10 +24,7 @@ def __init__(self, self._features_key = "{}:features".format(prefix) self._cache = ForgetfulDict() if expiration == 0 else ExpiringDict(max_len=capacity, max_age_seconds=expiration) - self._pool = redis.ConnectionPool(max_connections=max_connections, - host=host, - port=port, - db=0) + self._pool = redis.ConnectionPool.from_url(url=url, max_connections=max_connections) def init(self, features): pipe = redis.Redis(connection_pool=self._pool).pipeline() @@ -116,4 +112,3 @@ def upsert(self, key, feature): r.hset(self._features_key, key, feature_json) self._cache[key] = feature r.unwatch() - diff --git a/testing/test_feature_store.py b/testing/test_feature_store.py index 338e6373..96bb140c 100644 --- a/testing/test_feature_store.py +++ b/testing/test_feature_store.py @@ -15,12 +15,12 @@ def in_memory(self): def redis_with_local_cache(self): r = redis.StrictRedis(host=self.redis_host, port=self.redis_port, db=0) r.delete("launchdarkly:features") - return RedisFeatureStore(host=self.redis_host, port=self.redis_port) + return RedisFeatureStore() def redis_no_local_cache(self): r = redis.StrictRedis(host=self.redis_host, port=self.redis_port, db=0) r.delete("launchdarkly:features") - return RedisFeatureStore(host=self.redis_host, port=self.redis_port, expiration=0) + return RedisFeatureStore(expiration=0) params = [in_memory, redis_with_local_cache, redis_no_local_cache] From 467ad4e9dca9df667384b1672c592c6b034b1e20 Mon Sep 17 00:00:00 2001 
From: Dan Richelson Date: Wed, 6 Jul 2016 14:17:55 -0700 Subject: [PATCH 41/42] always set a feature store --- ldclient/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ldclient/client.py b/ldclient/client.py index dab03c45..60a0ecd6 100644 --- a/ldclient/client.py +++ b/ldclient/client.py @@ -73,7 +73,7 @@ def __init__(self, poll_interval = 1 self.poll_interval = poll_interval self.use_ldd = use_ldd - self.feature_store = feature_store + self.feature_store = InMemoryFeatureStore() if not feature_store else feature_store self.event_consumer_class = EventConsumerImpl if not event_consumer_class else event_consumer_class self.feature_requester_class = feature_requester_class self.connect_timeout = connect_timeout From 605fb13c0e42c7bf9067426dd41840910fe3f407 Mon Sep 17 00:00:00 2001 From: Dan Richelson Date: Thu, 7 Jul 2016 11:33:56 -0700 Subject: [PATCH 42/42] Address PR comments. Fix weird formatting in test file. --- demo/demo.py | 8 ++++---- ldclient/__init__.py | 19 +++++++++++++------ ldclient/polling.py | 2 +- testing/test_ldclient.py | 24 ++++++++++++++---------- 4 files changed, 32 insertions(+), 21 deletions(-) diff --git a/demo/demo.py b/demo/demo.py index a91c51d4..9cf4e3fc 100644 --- a/demo/demo.py +++ b/demo/demo.py @@ -3,7 +3,7 @@ import logging import sys -from ldclient import LDClient +import ldclient root = logging.getLogger() root.setLevel(logging.DEBUG) @@ -15,9 +15,9 @@ root.addHandler(ch) if __name__ == '__main__': - api_key = 'api_key' - client = LDClient(api_key, start_wait=10) - print(client.api_key) + ldclient._api_key = 'api_key' + ldclient.start_wait = 10 + client = ldclient.get() user = {u'key': 'userKey'} print(client.toggle("update-app", user, False)) diff --git a/ldclient/__init__.py b/ldclient/__init__.py index d297ede7..0d80a640 100644 --- a/ldclient/__init__.py +++ b/ldclient/__init__.py @@ -1,9 +1,9 @@ -import threading +import logging -from .client import * +from ldclient.rwlock import ReadWriteLock from ldclient.version import VERSION +from .client import * from .util import log -import logging __version__ = VERSION @@ -19,19 +19,26 @@ start_wait = 5 config = Config() -_lock = threading.Lock() +_lock = ReadWriteLock() def get(): try: - _lock.acquire() + _lock.rlock() + if client: + return client + finally: + _lock.runlock() + + try: global client + _lock.lock() if not client: log.debug("Initializing LaunchDarkly Client") client = LDClient(api_key, config, start_wait) return client finally: - _lock.release() + _lock.unlock() # Add a NullHandler for Python < 2.7 compatibility diff --git a/ldclient/polling.py b/ldclient/polling.py index ff09f0c8..dace8724 100644 --- a/ldclient/polling.py +++ b/ldclient/polling.py @@ -24,7 +24,7 @@ def run(self): start_time = time.time() self._store.init(self._requester.get_all()) if not self._ready.is_set() and self._store.initialized: - log.info("StreamingUpdateProcessor initialized ok") + log.info("PollingUpdateProcessor initialized ok") self._ready.set() elapsed = time.time() - start_time if elapsed < self._config.poll_interval: diff --git a/testing/test_ldclient.py b/testing/test_ldclient.py index 4d517d2a..c85abd63 100644 --- a/testing/test_ldclient.py +++ b/testing/test_ldclient.py @@ -136,7 +136,8 @@ def test_toggle_event(): client.toggle('feature.key', user, default=None) def expected_event(e): - return e['kind'] == 'feature' and e['key'] == 'feature.key' and e['user'] == user and e['value'] == True and e['default'] == None + return e['kind'] == 'feature' and e['key'] == 'feature.key' and 
e['user'] == user and e['value'] == True \ + and e['default'] == None assert expected_event(client._queue.get(False)) @@ -150,9 +151,8 @@ def test_toggle_event_numeric_user_key(): client.toggle('feature.key', numeric_key_user, default=None) def expected_event(e): - return e['kind'] == 'feature' and e['key'] == 'feature.key' and e['user'] == sanitized_numeric_key_user and e[ - 'value'] == True and \ - e['default'] == None + return e['kind'] == 'feature' and e['key'] == 'feature.key' and e['user'] == sanitized_numeric_key_user \ + and e['value'] == True and e['default'] == None assert expected_event(client._queue.get(False)) @@ -197,8 +197,8 @@ def test_track_numeric_key_user(): client.track('my_event', numeric_key_user, 42) def expected_event(e): - return e['kind'] == 'custom' and e['key'] == 'my_event' and e['user'] == sanitized_numeric_key_user and e[ - 'data'] == 42 + return e['kind'] == 'custom' and e['key'] == 'my_event' and e['user'] == sanitized_numeric_key_user \ + and e['data'] == 42 assert expected_event(client._queue.get(False)) @@ -216,8 +216,10 @@ def test_defaults(): def test_defaults_and_online(): expected = "bar" - my_client = LDClient("API_KEY", Config("http://localhost:3000", defaults={"foo": expected}, - event_consumer_class=MockConsumer, feature_requester_class=MockFeatureRequester, + my_client = LDClient("API_KEY", Config("http://localhost:3000", + defaults={"foo": expected}, + event_consumer_class=MockConsumer, + feature_requester_class=MockFeatureRequester, feature_store=InMemoryFeatureStore())) actual = my_client.toggle('foo', user, default="originalDefault") assert actual == expected @@ -225,8 +227,10 @@ def test_defaults_and_online(): def test_defaults_and_online_no_default(): - client = LDClient("API_KEY", Config("http://localhost:3000", defaults={"foo": "bar"}, - event_consumer_class=MockConsumer, feature_requester_class=MockFeatureRequester)) + client = LDClient("API_KEY", Config("http://localhost:3000", + defaults={"foo": "bar"}, + event_consumer_class=MockConsumer, + feature_requester_class=MockFeatureRequester)) assert "jim" == client.toggle('baz', user, default="jim") assert wait_for_event(client, lambda e: e['kind'] == 'feature' and e['key'] == u'baz' and e['user'] == user)
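
For reference, the pieces introduced across this patch series combine into the following minimal usage sketch. It is a sketch only: the api key, flag key, and Redis URL are placeholders; it assumes the module-level attribute is named api_key as in the README snippet (the demo sets _api_key), and that RedisFeatureStore is imported from ldclient.redis_feature_store.

import ldclient
from ldclient.client import Config
from ldclient.redis_feature_store import RedisFeatureStore

# Module-level settings read by ldclient.get(); the values below are placeholders.
ldclient.api_key = "your api key"
ldclient.start_wait = 10   # seconds to wait for the update processor to report ready
ldclient.config = Config(stream=False,      # False -> PollingUpdateProcessor, True -> StreamingUpdateProcessor
                         poll_interval=5,   # polling interval in seconds
                         feature_store=RedisFeatureStore(url='redis://localhost:6379/0',
                                                         expiration=0))  # expiration=0 turns off the local cache

client = ldclient.get()    # lazily constructs and caches a shared LDClient
user = {u'key': 'userKey'}
print(client.toggle("update-app", user, False))   # "update-app" is an example flag key; False is the default value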