From 365bee9bbd4047c1b7349eb2fd3ca2e06433f0f8 Mon Sep 17 00:00:00 2001 From: ds Date: Wed, 23 Nov 2022 09:27:17 +0100 Subject: [PATCH] python sdk 1.0.0 --- .flake8 | 39 + .gitignore | 94 ++ MANIFEST.in | 3 + README.md | 132 ++- dev-requirements.txt | 10 + fbclient/__init__.py | 71 ++ fbclient/category.py | 34 + fbclient/client.py | 398 +++++++++ fbclient/common_types.py | 318 +++++++ fbclient/config.py | 242 ++++++ fbclient/data_storage.py | 107 +++ fbclient/evaluator.py | 306 +++++++ fbclient/event_processor.py | 202 +++++ fbclient/event_types.py | 163 ++++ fbclient/interfaces.py | 267 ++++++ fbclient/status.py | 89 ++ fbclient/status_types.py | 106 +++ fbclient/streaming.py | 231 +++++ fbclient/update_processor.py | 19 + fbclient/utils/__init__.py | 148 ++++ fbclient/utils/exceptions.py | 15 + .../exponential_backoff_jitter_strategy.py | 51 ++ fbclient/utils/http_client.py | 149 ++++ fbclient/utils/repeatable_task.py | 37 + fbclient/utils/rwlock.py | 39 + .../utils/variation_splitting_algorithm.py | 25 + fbclient/version.py | 1 + pyproject.toml | 3 + pytest.ini | 5 + requirements.txt | 4 + setup.py | 59 ++ tests/__init__.py | 0 tests/fbclient_test_data.json | 804 ++++++++++++++++++ tests/test_data_storage.py | 100 +++ tests/test_data_update_status_provider.py | 120 +++ tests/test_evaluator.py | 138 +++ tests/test_event_processor.py | 124 +++ tests/test_fbclient.py | 250 ++++++ 38 files changed, 4902 insertions(+), 1 deletion(-) create mode 100644 .flake8 create mode 100644 MANIFEST.in create mode 100644 dev-requirements.txt create mode 100644 fbclient/__init__.py create mode 100644 fbclient/category.py create mode 100644 fbclient/client.py create mode 100644 fbclient/common_types.py create mode 100644 fbclient/config.py create mode 100644 fbclient/data_storage.py create mode 100644 fbclient/evaluator.py create mode 100644 fbclient/event_processor.py create mode 100644 fbclient/event_types.py create mode 100644 fbclient/interfaces.py create mode 100644 
fbclient/status.py create mode 100644 fbclient/status_types.py create mode 100644 fbclient/streaming.py create mode 100644 fbclient/update_processor.py create mode 100644 fbclient/utils/__init__.py create mode 100644 fbclient/utils/exceptions.py create mode 100644 fbclient/utils/exponential_backoff_jitter_strategy.py create mode 100644 fbclient/utils/http_client.py create mode 100644 fbclient/utils/repeatable_task.py create mode 100644 fbclient/utils/rwlock.py create mode 100644 fbclient/utils/variation_splitting_algorithm.py create mode 100644 fbclient/version.py create mode 100644 pyproject.toml create mode 100644 pytest.ini create mode 100644 requirements.txt create mode 100644 setup.py create mode 100644 tests/__init__.py create mode 100644 tests/fbclient_test_data.json create mode 100644 tests/test_data_storage.py create mode 100644 tests/test_data_update_status_provider.py create mode 100644 tests/test_evaluator.py create mode 100644 tests/test_event_processor.py create mode 100644 tests/test_fbclient.py diff --git a/.flake8 b/.flake8 new file mode 100644 index 0000000..c7078b8 --- /dev/null +++ b/.flake8 @@ -0,0 +1,39 @@ +[flake8] + +################### FILE PATTERNS ########################## + +# Provide a comma-separated list of glob patterns to exclude from checks. +exclude = + # git folder + .git, + # python cache + __pycache__, + # pytest cache + .pytest_cache, + # mypy cache + .mypy_cache, + build, + dist +# Provide a comma-separate list of glob patterns to include for checks. +filename = *.py + +########## Options ########## + +# Report all errors, even if it is on the same line as a `# NOQA` comment. +disable-noqa = False + +# Set the maximum length that any line (with some exceptions) may be. +max-line-length = 199 +# Set the maximum allowed McCabe complexity value for a block of code. 
+max-complexity = 10 + +########## Rules ########## +ignore = + E133, + E203, + W503, + C901, + E722 + +per-file-ignores = + __init__.py:F401,F403 \ No newline at end of file diff --git a/.gitignore b/.gitignore index b6e4761..fa3dc6b 100644 --- a/.gitignore +++ b/.gitignore @@ -127,3 +127,97 @@ dmypy.json # Pyre type checker .pyre/ + +### IDEs + +#### VSCode + +.vscode/* +!.vscode/tasks.json +!.vscode/extensions.json +*.code-workspace + +# Local History for Visual Studio Code +.history/ + +#### JetBrains IDEs + +# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider +# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 + +# User-specific stuff +.idea/**/workspace.xml +.idea/**/tasks.xml +.idea/**/usage.statistics.xml +.idea/**/dictionaries +.idea/**/shelf +.idea/**/.gitignore +.idea/**/inspectionProfiles/profiles_settings.xml +.idea/**/vcs.xml +.idea/**/misc.xml +.idea/**/modules.xml +.idea/**/*.iml + +# Generated files +.idea/**/contentModel.xml + +# Sensitive or high-churn files +.idea/**/dataSources/ +.idea/**/dataSources.ids +.idea/**/dataSources.local.xml +.idea/**/sqlDataSources.xml +.idea/**/dynamic.xml +.idea/**/uiDesigner.xml +.idea/**/dbnavigator.xml + +# Gradle +.idea/**/gradle.xml +.idea/**/libraries + +# Gradle and Maven with auto-import +# When using Gradle or Maven with auto-import, you should exclude module files, +# since they will be recreated, and may cause churn. Uncomment if using +# auto-import. 
+# .idea/artifacts +# .idea/compiler.xml +# .idea/jarRepositories.xml +# .idea/modules.xml +# .idea/*.iml +# .idea/modules +# *.iml +# *.ipr + +# CMake +cmake-build-*/ + +# Mongo Explorer plugin +.idea/**/mongoSettings.xml + +# File-based project format +*.iws + +# IntelliJ +out/ + +# mpeltonen/sbt-idea plugin +.idea_modules/ + +# JIRA plugin +atlassian-ide-plugin.xml + +# Cursive Clojure plugin +.idea/replstate.xml + +# Crashlytics plugin (for Android Studio and IntelliJ) +com_crashlytics_export_strings.xml +crashlytics.properties +crashlytics-build.properties +fabric.properties + +# Editor-based Rest Client +.idea/httpRequests + +# Android studio 3.1+ serialized cache file +.idea/caches/build_file_checksums.ser + +intergration_tests/* \ No newline at end of file diff --git a/MANIFEST.in b/MANIFEST.in new file mode 100644 index 0000000..3b52a48 --- /dev/null +++ b/MANIFEST.in @@ -0,0 +1,3 @@ +include requirements.txt +include README.md +include dev-requirements.txt \ No newline at end of file diff --git a/README.md b/README.md index 3b98cf7..025a8f4 100644 --- a/README.md +++ b/README.md @@ -1 +1,131 @@ -# featbit-python-sdk \ No newline at end of file +# FeatBit python sdk + +## Introduction + +This is the Python Server SDK for the feature management platform FeatBit. It is +intended for use in a multiple-users python server applications. + +This SDK has two main purposes: + +- Store the available feature flags and evaluate the feature flags by given user in the server side SDK +- Sends feature flags usage, and custom events for the insights and A/B/n testing. + +## Data synchonization + +We use websocket to make the local data synchronized with the server, and then store them in the memory by default. +Whenever there is any changes to a feature flag or his related data, the changes would be pushed to the SDK, the average +synchronization time is less than **100** ms. 
Be aware the websocket connection can be interrupted by any error or +internet interruption, but it would be restored automatically right after the problem is gone. + +## Offline mode support + +In the offline mode, SDK DOES not exchange any data with feature flag center, this mode is only use for internal test for instance. + +To open the offline mode: +```python +config = Config(env_secret, event_url, streaming_url, offline=True) +``` + +## Evaluation of a feature flag + +SDK will initialize all the related data(feature flags, segments etc.) in the bootstrapping and receive the data updates +in real time, as mentioned in the above + +After initialization, the SDK has all the feature flags in the memory and all evaluation is done locally and synchronously, the average evaluation time is < **10** ms. + +## Installation +install the sdk in using pip, this version of the SDK is compatible with Python 3.6 through 3.10. + +``` +pip install fb-python-sdk +``` + +## SDK + +Applications SHOULD instantiate a single instance for the lifetime of the application. In the case where an application +needs to evaluate feature flags from different environments, you may create multiple clients, but they should still be +retained for the lifetime of the application rather than created per request or per thread. + +### Bootstrapping + +The bootstrapping is in fact the call of constructor of `FFCClient`, in which the SDK will be initialized and connect to feature flag center + +The constructor will return when it successfully connects, or when the timeout(default: 15 seconds) expires, whichever comes first. If it has not succeeded in connecting when the timeout elapses, you will receive the client in an uninitialized state where feature flags will return default values; it will still continue trying to connect in the background unless there has been a network error or you close the client(using `stop()`). You can detect whether initialization has succeeded by calling `initialize()`. 
+ +The best way to use the SDK as a singleton, first make sure you have called `fbclient.set_config()` at startup time. Then `fbclient.get()` will return the same shared `fbclient.client.FFCClient` instance each time. The client will be initialized if it runs first time. +```python +from fbclient.config import Config +from fbclient import get, set_config + +set_config(Config(env_secret, event_url, streaming_url)) +client = get() + +if client.initialize: + # your code + +``` +You can also manage your `fbclient.client.FBClient`, the SDK will be initialized if you call `fbclient.client.FBClient` constructor. +```python +from fbclient.config import Config +from fbclient.client import FBClient + +client = FBClient(Config(env_secret, event_url, streaming_url), start_wait=15) + +if client.initialize: + # your code + +``` +If you prefer to have the constructor return immediately, and then wait for initialization to finish at some other point, you can use `fbclient.client.fbclient.update_status_provider` object, which provides an asynchronous way, as follows: + +``` python +from fbclient.config import Config +from fbclient.client import FBClient + +client = FFCClient(Config(env_secret), start_wait=0) +if client._update_status_provider.wait_for_OKState(): + # your code + +``` + + +### Evaluation + +SDK calculates the value of a feature flag for a given user, and returns a flag vlaue/an object that describes the way that the value was determined. + +`User`: A dictionary of attributes that can affect flag evaluation, usually corresponding to a user of your application. +This object contains built-in properties(`key`, `name`). The `key` and `name` are required. The `key` must uniquely identify each user; this could be a username or email address for authenticated users, or a ID for anonymous users. The `name` is used to search your user quickly. You may also define custom properties with arbitrary names and values. 
+For instance, the custom key should be a string; the custom value should be a string or a number + +```python +if client.initialize: + user = {'key': user_key, 'name': user_name, 'age': age} + flag_value = client.variation(flag_key, user, default_value) + # your if/else code according to flag value + +``` +If evaluation called before SDK client initialized or you set the wrong flag key or user for the evaluation, SDK will return +the default value you set. The `fbclient.common_types.FlagState` will explain the details of the last evaluation including error raison. + +If you would like to get variations of all feature flags in a special environment, you can use `fbclient.client.FBClient.get_all_latest_flag_variations`, SDK will return `fbclient.common_types.AllFlagStates`, that explain the details of all feature flags +```python +if client.initialize: + user = {'key': user_key, 'name': user_name} + all_flag_values = client.get_all_latest_flag_variations(user) + ed = all_flag_values.get(flag_key) + flag_value = ed.variation + # your if/else code according to flag value + + +``` + +### Experiments (A/B/n Testing) +We support automatic experiments for pageviews and clicks, you just need to set your experiment on our SaaS platform, then you should be able to see the result in near real time after the experiment is started. + +In case you need more control over the experiment data sent to our server, we offer a method to send custom event. +```python +client.track_metric(user, event_name, numeric_value); +``` +**numeric_value** is not mandatory, the default value is **1**. + +Make sure `track_metric` is called after the related feature flag is evaluated by simply calling `variation` or `variation_detail` +otherwise, the custom event may not be included into the experiment result. 
\ No newline at end of file diff --git a/dev-requirements.txt b/dev-requirements.txt new file mode 100644 index 0000000..fb03fa4 --- /dev/null +++ b/dev-requirements.txt @@ -0,0 +1,10 @@ +certifi>=2018.4.16 +urllib3>=1.22.0 +websocket-client>=1.0.0 +python-dateutil>=2.8.2 +flake8 +pytest +pytest-mock +autopep8 +build +twine \ No newline at end of file diff --git a/fbclient/__init__.py b/fbclient/__init__.py new file mode 100644 index 0000000..054a925 --- /dev/null +++ b/fbclient/__init__.py @@ -0,0 +1,71 @@ +from fbclient.client import FBClient +from fbclient.config import Config +from fbclient.utils.rwlock import ReadWriteLock +from fbclient.utils import log + +"""Settings.""" +start_wait = 15 + +__client = None +__config = None +__lock = ReadWriteLock() + + +def get() -> FBClient: + """Returns the singleton Python SDK client instance, using the current configuration. + + To use the SDK as a singleton, first make sure you have called :func:`fbclient.set_config()` + at startup time. Then :func:`fbclient.get()` will return the same shared :class:`fbclient.client.FBClient` + instance each time. The client will be initialized if it runs first time. + ``` + set_config(Config(env_secret, event_url, streaming_url)) + client = get() + ``` + If you need to create multiple client instances with different environments, instead of this + singleton approach you can call directly the :class:`fbclient.client.FBClient` constructor. + """ + global __config + global __client + global __lock + + try: + __lock.read_lock() + if __client: + return __client + if not __config: + raise Exception("config is not initialized") + finally: + __lock.release_read_lock() + + try: + __lock.write_lock() + if not __client: + log.info("FB Python SDK: FB Python Client is initializing...") + __client = FBClient(__config, start_wait) + return __client + finally: + __lock.release_write_lock() + + +def set_config(config: Config): + """Sets the configuration for the shared SDK client instance. 
+ + If this is called prior to :func:`fbclient.get()`, it stores the configuration that will be used when the + client is initialized. If it is called after the client has already been initialized, the client will be + re-initialized with the new configuration. + + :param config: the client configuration + """ + global __config + global __client + global __lock + + try: + __lock.write_lock() + if __client: + __client.stop() + log.info('FB Python SDK: FB Python Client is reinitializing...') + __client = FBClient(config, start_wait) + finally: + __config = config + __lock.release_write_lock() diff --git a/fbclient/category.py b/fbclient/category.py new file mode 100644 index 0000000..b944bde --- /dev/null +++ b/fbclient/category.py @@ -0,0 +1,34 @@ +class Category: + """ + This class is used only by the internals of the feature flag storage mechanism. + This type will be passed to the feature flag storage methods; + its ``name`` property tells the feature store which collection of data is being referenced ("featureflags", "segments", etc.) + The purpose is for the storage module to store data as completely generic JSON database + """ + + def __init__(self, name, tag): + self._name = name + self._tag = tag + + @property + def name(self): + return self._name + + @property + def tag(self): + return self._tag + + +FEATURE_FLAGS = Category('featureFlags', 'ff') + +SEGMENTS = Category('segments', 'seg') + +DATATEST = Category('datatest', 'test') + +""" +An enumeration of all supported types. Applications should not need to reference this object directly. +Custom data storage implementations can determine what kinds of model objects may need to be stored. 
+""" +ALL_CATS = [FEATURE_FLAGS, SEGMENTS, DATATEST] + +ALL_CAT_NAMES = ['featureFlags', 'segments', 'datatest'] diff --git a/fbclient/client.py b/fbclient/client.py new file mode 100644 index 0000000..71c25cb --- /dev/null +++ b/fbclient/client.py @@ -0,0 +1,398 @@ +import json +import threading +from distutils.util import strtobool +from typing import Any, Mapping, Optional, Tuple + +from fbclient.category import FEATURE_FLAGS, SEGMENTS +from fbclient.common_types import AllFlagStates, FBUser, FlagState, _EvalResult +from fbclient.config import Config +from fbclient.data_storage import NullDataStorage +from fbclient.evaluator import (REASON_CLIENT_NOT_READY, REASON_ERROR, + REASON_FLAG_NOT_FOUND, + REASON_USER_NOT_SPECIFIED, Evaluator) +from fbclient.event_processor import DefaultEventProcessor, NullEventProcessor +from fbclient.event_types import FlagEvent, Metric, MetricEvent, UserEvent +from fbclient.interfaces import DataUpdateStatusProvider +from fbclient.status import DataUpdateStatusProviderIml +from fbclient.status_types import State +from fbclient.streaming import Streaming, _data_to_dict +from fbclient.update_processor import NullUpdateProcessor +from fbclient.utils import (cast_variation_by_flag_type, check_uwsgi, log, + simple_type_inference, valide_all_data) +from fbclient.utils.http_client import DefaultSender + + +class FBClient: + """The FeatBit Python SDK client object. + + Applications SHOULD instantiate a single instance for the lifetime of the application. + In the case where an application needs to evaluate feature flags from different environments, + you may create multiple clients, but they should still be retained for the lifetime of the application + rather than created per request or per thread. + + Client instances are thread-safe. + """ + + def __init__(self, config: Config, start_wait: float = 15.): + """ + Creates a new client to connect to feature flag center with a specified configuration. 
+ + Unless client is configured in offline mode, this client try to connect to feature flag center as soon as the constructor is called. + + The constructor will return when it successfully connects, or when the timeout (default: 15 seconds) expires, whichever comes first. + ``` + client = FBClient(Config(env_secret, event_url, streaming_url), start_wait=15) + + if client.initialize: + # your code + ``` + + If it has not succeeded in connecting when the timeout elapses, you will receive the client in an uninitialized state where feature flags will return default values; + it will still continue trying to connect in the background unless there has been an unrecoverable error or you close the client by :func:`stop`. + You can detect whether initialization has succeeded by :func:`initialize`. + + If you prefer to have the constructor return immediately, and then wait for initialization to finish at some other point, + you can use :func:`update_status_provider` as follows: + ``` + client = FBClient(Config(env_secret, event_url, streaming_url), start_wait=0) + if client._update_status_provider.wait_for_OKState(): + # your code + ``` + :param config: the client configuration + :param start_wait: the max time to wait for initialization + """ + check_uwsgi() + + if not isinstance(config, Config): + raise ValueError("Config is not valid") + + self._config = config + self._config.validate() + + # init components + # event processor + self._event_processor = self._build_event_processor(config) + self._event_handler = lambda event: self._event_processor.send_event(event) + # data storage + self._data_storage = config.data_storage + # evaluator + self._evaluator = Evaluator(lambda key: self._data_storage.get(FEATURE_FLAGS, key), + lambda key: self._data_storage.get(SEGMENTS, key)) + # data updator and status provider + self._update_status_provider = DataUpdateStatusProviderIml(config.data_storage) + # update processor + update_processor_ready = threading.Event() + 
self._update_processor = self._build_update_processor(config, self._update_status_provider, + update_processor_ready) + self._update_processor.start() + + if start_wait > 0: + if not isinstance(self._update_processor, NullUpdateProcessor): + log.info("FB Python SDK: Waiting for Client initialization in %s seconds" % str(start_wait)) + + if isinstance(self._data_storage, NullDataStorage): + log.info("FB Python SDK: SDK just returns default variation") + + update_processor_ready.wait(start_wait) + if self._config.is_offline: + log.info("FB Python SDK: SDK is in offline mode") + elif self._update_processor.initialized: + log.info("FB Python SDK: SDK initialization is completed") + else: + log.warning("FB Python SDK: SDK was not successfully initialized") + else: + log.info("FB Python SDK: SDK starts in asynchronous mode") + + def _build_event_processor(self, config: Config): + if config.event_processor_imp: + log.debug("Using user-specified event processor: %s" % str(config.event_processor_imp)) + return config.event_processor_imp(config, DefaultSender('insight', config, max_size=10)) + + if config.is_offline: + log.debug("Offline mode, SDK disable event processing") + return NullEventProcessor(config, DefaultSender('insight', config, max_size=10)) + + return DefaultEventProcessor(config, DefaultSender('insight', config, max_size=10)) + + def _build_update_processor(self, config: Config, update_status_provider, update_processor_event): + if config.update_processor_imp: + log.debug("Using user-specified update processor: %s" % str(config.update_processor_imp)) + return config.update_processor_imp(config, update_status_provider, update_processor_event) + + if config.is_offline: + log.debug("Offline mode, SDK disable streaming data updating") + return NullUpdateProcessor(config, update_status_provider, update_processor_event) + + return Streaming(config, update_status_provider, update_processor_event) + + @property + def initialize(self) -> bool: + """Returns true if the 
client has successfully connected to feature flag center. + + If this returns false, it means that the client has not yet successfully connected to feature flag center. + It might still be in the process of starting up, or it might be attempting to reconnect after an + unsuccessful attempt, or it might have received an unrecoverable error and given up. + """ + return self._update_processor.initialized + + @property + def update_status_provider(self) -> DataUpdateStatusProvider: + return self._update_status_provider + + def stop(self): + """Releases all threads and network connections used by SDK. + + Do not attempt to use the client after calling this method. + """ + log.info("FB Python SDK: Python SDK client is closing...") + self._data_storage.stop() + self._update_processor.stop() + self._event_processor.stop() + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, exc_traceback): + self.stop() + + def is_offline(self) -> bool: + """Returns true if the client is in offline mode. 
+ """ + return self._config.is_offline + + def _get_flag_internal(self, key: str) -> Optional[dict]: + return self._data_storage.get(FEATURE_FLAGS, key) + + def __handle_default_value(self, key: str, default: Any) -> Tuple[Optional[str], Optional[str]]: + default_value = self._config.get_default_value(key, default) + default_value_type = simple_type_inference(default_value) + if default_value is None: + return None, None + elif default_value_type == 'boolean': + return default_value_type, str(default).lower() + elif default_value_type == 'json': + return default_value_type, json.dumps(default_value) + else: + return default_value_type, str(default_value) + + def _evaluate_internal(self, key: str, user: dict, default: Any = None) -> _EvalResult: + default_value_type, default_value = self.__handle_default_value(key, default) + try: + if not self.initialize: + log.warning('FB Python SDK: Evaluation called before Java SDK client initialized for feature flag, well using the default value') + return _EvalResult.error(default_value, REASON_CLIENT_NOT_READY, key, default_value_type) + + if not key: + log.warning('FB Python SDK: null feature flag key; returning default value') + return _EvalResult.error(default_value, REASON_FLAG_NOT_FOUND, key, default_value_type) + + flag = self._get_flag_internal(key) + if not flag: + log.warning('FB Python SDK: Unknown feature flag %s; returning default value' % key) + return _EvalResult.error(default_value, REASON_FLAG_NOT_FOUND, key, default_value_type) + + try: + fb_user = FBUser.from_dict(user) + except ValueError as ve: + log.warning('FB Python SDK: %s' % str(ve)) + return _EvalResult.error(default_value, REASON_USER_NOT_SPECIFIED, key, default_value_type) + + fb_event = FlagEvent(fb_user) + er = self._evaluator.evaluate(flag, fb_user, fb_event) + self._event_handler(fb_event) + return er + + except Exception as e: + log.exception('FB Python SDK: unexpected error in evaluation: %s' % str(e)) + return 
_EvalResult.error(default_value, REASON_ERROR, key, default_value_type) + + def variation(self, key: str, user: dict, default: Any = None) -> Any: + """Return the variation of a feature flag for a given user. + + This method will send an event back to feature flag center immediately if no error occurs. + + The result of the flag evaluation will be converted to: + 1: string if the feature flag is a string type + 2: bool if the feature flag is a boolean type + 3: Python object if the feature flag is a json type + 4: float/int if the feature flag is a numeric type + + :param key: the unique key for the feature flag + :param user: the attributes of the user + :param default: the default value of the flag, to be used if the return value is not available + :return: one of the flag's values in any type in any type of string, bool, json(Python object), and float + or the default value if flag evaluation fails + """ + er = self._evaluate_internal(key, user, default) + return cast_variation_by_flag_type(er.flag_type, er.value) + + def variation_detail(self, key: str, user: dict, default: Any = None) -> FlagState: + """"Return the variation of a feature flag for a given user, but also provides additional information + about how this value was calculated, in the property `data` of the :class:`fbclient.common_types.FlagState`. + + This method will send an event back to feature flag center immediately if no error occurs. + + :param key: the unique key for the feature flag + :param user: the attributes of the user + :param default: the default value of the flag, to be used if the return value is not available + :return: an :class:`fbclient.common_types.FlagState` object + """ + return self._evaluate_internal(key, user, default).to_flag_state + + def is_enabled(self, key: str, user: dict) -> bool: + """ + Return the bool value for a feature flag for a given user. 
it's strongly recommended to call this method + only in a bool feature flag, otherwise the results may not be what you expect + + This method will send an event back to feature flag center immediately if no error occurs. + + :param key: the unique key for the feature flag + :param user: the attributes of the user + :return: True or False + + """ + try: + value = self.variation(key, user, False) + return bool(strtobool(str(value))) + except ValueError: + return False + + def get_all_latest_flag_variations(self, user: dict) -> AllFlagStates: + """ + Returns an object that encapsulates the state of all feature flags for a given user + + This method does not send events back to feature flag center immediately util calling :func:`fbcclient.common_types.AllFlagStates.get()` + + :param user: the attributes of the user + :return: an :class:`fbcclient.common_types.AllFlagStates` object (will never be None; its `success` property will be False + if SDK has not been initialized or the user invalid) + """ + all_flag_details = {} + message = "" + success = True + try: + if not self.initialize: + log.warning('FB Python SDK: Evaluation called before Java SDK client initialized for feature flag') + message = REASON_CLIENT_NOT_READY + success = False + else: + try: + fb_user = FBUser.from_dict(user) + all_flags = self._data_storage.get_all(FEATURE_FLAGS) + for flag in all_flags.values(): + fb_event = FlagEvent(fb_user) + er = self._evaluator.evaluate(flag, fb_user, fb_event) + all_flag_details[er.to_evail_detail] = fb_event + except ValueError as ve: + log.warning('FB Python SDK: %s' % str(ve)) + message = REASON_USER_NOT_SPECIFIED + success = False + except: + raise + except Exception as e: + log.exception('FB Python SDK: unexpected error in evaluation: %s' % str(e)) + message = REASON_ERROR + success = False + return AllFlagStates(success, message, all_flag_details, self._event_handler) + + def is_flag_known(self, key: str) -> bool: + """ + Checks if the given flag exists in the 
your environment + + :param key: The key name of the flag to check + :return: True if the flag exists + """ + try: + if not self.initialize: + log.warning('FB Python SDK: isFlagKnown called before Java SDK client initialized for feature flag') + return False + return self._get_flag_internal(key) is not None + except Exception as e: + log.exception('FB Python SDK: unexpected error in is_flag_known: %s' % str(e)) + return False + + def flush(self): + """Flushes all pending events. + + Normally, batches of events are delivered in the background at intervals determined by the + `events_flush_interval` property of :class:`fbclient.config.Config`. Calling `flush()` + schedules the next event delivery to be as soon as possible; however, the delivery still + happens asynchronously on a thread, so this method will return immediately. + """ + self._event_processor.flush() + + def identify(self, user: dict): + """register an end user in the feature flag center + + :param user: the attributes of the user + """ + try: + fb_user = FBUser.from_dict(user) + except ValueError: + log.warning('FB Python SDK: user invalid') + return + + self._event_handler(UserEvent(fb_user)) + + def track_metric(self, user: dict, event_name: str, metric_value: float = 1.0): + """Tracks that a user performed a metric event. + + Our feature flag center supports to track pageviews and clicks that are specified in the dashboard UI. + This can be used to track custom metric. 
+ + :param user: the attributes of the user + :param event_name: the name of the event, which may correspond to a goal in A/B tests + :param metric_value: a numeric value used by the experiment, default value is 1.0 + """ + if not event_name or metric_value <= 0: + log.warning('FB Python SDK: event/metric invalid') + return + try: + fb_user = FBUser.from_dict(user) + except ValueError: + log.warning('FB Python SDK: user invalid') + return + + fb_user = FBUser.from_dict(user) + metric_event = MetricEvent(fb_user).add(Metric(event_name, metric_value)) + self._event_handler(metric_event) + + def track_metrics(self, user: dict, metrics: Mapping[str, float]): + """Tracks that a user performed a map of metric events. + + if any event_name or metric_value is invalid, that metric will be ignored + + :param user: the attributes of the user + :param metrics: the pairs of event_name and metric_value + """ + if not isinstance(metrics, dict): + log.warning('FB Python SDK: metrics invalid') + return + try: + fb_user = FBUser.from_dict(user) + except ValueError: + log.warning('FB Python SDK: user invalid') + return + + metric_event = MetricEvent(fb_user) + for event_name, metric_value in metrics.items(): + if event_name and metric_value > 0: + metric_event.add(Metric(event_name, metric_value)) + self._event_handler(metric_event) + + def initialize_from_external_json(self, json_str: str) -> bool: + """SDK initialization in the offline mode, this method is mainly used for tests + + :param json_str: feature flags, segments...etc in the json format + :return: True if the initialization is well done + """ + if self._config.is_offline and json_str: + all_data = json.loads(json_str) + if valide_all_data(all_data): + version, data = _data_to_dict(all_data['data']) + res = self._update_status_provider.init(data, version) + if res: + self._update_status_provider.update_state(State.ok_state()) + return res + + return False diff --git a/fbclient/common_types.py b/fbclient/common_types.py new 
import json
from abc import ABC, abstractmethod
from typing import Any, Callable, Dict, Iterable, Mapping, Optional

# NOTE(review): names from fbclient.utils (is_numeric,
# cast_variation_by_flag_type) are imported lazily at their use sites below so
# this module stays loadable on its own -- confirm no caller relies on them
# being attributes of this module.

# accepted spellings of built-in user properties mapped to their canonical key
__BUILTINS_MAPING__ = {'key': 'keyid',
                       'name': 'name',
                       'keyid': 'keyid'}

# sentinel variation id marking a failed evaluation
__NO_VARIATION__ = 'NE'

__FLAG_KEY_UNKNOWN__ = 'flag key unknown'

__FLAG_NAME_UNKNOWN__ = 'flag name unknown'

__FLAG_VALUE_UNKNOWN__ = 'flag value unknown'


class Jsonfy(ABC):
    """Mixin for objects that can render themselves as a JSON dict or string."""

    @abstractmethod
    def to_json_dict(self) -> dict:
        """Return the object as a plain dict ready for ``json.dumps``."""

    def to_json_str(self) -> str:
        """Serialize :func:`to_json_dict` to a JSON string."""
        return json.dumps(self.to_json_dict())


class FBUser(Jsonfy):
    """A feature-flag user: mandatory key and name plus customized properties.

    Extra keyword arguments are kept only when the key is a string that does
    not collide with a built-in property and the value is a string or a
    number; all stored custom values are normalized to strings.
    """

    def __init__(self, key: Optional[str], name: Optional[str], **kwargs):
        self._check_argument(key, 'key is not valid')
        self._check_argument(name, 'name is not valid')
        self._commons = {'keyid': key, 'name': name}
        self._customs: Dict[str, str] = {}
        if kwargs:
            # deferred import: only needed when custom properties are supplied
            from fbclient.utils import is_numeric
            self._customs.update((k, str(v)) for k, v in kwargs.items()
                                 if isinstance(k, str)
                                 and k.lower() not in __BUILTINS_MAPING__
                                 and (isinstance(v, str) or is_numeric(v)))

    @staticmethod
    def from_dict(user: Dict[str, Any]) -> "FBUser":
        """Build an FBUser from a plain dict.

        :param user: dict carrying 'key' (or 'keyid'), 'name' and any custom properties
        :raises ValueError: if ``user`` is not a dict or key/name are invalid
        """
        if not isinstance(user, dict):
            raise ValueError('user is not valid')
        user_copy = dict(user)
        key = user_copy.pop('key', None) or user_copy.pop('keyid', None)
        name = user_copy.pop('name', None)
        return FBUser(key, name, **user_copy)

    @staticmethod
    def _check_argument(value, msg) -> bool:
        # only a non-blank string is acceptable
        if isinstance(value, str) and value.strip():
            return True
        raise ValueError(msg)

    def get(self, prop: str, default=None) -> Optional[str]:
        """Return a built-in or customized property value.

        Built-in properties also match their alias spellings
        ('key'/'keyid'/'name', case-insensitively).
        """
        if not isinstance(prop, str):
            return default
        if prop in self._commons:
            return self._commons[prop]
        builtin = __BUILTINS_MAPING__.get(prop.lower())
        if builtin is not None:
            return self._commons.get(builtin, default)
        return self._customs.get(prop, default)

    def to_json_dict(self) -> dict:
        return {'keyId': self._commons['keyid'],
                'name': self._commons['name'],
                'customizedProperties': [{'name': k, 'value': v} for k, v in self._customs.items()]}


class EvalDetail(Jsonfy):
    """
    The object combining the result of a flag evaluation with information about how it was calculated.
    The result of the flag evaluation should be converted to:
    1: string if the feature flag is a string type
    2: bool if the feature flag is a boolean type
    3: Python object if the feature flag is a json type
    4: float if the feature flag is a numeric type
    """

    def __init__(self,
                 reason: str,
                 variation: Any,
                 key_name: Optional[str] = None,
                 name: Optional[str] = None):
        """Constructs an instance.

        :param reason: main factor that influenced the flag evaluation value
        :param variation: result of the flag evaluation in any type of string, bool, float/int, json(Python object) or default value if flag evaluation fails
        :param key_name: key name of the flag
        :param name: name of the flag
        """
        self._reason = reason
        self._variation = variation
        self._key_name = key_name
        self._name = name

    @property
    def reason(self) -> str:
        """A string describing the main factor that influenced the flag evaluation value."""
        return self._reason

    @property
    def variation(self) -> Any:
        """The result of the flag evaluation, or the default value if evaluation fails."""
        return self._variation

    @property
    def key_name(self) -> Optional[str]:
        """The flag key name."""
        return self._key_name

    @property
    def name(self) -> Optional[str]:
        """The flag name."""
        return self._name

    def to_json_dict(self) -> dict:
        return {'reason': self.reason,
                'variation': self.variation,
                'keyName': self.key_name,
                'name': self.name}


class BasicFlagState:
    """Abstract class representing flag state after feature flag evaluation."""

    def __init__(self, success: bool, message: str):
        """Constructs an instance.

        :param success: True if successful
        :param message: the state of last evaluation; the value is OK if successful
        """
        self._success = success
        # a successful evaluation always reports 'OK', whatever message was given
        self._message = 'OK' if success else message

    @property
    def success(self) -> bool:
        """Returns true if last evaluation was successful."""
        return self._success

    @property
    def message(self) -> str:
        """Message representing the state of last evaluation; 'OK' if successful."""
        return self._message


class FlagState(BasicFlagState, Jsonfy):
    """Flag state of a single feature flag after evaluation.

    The evaluation details (how the value was calculated) live in ``data``.
    """

    def __init__(self, success: bool, message: str, data: EvalDetail):
        """Constructs an instance.

        :param success: True if successful
        :param message: the state of last evaluation; the value is OK if successful
        :param data: the result of a flag evaluation with information about how it was calculated
        """
        super().__init__(success, message)
        self._data = data

    @property
    def data(self) -> EvalDetail:
        """Return the result of a flag evaluation with information about how it was calculated."""
        return self._data

    def to_json_dict(self) -> dict:
        return {'success': self.success,
                'message': self.message,
                'data': self._data.to_json_dict() if self._data else None}


class AllFlagStates(BasicFlagState, Jsonfy):
    """State of all feature flags for a given user after evaluation.

    Use :func:`get(key_name)` to fetch the state of one feature flag.
    """

    def __init__(self, success: bool, message: str,
                 data: Mapping[EvalDetail, "FBEvent"],
                 event_handler: Callable[["FBEvent"], None]):
        """Constructs an instance.

        :param success: True if successful
        :param message: the state of last evaluation; the value is OK if successful
        :param data: a dictionary containing state of all feature flags and their events
        :param event_handler: callback used to send events to the feature flag center
        """
        super().__init__(success, message)
        # index by flag key name; if two details share a key the later one wins
        self._data = {ed.key_name: (ed, fb_event) for ed, fb_event in data.items()} if data else {}
        self._event_handler = event_handler

    @property
    def key_names(self) -> Iterable[Optional[str]]:
        """Return key names of all feature flags."""
        return self._data.keys()

    def get(self, key_name: str) -> Optional[EvalDetail]:
        """Return the flag evaluation details of a given feature flag key.

        This method sends the matching event back to the feature flag center
        immediately.

        :param key_name: key name of the flag
        :return: an :class:`fbclient.common_types.EvalDetail` object, or None if unknown
        """
        ed, fb_event = self._data.get(key_name, (None, False))
        if self._event_handler and fb_event:
            self._event_handler(fb_event)
        return ed

    def to_json_dict(self) -> dict:
        return {'success': self.success,
                'message': self.message,
                'data': [ed.to_json_dict() for ed, _ in self._data.values()] if self._data else []}


class FBEvent(Jsonfy, ABC):
    """Base class of all events bound to a user."""

    def __init__(self, user: "FBUser"):
        self._user = user

    @abstractmethod
    def add(self, *elements) -> 'FBEvent':
        """Attach one or more payload elements; returns self for chaining."""

    @property
    @abstractmethod
    def is_send_event(self) -> bool:
        """True when the event carries something worth sending."""


class _EvalResult:
    """Internal value object holding the raw outcome of a flag evaluation."""

    def __init__(self,
                 id: str,
                 value: Optional[str],
                 reason: str,
                 is_send_to_expt: bool = False,
                 key_name: Optional[str] = None,
                 name: Optional[str] = None,
                 flag_type: Optional[str] = 'string'):

        self.__id = id
        self.__value = value
        self.__reason = reason
        self.__is_send_to_expt = is_send_to_expt
        self.__key_name = key_name
        self.__name = name
        self.__flag_type = flag_type

    @staticmethod
    def error(default_value: Optional[str], reason: str, key_name: Optional[str] = None, flag_type: Optional[str] = 'string'):
        """Build a failed evaluation result carrying the caller's default value."""
        return _EvalResult(__NO_VARIATION__, default_value, reason, False, key_name if key_name else __FLAG_KEY_UNKNOWN__, __FLAG_NAME_UNKNOWN__, flag_type)

    @property
    def id(self) -> str:
        return self.__id

    @property
    def value(self) -> Optional[str]:
        return self.__value

    @property
    def reason(self) -> str:
        return self.__reason

    @property
    def is_send_to_expt(self) -> bool:
        return self.__is_send_to_expt

    @property
    def key_name(self) -> Optional[str]:
        return self.__key_name

    @property
    def name(self) -> Optional[str]:
        return self.__name

    @property
    def flag_type(self) -> Optional[str]:
        return self.__flag_type

    @property
    def is_success(self) -> bool:
        # a real variation id means evaluation succeeded
        return self.__id != __NO_VARIATION__

    @property
    def to_evail_detail(self) -> "EvalDetail":
        # deferred import: only needed when converting to the public type
        from fbclient.utils import cast_variation_by_flag_type
        _value = cast_variation_by_flag_type(self.__flag_type, self.__value)
        return EvalDetail(self.__reason, _value, self.__key_name, self.__name)

    @property
    def to_flag_state(self) -> "FlagState":
        return FlagState(self.is_success, self.__reason, self.to_evail_detail)
from threading import Event
from typing import TYPE_CHECKING, Any, Callable, Dict, Optional, Tuple

if TYPE_CHECKING:
    # imported only for static type checking; the runtime imports are deferred
    # to their use sites to keep this module light to load
    from fbclient.interfaces import (DataStorage, DataUpdateStatusProvider,
                                     EventProcessor, Sender, UpdateProcessor)

__all__ = ['Config', 'HTTPConfig', 'WebSocketConfig']

try:
    # https://websocket-client.readthedocs.io/en/latest/faq.html#why-is-this-library-slow
    import wsaccel  # noqa: F401

    def _skip_utf8_validation():  # type: ignore
        return False
except ImportError:
    def _skip_utf8_validation():
        return True


class WebSocketConfig:
    """
    FBClient websocket supports proxied connections, please read the details in https://websocket-client.readthedocs.io/en/latest/examples.html#connecting-through-a-proxy
    FBClient websocket supports ssl connection, please read the details in https://websocket-client.readthedocs.io/en/latest/faq.html#what-else-can-i-do-with-sslopts
    """

    def __init__(self,
                 timeout: float = 5.0,
                 sslopt: Optional[Dict[str, Any]] = None,
                 proxy_type: Optional[str] = None,
                 proxy_host: Optional[str] = None,
                 proxy_port: Optional[str] = None,
                 proxy_auth: Optional[Tuple[str, str]] = None):
        # a timeout is triggered if no connection response is received from the
        # server after the timeout interval; invalid values fall back to 5s and
        # the timeout is capped at 10s
        self.__timeout = 5.0 if timeout is None or timeout <= 0 else min(timeout, 10.0)
        self.__sslopt = sslopt
        self.__proxy_type = proxy_type
        self.__proxy_host = proxy_host
        self.__proxy_port = proxy_port
        self.__proxy_auth = proxy_auth

    @property
    def skip_utf8_validation(self) -> bool:
        # validation is skipped only when wsaccel is unavailable
        return _skip_utf8_validation()

    @property
    def timeout(self) -> float:
        return self.__timeout

    @property
    def sslopt(self) -> Optional[Dict[str, Any]]:
        return self.__sslopt

    @property
    def proxy_type(self) -> Optional[str]:
        return self.__proxy_type

    @property
    def proxy_host(self) -> Optional[str]:
        return self.__proxy_host

    @property
    def proxy_port(self) -> Optional[str]:
        return self.__proxy_port

    @property
    def proxy_auth(self) -> Optional[Tuple[str, str]]:
        return self.__proxy_auth


class HTTPConfig:
    """HTTP transport options: timeouts, proxy and TLS settings."""

    def __init__(self,
                 connect_timeout: float = 5.0,
                 read_timeout: float = 10.0,
                 http_proxy: Optional[str] = None,
                 http_proxy_auth: Optional[Tuple[str, str]] = None,
                 ca_certs: Optional[str] = None,
                 cert_file: Optional[str] = None,
                 disable_ssl_verification: bool = False):
        # invalid (None or non-positive) timeouts fall back to the defaults
        self.__connect_timeout = 5.0 if connect_timeout is None or connect_timeout <= 0 else connect_timeout
        self.__read_timeout = 10.0 if read_timeout is None or read_timeout <= 0 else read_timeout
        self.__http_proxy = http_proxy
        self.__http_proxy_auth = http_proxy_auth
        self.__ca_certs = ca_certs
        self.__cert_file = cert_file
        self.__disable_ssl_verification = disable_ssl_verification

    @property
    def connect_timeout(self) -> float:
        return self.__connect_timeout

    @property
    def read_timeout(self) -> float:
        return self.__read_timeout

    @property
    def http_proxy(self) -> Optional[str]:
        return self.__http_proxy

    @property
    def http_proxy_auth(self) -> Optional[Tuple[str, str]]:
        return self.__http_proxy_auth

    @property
    def ca_certs(self) -> Optional[str]:
        return self.__ca_certs

    @property
    def cert_file(self) -> Optional[str]:
        return self.__cert_file

    @property
    def disable_ssl_verification(self) -> bool:
        return self.__disable_ssl_verification


class Config:
    """All SDK configuration: endpoints, event/queue tuning and pluggable components."""

    __STREAMING_PATH = '/streaming'
    __EVENTS_PATH = '/api/public/insight/track'

    def __init__(self,
                 env_secret: str,
                 event_url: str,
                 streaming_url: str,
                 streaming_first_retry_delay: float = 1.0,
                 events_max_in_queue: int = 10000,
                 events_flush_interval: float = 1.0,
                 events_retry_interval: float = 0.1,
                 events_max_retries: int = 1,
                 offline: bool = False,
                 data_storage: Optional["DataStorage"] = None,
                 update_processor_imp: Optional[Callable[['Config', 'DataUpdateStatusProvider', Event], 'UpdateProcessor']] = None,
                 event_processor_imp: Optional[Callable[['Config', 'Sender'], 'EventProcessor']] = None,
                 http: HTTPConfig = HTTPConfig(),
                 websocket: WebSocketConfig = WebSocketConfig(),
                 defaults: Optional[dict] = None):

        self.__env_secret = env_secret
        # trailing slashes are stripped so path constants can be appended safely
        self.__event_url = event_url.rstrip('/')
        self.__streaming_url = streaming_url.rstrip('/')
        # first retry delay: fall back to 1s when invalid, capped at 60s
        self.__streaming_first_retry_delay = 1.0 if streaming_first_retry_delay is None or streaming_first_retry_delay <= 0 else min(
            streaming_first_retry_delay, 60.0)
        self.__offline = offline
        if data_storage:
            self.__data_storage = data_storage
        else:
            # deferred import keeps this module importable without the storage module
            from fbclient.data_storage import InMemoryDataStorage
            self.__data_storage = InMemoryDataStorage()
        self.__event_processor_imp = event_processor_imp
        self.__update_processor_imp = update_processor_imp
        # queue capacity is floored at 10000
        self.__events_max_in_queue = 10000 if events_max_in_queue is None else max(events_max_in_queue, 10000)
        # flush interval: fall back to 1s when invalid, capped at 3s
        self.__events_flush_interval = 1.0 if events_flush_interval is None or events_flush_interval <= 0 else min(
            events_flush_interval, 3.0)
        # retry interval: fall back to 0.1s when invalid, capped at 1s
        self.__events_retry_interval = 0.1 if events_retry_interval is None or events_retry_interval <= 0 else min(
            events_retry_interval, 1)
        # retries: fall back to 1 when invalid, capped at 3
        self.__events_max_retries = 1 if events_max_retries is None or events_max_retries <= 0 else min(
            events_max_retries, 3)
        self.__http = http
        self.__websocket = websocket
        self.__defaults = defaults if defaults is not None else {}

    def copy_config_in_a_new_env(self, env_secret: str, defaults=None) -> 'Config':
        """Clone this configuration for another environment secret.

        :param env_secret: the new environment secret
        :param defaults: optional replacement for the default flag values
        """
        return Config(env_secret,
                      event_url=self.__event_url,
                      streaming_url=self.__streaming_url,
                      streaming_first_retry_delay=self.__streaming_first_retry_delay,
                      events_max_in_queue=self.__events_max_in_queue,
                      events_flush_interval=self.__events_flush_interval,
                      events_retry_interval=self.__events_retry_interval,
                      events_max_retries=self.__events_max_retries,
                      offline=self.__offline,
                      data_storage=self.__data_storage,
                      update_processor_imp=self.__update_processor_imp,
                      event_processor_imp=self.__event_processor_imp,
                      http=self.__http,
                      websocket=self.__websocket,
                      defaults=defaults if defaults is not None else self.__defaults)

    def get_default_value(self, key, default=None) -> Any:
        # annotation fixed: this returns whatever value was registered, not a dict
        return self.__defaults.get(key, default)

    @property
    def env_secret(self) -> str:
        return self.__env_secret

    @property
    def events_max_in_queue(self) -> int:
        return self.__events_max_in_queue

    @property
    def events_flush_interval(self) -> float:
        return self.__events_flush_interval

    @property
    def events_retry_interval(self) -> float:
        return self.__events_retry_interval

    @property
    def events_max_retries(self) -> int:
        return self.__events_max_retries

    @property
    def events_uri(self) -> str:
        return self.__event_url + self.__EVENTS_PATH

    @property
    def streaming_uri(self) -> str:
        return self.__streaming_url + self.__STREAMING_PATH

    @property
    def streaming_first_retry_delay(self) -> float:
        return self.__streaming_first_retry_delay

    @property
    def is_offline(self) -> bool:
        return self.__offline

    @property
    def data_storage(self) -> "DataStorage":
        return self.__data_storage

    @property
    def update_processor_imp(self):
        return self.__update_processor_imp

    @property
    def event_processor_imp(self):
        return self.__event_processor_imp

    @property
    def http(self) -> HTTPConfig:
        return self.__http

    @property
    def websocket(self) -> WebSocketConfig:
        return self.__websocket

    def validate(self):
        """Raise ValueError if the secret or any endpoint URL is invalid."""
        # deferred import: only needed when validation is requested
        from fbclient.utils import is_ascii, is_url
        if not is_ascii(self.__env_secret):
            raise ValueError('env secret is invalid')
        elif not is_url(self.__streaming_url) or not is_url(self.__event_url):
            raise ValueError('streaming or event url is invalid')
from collections import defaultdict
from typing import Mapping, Optional

from fbclient.category import Category
from fbclient.interfaces import DataStorage
from fbclient.utils.rwlock import ReadWriteLock


class InMemoryDataStorage(DataStorage):
    """Default in-memory implementation of :class:`DataStorage`.

    All reads and writes are guarded by a read/write lock; archived items are
    kept in storage but hidden from readers.
    """

    def __init__(self):
        self.__rw_lock = ReadWriteLock()
        self.__initialized = False
        self.__version = 0
        # defaultdict: looking up an unknown category transparently creates an empty bucket
        self.__storage = defaultdict(dict)

    def get(self, kind: Category, key: str) -> Optional[dict]:
        """Return the item stored under (kind, key), or None if absent/archived."""
        try:
            self.__rw_lock.read_lock()
            item = self.__storage[kind].get(key, None)
            if (item is None) or item['isArchived']:
                return None
            return item
        finally:
            self.__rw_lock.release_read_lock()

    def get_all(self, kind: Category) -> Mapping[str, dict]:
        """Return all non-archived items of the given category."""
        try:
            self.__rw_lock.read_lock()
            return {k: v for k, v in self.__storage[kind].items() if not v['isArchived']}
        finally:
            self.__rw_lock.release_read_lock()

    def init(self, all_data: Mapping[Category, Mapping[str, dict]], version: int = 0):
        """Replace the whole storage content if ``version`` is newer than the current one."""
        if (not all_data) or not isinstance(version, int):
            return
        try:
            self.__rw_lock.write_lock()
            # version compared under the write lock: the unlocked check in a
            # previous revision could race against a concurrent init()/upsert()
            if version <= self.__version:
                return
            self.__storage.clear()
            self.__storage.update(all_data)  # type: ignore
            self.__initialized = True
            self.__version = version
        finally:
            self.__rw_lock.release_write_lock()

    def upsert(self, kind: Category, key: str, item: dict, version: int = 0):
        """Insert or update one item if ``version`` is newer than the current one."""
        if (not kind) or (not item) or (not key) or not isinstance(version, int):
            return
        try:
            self.__rw_lock.write_lock()
            # same as init(): keep the version check inside the critical section
            if version <= self.__version:
                return
            key_items = self.__storage[kind]
            existing = key_items.get(key, None)
            if (existing is None) or existing['timestamp'] < version:
                key_items[key] = item
                self.__version = version
                if not self.__initialized:
                    self.__initialized = True
        finally:
            self.__rw_lock.release_write_lock()

    @property
    def initialized(self) -> bool:
        """True once any data has been stored."""
        try:
            self.__rw_lock.read_lock()
            return self.__initialized
        finally:
            self.__rw_lock.release_read_lock()

    @property
    def latest_version(self) -> int:
        """The highest version accepted so far."""
        try:
            self.__rw_lock.read_lock()
            return self.__version
        finally:
            self.__rw_lock.release_read_lock()

    def stop(self):
        # nothing to release for the in-memory variant
        pass


class NullDataStorage(DataStorage):
    """A no-op storage: stores nothing and reports itself as initialized."""

    def get(self, kind: Category, key: str) -> Optional[dict]:
        return None

    def get_all(self, kind: Category) -> Mapping[str, dict]:
        return dict()

    def init(self, all_data: Mapping[Category, Mapping[str, dict]], version: int = 0):
        pass

    def upsert(self, kind: Category, key: str, item: dict, version: int = 0):
        pass

    @property
    def initialized(self) -> bool:
        return True

    @property
    def latest_version(self) -> int:
        return 0

    def stop(self):
        pass
import base64
import json
import re
from typing import Callable, Iterable, Optional

# NOTE(review): project imports (fbclient.common_types, fbclient.event_types,
# fbclient.utils, variation_splitting_algorithm) are deferred to their use
# sites below so this module can be loaded standalone -- confirm no caller
# relies on them being attributes of this module.

REASON_CLIENT_NOT_READY = 'client not ready'

REASON_FLAG_NOT_FOUND = 'flag not found'

REASON_ERROR = 'error in evaluation'

REASON_USER_NOT_SPECIFIED = 'user not specified'

REASON_WRONG_TYPE = 'wrong type'

REASON_FLAG_OFF = 'flag off'

REASON_PREREQUISITE_FAILED = 'prerequisite failed'

REASON_TARGET_MATCH = 'target match'

REASON_RULE_MATCH = 'rule match'

REASON_FALLTHROUGH = 'fall through all rules'

# operator names used in rule conditions
__THAN_CLAUSE__ = 'Than'

__GE_CLAUSE__ = 'BiggerEqualThan'

__GT_CLAUSE__ = 'BiggerThan'

__LE_CLAUSE__ = 'LessEqualThan'

__LT_CLAUSE__ = 'LessThan'

__EQ_CLAUSE__ = 'Equal'

__NEQ_CLAUSE__ = 'NotEqual'

__CONTAINS_CLAUSE__ = 'Contains'

__NOT_CONTAIN_CLAUSE__ = 'NotContain'

__IS_ONE_OF_CLAUSE__ = 'IsOneOf'

__NOT_ONE_OF_CLAUSE__ = 'NotOneOf'

__STARTS_WITH_CLAUSE__ = 'StartsWith'

__ENDS_WITH_CLAUSE__ = 'EndsWith'

__IS_TRUE_CLAUSE__ = 'IsTrue'

__IS_FALSE_CLAUSE__ = 'IsFalse'

__MATCH_REGEX_CLAUSE__ = 'MatchRegex'

__NOT_MATCH_REGEX_CLAUSE__ = 'NotMatchRegex'

__IS_IN_SEGMENT_CLAUSE__ = 'User is in segment'

__NOT_IN_SEGMENT_CLAUSE__ = 'User is not in segment'


class Evaluator:
    """Evaluates a feature flag for a user.

    Match order: disabled flag -> targeted users -> rules -> default rule.
    """

    def __init__(self,
                 flag_getter: Callable[[str], Optional[dict]],
                 segment_getter: Callable[[str], Optional[dict]]):
        self.__flag_getter = flag_getter
        self.__segment_getter = segment_getter
        # evaluated in order; the first non-None result wins
        self.__ops = [self._match_feature_flag_disabled_user_variation,
                      self._match_targeted_user_variation,
                      self._match_condition_user_variation,
                      self._match_default_user_variation]

    def evaluate(self, flag: dict, user: "FBUser", fb_event: Optional["FBEvent"] = None) -> "_EvalResult":
        """Evaluate ``flag`` for ``user``; raises ValueError on empty input."""
        if not flag or not user:
            raise ValueError('null flag or empty user')
        return self._match_user_variation(flag, user, fb_event)  # type: ignore

    def _match_user_variation(self, flag: dict, user: "FBUser", fb_event: Optional["FBEvent"] = None) -> Optional["_EvalResult"]:
        from fbclient.event_types import FlagEventVariation  # deferred import
        from fbclient.utils import log  # deferred import
        er = None
        try:
            for op in self.__ops:
                er = op(flag, user)
                if er is not None:
                    return er
        finally:
            if er is not None:
                # lazy %-style args: the message is rendered only if INFO is enabled
                log.info('FB Python SDK: User %s, Feature Flag %s, Flag Value %s', user.get('KeyId'), er.key_name, er.value)
                if fb_event is not None:
                    fb_event.add(FlagEventVariation(er))

    # return a value when flag is off
    def _match_feature_flag_disabled_user_variation(self, flag: dict, user: "FBUser") -> Optional["_EvalResult"]:
        from fbclient.common_types import _EvalResult  # deferred import
        if not flag['isEnabled']:
            return _EvalResult(flag['disabledVariationId'],
                               flag['variationMap'][flag['disabledVariationId']],
                               REASON_FLAG_OFF,
                               False,
                               flag['key'],
                               flag['name'],
                               flag['variationType'])
        return None

    # return the value of target user
    def _match_targeted_user_variation(self, flag: dict, user: "FBUser") -> Optional["_EvalResult"]:
        from fbclient.common_types import _EvalResult  # deferred import
        for target in flag['targetUsers']:
            if any(key_id == user.get('keyid') for key_id in target['keyIds']):
                return _EvalResult(target['variationId'],
                                   flag['variationMap'][target['variationId']],
                                   REASON_TARGET_MATCH,
                                   flag['exptIncludeAllTargets'],
                                   flag['key'],
                                   flag['name'],
                                   flag['variationType'])
        return None

    # return the value of matched rule
    def _match_condition_user_variation(self, flag: dict, user: "FBUser") -> Optional["_EvalResult"]:
        for rule in flag['rules']:
            if self._match_any_rule(user, rule):
                return self._get_rollout_variation_option(flag,
                                                          rule['variations'],
                                                          user,
                                                          REASON_RULE_MATCH,
                                                          flag['exptIncludeAllTargets'],
                                                          rule['includedInExpt'],
                                                          flag['key'],
                                                          flag['name'])
        return None

    # get value from default rule
    def _match_default_user_variation(self, flag: dict, user: "FBUser") -> Optional["_EvalResult"]:
        return self._get_rollout_variation_option(flag,
                                                  flag['fallthrough']['variations'],
                                                  user,
                                                  REASON_FALLTHROUGH,
                                                  flag['exptIncludeAllTargets'],
                                                  flag['fallthrough']['includedInExpt'],
                                                  flag['key'],
                                                  flag['name'])

    def _match_any_rule(self, user: "FBUser", rule: dict) -> bool:
        # a rule matches only when every condition matches (conditions cannot be empty)
        return all(self._process_condition(user, condition) for condition in rule['conditions'])

    def _process_condition(self, user: "FBUser", condition: dict) -> bool:
        """Dispatch a single condition to the matching operator implementation."""
        op = condition['op']
        # a segment condition carries no operation; its property names the clause
        op = condition['property'] if not op else op
        if __THAN_CLAUSE__ in str(op):
            return self._than(user, condition)
        elif op == __EQ_CLAUSE__:
            return self._equals(user, condition)
        elif op == __NEQ_CLAUSE__:
            return not self._equals(user, condition)
        elif op == __CONTAINS_CLAUSE__:
            return self._contains(user, condition)
        elif op == __NOT_CONTAIN_CLAUSE__:
            return not self._contains(user, condition)
        elif op == __IS_ONE_OF_CLAUSE__:
            return self._one_of(user, condition)
        elif op == __NOT_ONE_OF_CLAUSE__:
            return not self._one_of(user, condition)
        elif op == __STARTS_WITH_CLAUSE__:
            return self._starts_with(user, condition)
        elif op == __ENDS_WITH_CLAUSE__:
            return self._ends_with(user, condition)
        elif op == __IS_TRUE_CLAUSE__:
            return self._true(user, condition)
        elif op == __IS_FALSE_CLAUSE__:
            return self._false(user, condition)
        elif op == __MATCH_REGEX_CLAUSE__:
            return self._match_reg_exp(user, condition)
        elif op == __NOT_MATCH_REGEX_CLAUSE__:
            return not self._match_reg_exp(user, condition)
        elif op == __IS_IN_SEGMENT_CLAUSE__:
            return self._in_segment(user, condition)
        elif op == __NOT_IN_SEGMENT_CLAUSE__:
            return not self._in_segment(user, condition)
        else:
            return False

    def _than(self, user: "FBUser", condition: dict) -> bool:
        from fbclient.utils import is_numeric  # deferred import
        pv = user.get(condition['property'])
        if not is_numeric(pv) or not is_numeric(condition['value']):
            return False
        # rounding keeps comparisons stable across float representations
        pv_num, cv_num = round(float(pv), 5), round(float(condition['value']), 5)  # type: ignore
        op = condition['op']
        if op == __GE_CLAUSE__:
            return pv_num >= cv_num
        elif op == __GT_CLAUSE__:
            return pv_num > cv_num
        elif op == __LE_CLAUSE__:
            return pv_num <= cv_num
        elif op == __LT_CLAUSE__:
            return pv_num < cv_num
        else:
            return False

    def _equals(self, user: "FBUser", condition: dict) -> bool:
        pv = user.get(condition['property'])
        cv = condition['value']
        return pv is not None and cv is not None and str(pv) == str(cv)

    def _contains(self, user: "FBUser", condition: dict) -> bool:
        pv = user.get(condition['property'])
        cv = condition['value']
        return pv is not None and cv is not None and str(cv) in str(pv)

    def _one_of(self, user: "FBUser", condition: dict) -> bool:
        pv = user.get(condition['property'])
        try:
            cv = json.loads(condition['value'])
            return pv is not None and cv is not None and str(pv) in cv
        except Exception:
            # malformed JSON or a non-container value: treat as no match
            # (was a bare except, which also swallowed KeyboardInterrupt)
            return False

    def _starts_with(self, user: "FBUser", condition: dict) -> bool:
        pv = user.get(condition['property'])
        cv = condition['value']
        return pv is not None and cv is not None and str(pv).startswith(str(cv))

    def _ends_with(self, user: "FBUser", condition: dict) -> bool:
        pv = user.get(condition['property'])
        cv = condition['value']
        return pv is not None and cv is not None and str(pv).endswith(str(cv))

    def _true(self, user: "FBUser", condition: dict) -> bool:
        pv = user.get(condition['property'])
        return pv is not None and str(pv).lower() == 'true'

    def _false(self, user: "FBUser", condition: dict) -> bool:
        pv = user.get(condition['property'])
        return pv is not None and str(pv).lower() == 'false'

    def _match_reg_exp(self, user: "FBUser", clause: dict) -> bool:
        pv = user.get(clause['property'])
        cv = clause['value']
        return pv is not None and cv is not None and re.search(str(cv), str(pv)) is not None

    def _in_segment(self, user: "FBUser", condition: dict) -> bool:
        def match_segment(user: "FBUser", segment: Optional[dict]) -> bool:
            if not user or not segment:
                return False
            user_key = user.get('keyid')
            # explicit exclusion wins over inclusion; otherwise fall back to rules
            if user_key in segment['excluded']:
                return False
            if user_key in segment['included']:
                return True
            return any(self._match_any_rule(user, rule) for rule in segment['rules'])
        try:
            cv = json.loads(condition['value'])
            return cv and any(match_segment(user, self.__segment_getter(sgid)) for sgid in cv)
        except Exception:
            # malformed JSON or a non-iterable value: treat as no match
            # (was a bare except, which also swallowed KeyboardInterrupt)
            return False

    def _get_rollout_variation_option(self,
                                      flag: dict,
                                      rollouts: Iterable[dict],
                                      user: "FBUser",
                                      reason: str,
                                      expt_include_all_targets: bool,
                                      rule_inclued_in_expt: bool,
                                      key_name: str,
                                      name: str) -> Optional["_EvalResult"]:
        """Pick the rollout bucket the user falls into and build the result."""
        from fbclient.common_types import _EvalResult  # deferred import
        from fbclient.utils.variation_splitting_algorithm import \
            VariationSplittingAlgorithm  # deferred import

        def is_send_to_expt(user_key: str,
                            rollout: dict,
                            expt_include_all_targets: bool,
                            rule_inclued_in_expt: bool) -> bool:
            if expt_include_all_targets:
                return True
            if rule_inclued_in_expt:
                send_to_expt_percentage = rollout['exptRollout']
                splitting_percentage = rollout['rollout'][1] - rollout['rollout'][0]

                if send_to_expt_percentage == 0 or splitting_percentage == 0:
                    return False

                upper_bound = send_to_expt_percentage / splitting_percentage
                if upper_bound > 1:
                    upper_bound = 1
                # re-hash with a distinct key so the experiment split is
                # independent of the variation split
                new_user_key = base64.b64encode(user_key.encode()).decode()
                return VariationSplittingAlgorithm(new_user_key, [0, upper_bound]).is_key_belongs_to_percentage()
            return False

        user_key = user.get('keyid')
        for rollout in rollouts:
            if VariationSplittingAlgorithm(user_key, rollout['rollout']).is_key_belongs_to_percentage():  # type: ignore
                send_to_expt = is_send_to_expt(user_key, rollout, expt_include_all_targets, rule_inclued_in_expt)  # type: ignore
                return _EvalResult(rollout['id'],
                                   flag['variationMap'][rollout['id']],
                                   reason,
                                   send_to_expt,
                                   key_name,
                                   name,
                                   flag['variationType'])
        return None
import json
from concurrent.futures import ThreadPoolExecutor
from queue import Empty, Full, Queue
from threading import BoundedSemaphore, Lock, Thread
from typing import List, Optional

from fbclient.common_types import FBEvent
from fbclient.config import Config
from fbclient.event_types import (EventMessage, FlagEvent, MessageType,
                                  MetricEvent, UserEvent)
from fbclient.interfaces import EventProcessor, Sender
from fbclient.utils import log
from fbclient.utils.repeatable_task import RepeatableTask


class DefaultEventProcessor(EventProcessor):
    """Front end of the insight pipeline: enqueues events for the dispatcher
    thread and periodically triggers a flush.
    """

    def __init__(self, config: Config, sender: Sender):
        self.__inbox = Queue(maxsize=config.events_max_in_queue)
        self.__closed = False
        self.__lock = Lock()
        # the dispatcher drains the inbox on its own daemon thread
        EventDispatcher(config, sender, self.__inbox).start()
        self.__flush_task = RepeatableTask('insight flush', config.events_flush_interval, self.flush)
        self.__flush_task.start()
        log.debug('insight processor is ready')

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_traceback):
        self.stop()

    def __put_message_to_inbox(self, message: EventMessage) -> bool:
        try:
            self.__inbox.put_nowait(message)
            return True
        except Full:
            # catch only queue.Full here: the previous bare except also
            # swallowed unrelated errors such as KeyboardInterrupt
            if message.type == MessageType.SHUTDOWN:
                # a shutdown message must not be dropped: block until there is room
                self.__inbox.put(message, block=True, timeout=None)
                return True
            # if it reaches here, the application is probably doing tons of flag
            # evaluations across many threads -- waiting for inbox space would
            # risk a serious slowdown, so drop the event (or raise the inbox capacity)
            log.warning('FB Python SDK: Events are being produced faster than they can be processed; some events will be dropped')
            return False

    def __put_message_async(self, type: MessageType, event: Optional[FBEvent] = None):
        # fire-and-forget: the caller does not wait for the dispatcher
        message = EventMessage(type, event, False)
        if self.__put_message_to_inbox(message):
            log.trace('put %s message to inbox' % str(type))  # type: ignore

    def __put_message_and_wait_terminate(self, type: MessageType, event: Optional[FBEvent] = None):
        # blocking variant used on shutdown
        message = EventMessage(type, event, True)
        if self.__put_message_to_inbox(message):
            log.debug('put %s WaitTermination message to inbox' % str(type))
            message.waitForComplete()

    def send_event(self, event: FBEvent):
        """Queue one event for delivery; unknown event types are ignored."""
        if not self.__closed and event:
            if isinstance(event, FlagEvent):
                self.__put_message_async(MessageType.FLAGS, event)
            elif isinstance(event, MetricEvent):
                self.__put_message_async(MessageType.METRICS, event)
            elif isinstance(event, UserEvent):
                self.__put_message_async(MessageType.USER, event)
            else:
                log.debug('ignore unknown event type')

    def flush(self):
        """Ask the dispatcher to flush buffered events (non-blocking)."""
        if not self.__closed:
            self.__put_message_async(MessageType.FLUSH)

    def stop(self):
        """Flush remaining events and shut the pipeline down (idempotent)."""
        with self.__lock:
            if not self.__closed:
                log.info('FB Python SDK: event processor is stopping')
                self.__closed = True
                self.__flush_task.stop()
                self.flush()
                self.__put_message_and_wait_terminate(MessageType.SHUTDOWN)


class EventDispatcher(Thread):
    """Daemon thread that drains the inbox and flushes events to the server."""

    __MAX_FLUSH_WORKERS_NUMBER = 5
    __BATCH_SIZE = 50

    def __init__(self, config: Config, sender: Sender, inbox: "Queue[EventMessage]"):
        super().__init__(daemon=True)
        self.__config = config
        self.__inbox = inbox
        self.__closed = False
        self.__sender = sender
        self.__events_buffer_to_next_flush = []
        self.__flush_workers = ThreadPoolExecutor(max_workers=self.__MAX_FLUSH_WORKERS_NUMBER)
        # bounds the number of concurrent flush payloads in flight
        self.__permits = BoundedSemaphore(value=self.__MAX_FLUSH_WORKERS_NUMBER)

    # blocks until at least one message is available and then:
    # 1: transfer the events to event buffer
    # 2: try to flush events to featureflag if a flush message arrives
    # 3: wait for releasing resources if a shutdown arrives
    def run(self):
        log.debug('event dispatcher is working...')
        while True:
            try:
                msgs = self.__drain_inbox(size=self.__BATCH_SIZE)
                for msg in msgs:
                    try:
                        if msg.type == MessageType.FLAGS or msg.type == MessageType.METRICS or msg.type == MessageType.USER:
                            self.__put_events_to_buffer(msg.event)  # type: ignore
                        elif msg.type == MessageType.FLUSH:
                            self.__trigger_flush()
                        elif msg.type == MessageType.SHUTDOWN:
                            self.__shutdown()
                            msg.completed()
                            return  # exit the loop
                        msg.completed()
                    except Exception as inner:
                        log.exception('FB Python SDK: unexpected error in event dispatcher: %s' % str(inner))
            except Exception as outer:
                log.exception('FB Python SDK: unexpected error in event dispatcher: %s' % str(outer))

    def __drain_inbox(self, size=50) -> List[EventMessage]:
        # block for the first message, then opportunistically take up to size-1 more
        msg = self.__inbox.get(block=True, timeout=None)
        msgs = [msg]
        for _ in range(size - 1):
            try:
                msgs.append(self.__inbox.get_nowait())
            except Empty:
                break
        return msgs

    def __put_events_to_buffer(self, event: FBEvent):
        if not self.__closed and event.is_send_event:
            log.debug('put event to buffer')
            self.__events_buffer_to_next_flush.append(event)
+ + def __trigger_flush(self): + if not self.__closed and len(self.__events_buffer_to_next_flush) > 0: + log.debug('trigger flush') + # get all the current events from event buffer + if self.__permits.acquire(blocking=False): + payloads = [] + payloads.extend(self.__events_buffer_to_next_flush) + # get an available flush worker to send events + self.__flush_workers \ + .submit(FlushPayloadRunner(self.__config, self.__sender, payloads).run) \ + .add_done_callback(lambda x: self.__permits.release()) + # clear the buffer for the next flush + self.__events_buffer_to_next_flush.clear() + # if no available flush worker, keep the events in the buffer + + def __shutdown(self): + if not self.__closed: + try: + log.debug('event dispatcher is cleaning up thread and conn pool') + self.__closed = True + log.debug('flush worker pool is stopping...') + self.__flush_workers.shutdown(wait=True) + self.__sender.stop() + except Exception as e: + log.exception('FB Python SDK: unexpected error when closing event dispatcher: %s' % str(e)) + + +class FlushPayloadRunner: + __MAX_EVENT_SIZE_PER_REQUEST = 50 + + def __init__(self, config: Config, sender: Sender, payloads: List[FBEvent]): + self.__config = config + self.__sender = sender + self.__payloads = payloads + + def run(self) -> bool: + def partition(lst: List, size: int): + for i in range(0, len(lst), size): + yield lst[i : i + size] + try: + for payload in list(partition(self.__payloads, self.__MAX_EVENT_SIZE_PER_REQUEST)): + payload_part = [event.to_json_dict() for event in payload] + json_str = json.dumps(payload_part) + log.trace(json_str) # type: ignore + self.__sender.postJson(self.__config.events_uri, json_str, fetch_response=False) + log.debug('paload size: %s' % len(payload_part)) + except Exception as e: + log.exception('FB Python SDK: unexpected error in sending payload: %s' % str(e)) + return False + return True + + +class NullEventProcessor(EventProcessor): + def __init__(self, config: Config, sender: Sender): + pass + 
+ def send_event(self, event: FBEvent): + pass + + def flush(self): + pass + + def stop(self): + pass diff --git a/fbclient/event_types.py b/fbclient/event_types.py new file mode 100644 index 0000000..37611dc --- /dev/null +++ b/fbclient/event_types.py @@ -0,0 +1,163 @@ +from enum import Enum +from threading import Event +from time import time +from typing import Optional + +from fbclient.common_types import _EvalResult, FBEvent, FBUser + + +class MessageType(Enum): + FLAGS = 0 + FLUSH = 1 + SHUTDOWN = 2 + METRICS = 3 + USER = 4 + STATISTICS = 5 + + +class EventMessage: + def __init__(self, type: MessageType, event: Optional[FBEvent], await_termination: bool): + self.__type = type + self.__event = event + self.__wait_lock = Event() if await_termination else None + + def completed(self): + if self.__wait_lock: + self.__wait_lock.set() + + def waitForComplete(self): + if self.__wait_lock: + self.__wait_lock.wait() + + @property + def type(self) -> MessageType: + return self.__type + + @property + def event(self) -> Optional[FBEvent]: + return self.__event + + +class FlagEventVariation: + def __init__(self, variation: _EvalResult): + self.__variation = variation + self.__timestamp = int(round(time() * 1000)) + + @property + def variation(self) -> _EvalResult: + return self.__variation + + @property + def timestamp(self) -> int: + return self.__timestamp + + +class Metric: + def __init__(self, event_name: str, value: float): + self.__event_name = event_name + self.__value = value + self.__route = 'index/metric' + self.__type = 'CustomEvent' + self.__app_type = 'pythonserverside' + self.__timestamp = int(round(time() * 1000)) + + @property + def value(self) -> float: + return self.__value + + @property + def event_name(self) -> str: + return self.__event_name + + @property + def route(self) -> str: + return self.__route + + @property + def type(self) -> str: + return self.__type + + @property + def app_type(self) -> str: + return self.__app_type + + @property + def 
timestamp(self) -> int: + return self.__timestamp + + +class UserEvent(FBEvent): + def __init__(self, user: FBUser): + super().__init__(user) + + def add(self, *elements) -> FBEvent: + return self + + @property + def is_send_event(self) -> bool: + return self._user is not None + + def to_json_dict(self) -> dict: + return {'user': self._user.to_json_dict()} + + +class FlagEvent(FBEvent): + def __init__(self, user: FBUser): + super().__init__(user) + self.__variations = [] + + def add(self, *elements) -> FBEvent: + for element in elements: + if isinstance(element, FlagEventVariation) and element.variation.is_success: + self.__variations.append(element) + return self + + @property + def is_send_event(self) -> bool: + return self._user and len(self.__variations) > 0 + + def to_json_dict(self) -> dict: + json_dict = {'user': self._user.to_json_dict()} + arr = [] + for variation in self.__variations: + flag_json_dict = {'featureFlagKey': variation.variation.key_name, + 'sendToExperiment': variation.variation.is_send_to_expt, + 'timestamp': variation.timestamp, + 'variation': { + 'id': variation.variation.id, + 'value': variation.variation.value, + 'reason': variation.variation.reason + }} + arr.append(flag_json_dict) + json_dict['variations'] = arr # type: ignore + return json_dict + + +class MetricEvent(FBEvent): + def __init__(self, user: FBUser): + super().__init__(user) + self.__metrics = [] + + def add(self, *elements) -> FBEvent: + for element in elements: + if isinstance(element, Metric): + self.__metrics.append(element) + return self + + @property + def is_send_event(self) -> bool: + return self._user and len(self.__metrics) > 0 + + def to_json_dict(self) -> dict: + json_dict = {'user': self._user.to_json_dict()} + arr = [] + for metric in self.__metrics: + metric_json_dict = {'eventName': metric.event_name, + 'numericValue': metric.value, + 'route': metric.route, + 'type': metric.type, + 'appType': metric.app_type, + 'timestamp': metric.timestamp} + 
arr.append(metric_json_dict) + json_dict['metrics'] = arr # type: ignore + return json_dict diff --git a/fbclient/interfaces.py b/fbclient/interfaces.py new file mode 100644 index 0000000..20adcdb --- /dev/null +++ b/fbclient/interfaces.py @@ -0,0 +1,267 @@ +from abc import ABC, abstractmethod +from typing import Mapping, Optional + +from fbclient.category import Category +from fbclient.common_types import FBEvent +from fbclient.status_types import State + + +class UpdateProcessor(ABC): + ''' + Interface for the component that obtains feature flag data in some way and passes it to a + :class:`DataStorage`. The built-in imp now of this is streaming, we will provide the polling update + and file update soon + ''' + + @abstractmethod + def start(self): + ''' + Starts an operation in the background. + ''' + pass + + @abstractmethod + def stop(self): + ''' + Stops an operation running in the background. + ''' + pass + + @property + @abstractmethod + def initialized(self) -> bool: + """ + Returns whether the update processor has received feature flags/values and has initialized its storage. + """ + pass + + +class DataUpdateStatusProvider(ABC): + ''' + Interface for manipulating the data updates in :class: `DataStorage` and maintain the status of :class: `UpdateProcessor`. + The implementation should be called in :class: `UpdateProcessor`. + ''' + + @abstractmethod + def init(self, all_data: Mapping[Category, Mapping[str, dict]], version: int = 0) -> bool: + """ + Manipulate the init operation in data storage. If the underlying data storage throws an error during this operation, the SDK will catch it, log it, + and set the state of update process to INTERRUPTED. It will not rethrow the error to other level, but will simply return falseto indicate that the operation failed. 
+ + :param all_data: all data to be stored + :param version: the version of this data set + :return: True if the update succeeded + """ + pass + + @abstractmethod + def upsert(self, kind: Category, key: str, item: dict, version: int = 0) -> bool: + """ + Manipuate the upsert operation in data storage. If the underlying data storage throws an error during this operation, the SDK will catch it, log it, + and set the state of update process to INTERRUPTED. It will not rethrow the error to other level, but will simply return falseto indicate that the operation failed. + + :param kind: The kind of data to update + :param key: The unique key of the data + :param item: The data to update or insert + :param version: the version of this data set + :return: True if the update succeeded + """ + pass + + @property + @abstractmethod + def initialized(self) -> bool: + """ + Returns whether the storage has been initialized yet or not + """ + pass + + @property + @abstractmethod + def latest_version(self) -> int: + """ + return the latest version of the data storage + """ + pass + + @property + @abstractmethod + def current_state(self) -> State: + """ + Returns the current status of the update processing + All of the :class:`UpdateProcessor` implementations should update this status, + whenever they successfully initialize, encounter an error, or recover after an error. + If not, the status will always be reported as INITIALIZING. + + :return: the current state + """ + pass + + @abstractmethod + def update_state(self, new_state: State): + """ + Informs the SDK of a change of status in the update processing. Implementations should use this method, + if they have any concept of being in a valid state, a temporarily disconnected state, or a permanently stopped state. + If ``new_state`` is different from the previous state, and/or ``error`` is non-null, the SDK will start returning the new status + (adding a timestamp for the change). 
+ A special case is that if ``new_state`` is INTERRUPTED, but the previous state was INITIALIZING, the state will remain at INITIALIZING + because INTERRUPTED is only meaningful after a successful startup. + + :param new_state: the new state of update processing + """ + pass + + @abstractmethod + def wait_for_OKState(self, timeout: float) -> bool: + """ + A method for waiting for a OK state arival + If the current state is already OK when this method is called, it immediately returns. + Otherwise, it blocks until 1. the state has become OK, 2. the state has become + OFF, 3. the specified timeout elapses, 4. the current thread is interrupted by some reason + + :param timeout: the maximum amount of time to wait or to block indefinitely if the timeout is zero or negative. + :return: True if the state is OK; False if the state is OFF or timeout elapses + """ + pass + + +class DataStorage(ABC): + """ + Interface for a versioned store for feature flags and related data received from feature flag center. + Implementations should permit concurrent access and updates. + This is an internal interface only implemented and used in SDK + + An "data", for ``DataStorage``, is simply a dict of data which must have at least + three properties: ``id`` (its unique key), ``version`` or ``timestamp``(the version number provided by + feature flag center), and ``isArchived`` (True if this is a placeholder for a deleted data). + + init and upsert requests are version-based: if the version number in the request is less than + the currently stored version of the data, the call should be ignored. + """ + + @abstractmethod + def get(self, kind: Category, key: str) -> Optional[dict]: + """ + Retrieves the data to which the specified key is mapped, or None if the key is not found + or the associated data has a ``isArchived`` property of True. 
+ + :param kind: The kind of data to get + :param key: The key whose associated data is to be returned + :return: the data received from feature flag center + """ + + @abstractmethod + def get_all(self, kind: Category) -> Mapping[str, dict]: + """ + Retrieves a dictionary of all associated data of a given kind except the data with a ``isArchived`` property of True. + + :param kind: The kind of data to get + :return: the data received from feature flag center + """ + pass + + @abstractmethod + def init(self, all_data: Mapping[Category, Mapping[str, dict]], version: int = 0): + """ + Init (or re-init by data update process) the storage with the specified set of data. + Any existing entries will be removed if the new data set's version is greater than the current version + + + :param all_data: All data to be stored + :param version: the version of this data set + """ + pass + + @abstractmethod + def upsert(self, kind: Category, key: str, item: dict, version: int = 0): + """ + Updates or inserts the data associated with the specified key. If an item with the same key + already exists, it should update it only if the new item's ``version`` property is greater than the current version + + :param kind: The kind of data to update + :param key: The unique key of the data + :param item: The data to update or insert + :param version: the version of this data set + """ + pass + + @property + @abstractmethod + def initialized(self) -> bool: + """ + Returns whether the storage has been initialized yet or not + """ + pass + + @property + @abstractmethod + def latest_version(self) -> int: + """ + Returns the latest version of this date storage + """ + pass + + @abstractmethod + def stop(self): + """ + Shuts down the date storage + """ + pass + + +class EventProcessor(ABC): + """ + Interface for a component to send analytics events. + """ + + @abstractmethod + def send_event(self, event: FBEvent): + """ + Processes an event to be sent at some point. 
+ + :param event: The event to send + """ + pass + + @abstractmethod + def flush(self): + """ + Specifies that any buffered events should be sent as soon as possible, rather than waiting + for the next flush interval. This method is asynchronous, so events still may not be sent + until a later time. However, calling ``stop()`` will synchronously deliver any events that were + not yet delivered prior to shutting down. + """ + pass + + @abstractmethod + def stop(self): + """ + Shuts down the event processor after first delivering all pending events. + """ + pass + + +class Sender(ABC): + """ + interface for a component to send request to feature flag center. It's mainly internal use, for example sending events, requesting + the latest version of feature flags, etc. + """ + + @abstractmethod + def postJson(self, url: str, json_str: str, fetch_response: bool = True) -> Optional[str]: + """ + Sends a json object via HTTP POST to the given URL + + :param url: The URL to send the json + :param json_str: The json to send data + :param fetch_response: Whether to fetch the response or not + :return: The response, normally it'a json string or None; if fetch_response is False, return None + """ + pass + + @abstractmethod + def stop(self): + """ + Shuts down the connection to feature flag center + """ + pass diff --git a/fbclient/status.py b/fbclient/status.py new file mode 100644 index 0000000..60e424e --- /dev/null +++ b/fbclient/status.py @@ -0,0 +1,89 @@ +import threading +from time import time +from typing import Mapping + +from fbclient.category import Category +from fbclient.interfaces import DataStorage, DataUpdateStatusProvider +from fbclient.status_types import (DATA_STORAGE_INIT_ERROR, + DATA_STORAGE_UPDATE_ERROR, State, StateType) +from fbclient.utils import log + + +class DataUpdateStatusProviderIml(DataUpdateStatusProvider): + + def __init__(self, storage: DataStorage): + self.__storage = storage + self.__current_state = State.intializing_state() + self.__lock = 
threading.Condition(threading.Lock()) + + def init(self, all_data: Mapping[Category, Mapping[str, dict]], version: int = 0) -> bool: + try: + self.__storage.init(all_data, version) + except Exception as e: + self.__handle_exception(e, DATA_STORAGE_INIT_ERROR, str(e)) + return False + return True + + def upsert(self, kind: Category, key: str, item: dict, version: int = 0) -> bool: + try: + self.__storage.upsert(kind, key, item, version) + except Exception as e: + self.__handle_exception(e, DATA_STORAGE_UPDATE_ERROR, str(e)) + return False + return True + + def __handle_exception(self, error: Exception, error_type: str, message: str): + log.exception('FB Python SDK: Data Storage error: %s, UpdateProcessor will attempt to receive the data' % str(error)) + self.update_state(State.interrupted_state(error_type, message)) + + @property + def initialized(self) -> bool: + return self.__storage.initialized + + @property + def latest_version(self) -> int: + return self.__storage.latest_version + + @property + def current_state(self) -> State: + with self.__lock: + return self.__current_state + + def update_state(self, new_state: State): + if not new_state: + return + with self.__lock: + old_state_type = self.__current_state.state_type + new_state_type = new_state.state_type + error = new_state.error_track + # special case: if ``new_state`` is INTERRUPTED, but the previous state was INITIALIZING, the state will remain at INITIALIZING + # INTERRUPTED is only meaningful after a successful startup + if new_state_type == StateType.INTERRUPTED and old_state_type == StateType.INITIALIZING: + new_state_type = StateType.INITIALIZING + + # normal case + if new_state_type != old_state_type or error is not None: + state_since = time() if new_state_type != old_state_type else self.__current_state.state_since + self.__current_state = State(new_state_type, state_since, error) + # wakes up all threads waiting for the ok state to check the new state + self.__lock.notify_all() + + def 
wait_for_OKState(self, timeout: float = 0) -> bool: + _timeout = 0 if timeout is None or timeout <= 0 else timeout + deadline = time() + _timeout + with self.__lock: + while True: + if StateType.OK == self.__current_state.state_type: + return True + elif StateType.OFF == self.__current_state.state_type: + return False + else: + if (_timeout == 0): + self.__lock.wait() + else: + now = time() + if now >= deadline: + return False + else: + delay = deadline - now + self.__lock.wait(delay + 0.001) diff --git a/fbclient/status_types.py b/fbclient/status_types.py new file mode 100644 index 0000000..4fbe435 --- /dev/null +++ b/fbclient/status_types.py @@ -0,0 +1,106 @@ +from enum import Enum +from time import time +from typing import Optional + +DATA_STORAGE_INIT_ERROR = 'Data Storage init error' + +DATA_STORAGE_UPDATE_ERROR = 'Data Storage update error' + +REQUEST_INVALID_ERROR = 'Request invalid' + +DATA_INVALID_ERROR = 'Received Data invalid' + +NETWORK_ERROR = 'Network error' + +RUNTIME_ERROR = 'Runtime error' + +WEBSOCKET_ERROR = 'WebSocket error' + +UNKNOWN_ERROR = 'Unknown error' + +UNKNOWN_CLOSE_CODE = 'Unknown close code' + +SYSTEM_ERROR = 'System error' + +SYSTEM_QUIT = 'System quit' + + +class StateType(Enum): + """ + The initial state of the update processing when the SDK is being initialized. + If it encounters an error that requires it to retry initialization, the state will remain at + INITIALIZING until it either succeeds and becomes OK, or permanently fails and becomes OFF. + """ + INITIALIZING = 1 + """ + Indicates that the update processing is currently operational and has not had any problems since the + last time it received data. + In streaming mode, this means that there is currently an open stream connection and that at least + one initial message has been received on the stream. + """ + OK = 2 + """ + Indicates that the update processing encountered an error that it will attempt to recover from. 
+ In streaming mode, this means that the stream connection failed, or had to be dropped due to some + other error, and will be retried after a backoff delay. + """ + INTERRUPTED = 3 + """ + Indicates that the update processing has been permanently shut down. + This could be because it encountered an unrecoverable error or because the SDK client was + explicitly shut down. + """ + OFF = 4 + + +class ErrorTrack: + def __init__(self, error_type: str, message: str): + self.__error_type = error_type + self.__message = message + + @property + def error_type(self) -> str: + return self.__error_type + + @property + def message(self) -> str: + return self.__message + + +class State: + def __init__(self, state_type: "StateType", state_since: float, error_track: Optional["ErrorTrack"] = None): + self.__state_type = state_type + self.__state_since = state_since + self.__error_track = error_track + + @property + def state_type(self) -> "StateType": + return self.__state_type + + @property + def state_since(self) -> float: + return self.__state_since + + @property + def error_track(self) -> Optional["ErrorTrack"]: + return self.__error_track + + @staticmethod + def intializing_state() -> "State": + return State(StateType.INITIALIZING, time()) + + @staticmethod + def ok_state() -> "State": + return State(StateType.OK, time()) + + @staticmethod + def interrupted_state(error_type: str, message: str) -> "State": + return State(StateType.INTERRUPTED, time(), ErrorTrack(error_type, message)) + + @staticmethod + def normal_off_state() -> "State": + return State(StateType.OFF, time()) + + @staticmethod + def error_off_state(error_type: str, message: str) -> "State": + return State(StateType.OFF, time(), ErrorTrack(error_type, message)) diff --git a/fbclient/streaming.py b/fbclient/streaming.py new file mode 100644 index 0000000..d1da72c --- /dev/null +++ b/fbclient/streaming.py @@ -0,0 +1,231 @@ +import json +from threading import Event, Thread +from time import sleep +from typing 
import Optional, Tuple + +import websocket +from websocket._exceptions import WebSocketException + +from fbclient.category import FEATURE_FLAGS, SEGMENTS +from fbclient.config import Config +from fbclient.interfaces import DataUpdateStatusProvider, UpdateProcessor +from fbclient.status_types import (DATA_INVALID_ERROR, NETWORK_ERROR, + REQUEST_INVALID_ERROR, RUNTIME_ERROR, + SYSTEM_QUIT, UNKNOWN_CLOSE_CODE, + UNKNOWN_ERROR, WEBSOCKET_ERROR, State) +from fbclient.utils import (build_headers, build_token, + from_str_datetime_to_millis, log, valide_all_data) +from fbclient.utils.exponential_backoff_jitter_strategy import \ + BackoffAndJitterStrategy +from fbclient.utils.repeatable_task import RepeatableTask + +WS_NORMAL_CLOSE = 1000 + +WS_GOING_AWAY_CLOSE = 1001 + +WS_INVALID_REQUEST_CLOSE = 4003 + + +class _SelfClosed: + def __init__(self, + is_self_close: bool = False, + is_reconn: bool = False, + state: Optional[State] = None): + self.is_self_close = is_self_close + self.is_reconn = is_reconn + self.state = state + + def __call__(self): + return self.is_self_close + + +def _data_to_dict(data: dict) -> tuple[int, dict]: + version = 0 + all_data = {} + flags = {} + segments = {} + all_data[FEATURE_FLAGS] = flags + all_data[SEGMENTS] = segments + for flag in data['featureFlags']: + flag['timestamp'] = from_str_datetime_to_millis(flag['updatedAt']) + flag['variationMap'] = dict((var['id'], var['value']) for var in flag['variations']) + flag['_id'] = flag['id'] + flag['id'] = flag['key'] + flags[flag['id']] = {'id': flag['id'], 'timestamp': flag['timestamp'], 'isArchived': True} if flag['isArchived'] else flag + version = max(version, flag['timestamp']) + for segment in data['segments']: + segment['timestamp'] = from_str_datetime_to_millis(segment['updatedAt']) + segments[segment['id']] = {'id': segment['id'], 'timestamp': segment['timestamp'], 'isArchived': True} if segment['isArchived'] else segment + version = max(version, segment['timestamp']) + return version, 
all_data + + +def _handle_ws_error(error: BaseException) -> Tuple[bool, bool, State]: + if isinstance(error, WebSocketException): + return True, False, State.interrupted_state(WEBSOCKET_ERROR, str(error)) + if isinstance(error, ConnectionError): + return True, False, State.interrupted_state(NETWORK_ERROR, str(error)) + # internal use for test + if isinstance(error, (KeyboardInterrupt, SystemExit)): + return False, False, State.error_off_state(SYSTEM_QUIT, str(error)) + # an unexpected error occurs when the custom action is called, close ws connection to jump ws client forever loop + return True, True, State.interrupted_state(RUNTIME_ERROR, str(error)) + + +class Streaming(Thread, UpdateProcessor): + __ping_interval = 10.0 + + def __init__(self, config: Config, dataUpdateStatusProvider: DataUpdateStatusProvider, ready: Event): + super().__init__(daemon=True) + self.__config = config + self.__storage = dataUpdateStatusProvider + self.__ready = ready + self.__running = True + self.__strategy = BackoffAndJitterStrategy(config.streaming_first_retry_delay) + self.__wsapp = None + self.__self_closed = _SelfClosed() + self.__closed_by_error = False + self.__force_close = False + self.__has_network = not config.is_offline + if self.__has_network: + self.__ping_task = RepeatableTask('streaming ping', self.__ping_interval, self._on_ping) + self.__ping_task.start() + + def _init_wsapp(self): + + # authenfication and headers + token = build_token(self.__config.env_secret) + params = '?token=%s&type=server' % token + url = self.__config.streaming_uri + params + headers = build_headers(self.__config.env_secret) + + # a timeout is triggered if no connection response is received + websocket.setdefaulttimeout(self.__config.websocket.timeout) + # init web socket app + self.__wsapp = websocket.WebSocketApp(url, + header=headers, + on_open=self._on_open, + on_message=self._on_message, + on_close=self._on_close, + on_error=self._on_error) + # set the conn time + 
self.__strategy.set_good_run() + log.debug('Streaming WebSocket is connecting...') + + def run(self): + while (not self.__force_close and self.__running and self.__has_network): + try: + self._init_wsapp() + self.__wsapp.run_forever(sslopt=self.__config.websocket.sslopt, # type: ignore + http_proxy_host=self.__config.websocket.proxy_host, + http_proxy_port=self.__config.websocket.proxy_port, + http_proxy_auth=self.__config.websocket.proxy_auth, + proxy_type=self.__config.websocket.proxy_type, + skip_utf8_validation=self.__config.websocket.skip_utf8_validation) + if self.__running: + # calculate the delay for reconn + delay = self.__strategy.next_delay() + sleep(delay) + except Exception as e: + log.exception('FB Python SDK: Streaming unexpected error: %s', str(e)) + self.__storage.update_state(State.error_off_state(UNKNOWN_ERROR, str(e))) + finally: + # clear the last connection state + self.__wsapp = None + self.__self_closed = _SelfClosed() + self.__closed_by_error = False + log.debug('Streaming WebSocket process is over') + if not self.__ready.is_set(): + # if an error like data format invalid occurs in the first attempt, set ready to make client not to wait + self.__ready.set() + + # handle websocket auto close issue + def _on_ping(self): + if self.__wsapp and self.__wsapp.sock and self.__wsapp.sock.connected: + log.trace('ping') # type: ignore + self.__wsapp.send(json.dumps({'messageType': 'ping', 'data': None})) + + def _on_close(self, wsapp, close_code, close_msg): + if self.__self_closed(): + # close by client + self.__running = self.__self_closed.is_reconn + state = self.__self_closed.state + log.debug('Streaming WebSocket close reason: self close') + elif self.__closed_by_error: + return + elif close_code == WS_INVALID_REQUEST_CLOSE: + # close by server with code 4003 + self.__running = False + state = State.error_off_state(REQUEST_INVALID_ERROR, REQUEST_INVALID_ERROR) + log.debug('Streaming WebSocket close reason: %s' % REQUEST_INVALID_ERROR) + else: + # 
close by server with an unknown close code, restart immediately + self.__running = True + msg = close_msg if close_msg else UNKNOWN_CLOSE_CODE + state = State.interrupted_state(UNKNOWN_CLOSE_CODE, msg) + log.debug('Streaming WebSocket close reason: %s' % close_code) + + if state: + self.__storage.update_state(state) + + def _on_error(self, wsapp: websocket.WebSocketApp, error): + is_reconn, is_close_ws, state = _handle_ws_error(error) + log.warning('FB Python SDK: Streaming WebSocket Failure: %s' % str(error)) + if is_close_ws: + self.__self_closed = _SelfClosed(is_self_close=True, is_reconn=is_reconn, state=state) + wsapp.close(status=WS_GOING_AWAY_CLOSE) + else: + self.__running = is_reconn + self.__closed_by_error = True + self.__storage.update_state(state) + + def _on_open(self, wsapp: websocket.WebSocketApp): + log.debug('Asking Data updating on WebSocket') + version = self.__storage.latest_version if self.__storage.latest_version > 0 else 0 + data_sync_msg = {'messageType': 'data-sync', 'data': {'timestamp': version}} + json_str = json.dumps(data_sync_msg) + wsapp.send(json_str) + + def _on_process_data(self, data): + + log.debug('Streaming WebSocket is processing data') + version, all_data = _data_to_dict(data) + op_ok = False + if 'patch' == data['eventType']: + op_ok = all(self.__storage.upsert(cat, item['id'], item, item['timestamp']) + for cat, items in all_data.items() for item in sorted(items.values(), key=lambda x: x['timestamp'])) + else: + op_ok = self.__storage.init(all_data, version) + if op_ok: + if not self.__ready.is_set(): + # set ready when the initialization is complete. 
+ self.__ready.set() + self.__storage.update_state(State.ok_state()) + log.debug("processing data is well done") + return op_ok + + def _on_message(self, wsapp: websocket.WebSocketApp, msg): + log.trace('Streaming WebSocket data: %s' % msg) # type: ignore + try: + all_data = json.loads(msg) + if valide_all_data(all_data) and not self._on_process_data(all_data['data']) and self.__wsapp: + # state already updated in init or upsert, just reconn + self.__self_closed = _SelfClosed(is_self_close=True, is_reconn=True, state=None) + wsapp.close(status=WS_GOING_AWAY_CLOSE) + except Exception as e: + if isinstance(e, json.JSONDecodeError): + self.__self_closed = _SelfClosed(is_self_close=True, is_reconn=False, state=State.error_off_state(DATA_INVALID_ERROR, str(e))) + wsapp.close(status=WS_GOING_AWAY_CLOSE) + + def stop(self): + log.info('FB Python SDK: Streaming is stopping...') + self.__force_close = True + if self.__running and self.__wsapp: + self.__self_closed = _SelfClosed(is_self_close=True, is_reconn=False, state=State.normal_off_state()) + self.__wsapp.close(status=WS_NORMAL_CLOSE) + if self.__has_network: + self.__ping_task.stop() + + @property + def initialized(self) -> bool: + return self.__ready.is_set() and self.__storage.initialized diff --git a/fbclient/update_processor.py b/fbclient/update_processor.py new file mode 100644 index 0000000..0689b8d --- /dev/null +++ b/fbclient/update_processor.py @@ -0,0 +1,19 @@ +from threading import Event +from fbclient.config import Config +from fbclient.interfaces import DataUpdateStatusProvider, UpdateProcessor + + +class NullUpdateProcessor(UpdateProcessor): + + def __init__(self, config: Config, dataUpdateStatusProvider: DataUpdateStatusProvider, ready: Event): + self.__ready = ready + + def start(self): + self.__ready.set() + + def stop(self): + pass + + @property + def initialized(self) -> bool: + return self.__ready.is_set() diff --git a/fbclient/utils/__init__.py b/fbclient/utils/__init__.py new file mode 100644 
index 0000000..b904d2a --- /dev/null +++ b/fbclient/utils/__init__.py @@ -0,0 +1,148 @@ +import json +import logging +import sys +from math import floor +from random import random +from time import time +from typing import Any, Iterable, Mapping, Optional +from urllib.parse import urlparse + +from dateutil.parser import isoparse + +TRACE_LEVEL = logging.DEBUG - 5 + +logging.addLevelName(TRACE_LEVEL, 'TRACE') + + +class _MyLogger(logging.getLoggerClass()): + def trace(self, msg, *args, **kwargs): + if self.isEnabledFor(TRACE_LEVEL): + self._log(TRACE_LEVEL, msg, args, **kwargs) + + +logging.setLoggerClass(_MyLogger) + +logging.getLogger("schedule").setLevel(logging.ERROR) + +log = logging.getLogger(sys.modules[__name__].__name__) + +ALPHABETS = {"0": "Q", "1": "B", "2": "W", "3": "S", "4": "P", "5": "H", "6": "D", "7": "X", "8": "Z", "9": "U"} + + +def build_headers(env_secret: str, extra_headers={}): + + def build_default_headers(): + return { + 'Authorization': env_secret, + 'User-Agent': 'fb-python-server-sdk', + 'Content-Type': 'application/json' + } + + headers = build_default_headers() + headers.update(extra_headers) + return headers + + +def build_token(env_secret: str) -> str: + + def encodeNumber(num, length): + s = "000000000000" + str(num) + return ''.join(list(map(lambda ch: ALPHABETS[ch], s[len(s) - length:]))) + + text = env_secret.rstrip("=") + now = unix_timestamp_in_milliseconds() + timestampCode = encodeNumber(now, len(str(now))) + start = max(floor(random() * len(text)), 2) + part1 = encodeNumber(start, 3) + part2 = encodeNumber(len(timestampCode), 2) + part3 = text[0:start] + part4 = timestampCode + part5 = text[start:] + return '%s%s%s%s%s' % (part1, part2, part3, part4, part5) + + +def unix_timestamp_in_milliseconds(): + return int(round(time() * 1000)) + + +def valide_all_data(all_data={}) -> bool: + return isinstance(all_data, dict) \ + and all_data.get('messageType', 'pong') == 'data-sync' \ + and 'data' in all_data and 
isinstance(all_data['data'], dict) \ + and all(k in all_data['data'] for k in ('eventType', 'featureFlags', 'segments')) \ + and any(k == all_data['data']['eventType'] for k in ('full', 'patch')) \ + and isinstance(all_data['data']['featureFlags'], Iterable) \ + and isinstance(all_data['data']['segments'], Iterable) + + +def is_ascii(value: str) -> bool: + if value and isinstance(value, str): + return len(value) == len(value.encode()) + return False + + +def is_url(value: str) -> bool: + try: + result = urlparse(value) + return all([result.scheme, result.netloc]) + except: + return False + + +def check_uwsgi(): + if 'uwsgi' in sys.modules: + # noinspection PyPackageRequirements,PyUnresolvedReferences + import uwsgi + if not hasattr(uwsgi, 'opt'): + # means that we are not running under uwsgi + return + + if uwsgi.opt.get('enable-threads'): + return + if uwsgi.opt.get('threads') is not None and int(uwsgi.opt.get('threads')) > 1: + return + raise ValueError("The Python Server SDK requires the 'enable-threads' or 'threads' option be passed to uWSGI.") + + +def is_numeric(value) -> bool: + try: + float(str(value)) + return True + except ValueError: + return False + + +def from_str_datetime_to_millis(value: str) -> int: + try: + return int(round(isoparse(value).timestamp() * 1000)) + except: + return 0 + + +def cast_variation_by_flag_type(flag_type: Optional[str], variation: Optional[str]) -> Any: + try: + if 'boolean' == flag_type or 'json' == flag_type: + return json.loads(variation) # type: ignore + elif 'number' == flag_type: + float_value = float(variation) # type: ignore + return int(float_value) if float_value.is_integer() else float_value + else: + return variation + except: + return variation + + +def simple_type_inference(value: Any) -> Optional[str]: + try: + if isinstance(value, bool): + return 'boolean' + elif isinstance(value, str): + return 'string' + elif isinstance(value, Iterable) or isinstance(value, Mapping): + return 'json' + elif 
is_numeric(str(value)): + return 'number' + else: + return None + except: + return None diff --git a/fbclient/utils/exceptions.py b/fbclient/utils/exceptions.py new file mode 100644 index 0000000..220d7e1 --- /dev/null +++ b/fbclient/utils/exceptions.py @@ -0,0 +1,15 @@ +class UnSucessfulHttpRequestException(Exception): + + def __init__(self, status): + super().__init__('http request error = %d' % status) + self.__status = status + + @property + def status(self) -> int: + return self.__status + + +class DataNotValidException(Exception): + + def __init__(self, msg): + super().__init__(msg) diff --git a/fbclient/utils/exponential_backoff_jitter_strategy.py b/fbclient/utils/exponential_backoff_jitter_strategy.py new file mode 100644 index 0000000..0d051ac --- /dev/null +++ b/fbclient/utils/exponential_backoff_jitter_strategy.py @@ -0,0 +1,51 @@ +""" +Internal helper class to implement the exponential backoff algorithm for better network reconnection +Based on https://docs.aws.amazon.com/general/latest/gr/api-retries.html +""" + +import random +from time import time +from fbclient.utils import log + + +class BackoffAndJitterStrategy: + + def __init__(self, + first_delay_in_seconds: float = 1.0, + max_delay_in_seconds: float = 60.0, + reset_interval_in_seconds: float = 60, + jitter_ratio: float = 0.5): + self.__retry_times = 0 + self.__first_delay = first_delay_in_seconds + self.__reset_interval = reset_interval_in_seconds + self.__jitter_ratio = jitter_ratio + self.__latest_good_run = 0 + self.__max_delay = max_delay_in_seconds + + def set_good_run(self, current_time_in_seconds: float = 0): + if current_time_in_seconds <= 0: + self.__latest_good_run = time() + else: + self.__latest_good_run = current_time_in_seconds + + def __count_jitter_time(self, delay: float) -> float: + return delay * self.__jitter_ratio * random.random() + + def __count_backoff_time(self) -> float: + delay = self.__first_delay * (2**self.__retry_times) + return delay if delay <= self.__max_delay else 
self.__max_delay + + def next_delay(self, force_to_restart_in_max_delay=False): + current_time = time() + if self.__latest_good_run > 0 and self.__reset_interval > 0 and current_time - self.__latest_good_run > self.__reset_interval: + self.__retry_times = 0 + if force_to_restart_in_max_delay: + self.__retry_times = 0 + delay = self.__max_delay + else: + backoff = self.__count_backoff_time() + delay = self.__count_jitter_time(backoff) + backoff / 2 + self.__retry_times += 1 + self.__latest_good_run = 0 + log.debug('next delay is %s' % str(delay)) + return delay diff --git a/fbclient/utils/http_client.py b/fbclient/utils/http_client.py new file mode 100644 index 0000000..e7be001 --- /dev/null +++ b/fbclient/utils/http_client.py @@ -0,0 +1,149 @@ +import json +from queue import Queue +from threading import Condition, Event, Lock +from time import sleep +from typing import List, Mapping, Optional, Union + +import certifi +import urllib3 +from fbclient.config import Config, HTTPConfig +from fbclient.interfaces import Sender +from fbclient.utils import build_headers, log + + +def build_http_factory(config: Config, headers={}):
 + return HTTPFactory(build_headers(config.env_secret, headers), config.http) + + +class HTTPFactory: + + def __init__(self, headers, http_config: HTTPConfig): + """ + :param headers: default headers applied to every request; :param http_config: HTTP transport settings (timeouts, proxy, TLS verification) + """ + self.__headers = headers + self.__http_config = http_config + self.__timeout = urllib3.Timeout(connect=http_config.connect_timeout, read=http_config.read_timeout) + + @property + def headers(self) -> Mapping[str, str]: + return self.__headers + + @property + def http_config(self) -> HTTPConfig: + return self.__http_config + + @property + def timeout(self) -> urllib3.Timeout: + return self.__timeout + + def create_http_client(self, num_pools=1, max_size=10) -> Union[urllib3.PoolManager, urllib3.ProxyManager]: + proxy_url = self.__http_config.http_proxy + + if 
self.__http_config.disable_ssl_verification: + cert_reqs = 'CERT_NONE' + ca_certs = None + else: + cert_reqs = 'CERT_REQUIRED' + ca_certs = self.__http_config.ca_certs or certifi.where() + + if not proxy_url: + return urllib3.PoolManager(num_pools=num_pools, + maxsize=max_size, + headers=self.__headers, + timeout=self.__timeout, + cert_reqs=cert_reqs, + ca_certs=ca_certs) + else: + url = urllib3.util.parse_url(proxy_url) + if url.auth: + proxy_headers = urllib3.util.make_headers(proxy_basic_auth=url.auth) + elif self.__http_config.http_proxy_auth: + auth = self.__http_config.http_proxy_auth + proxy_headers = urllib3.util.make_headers(proxy_basic_auth=f"{auth[0]}:{auth[1]}") + else: + proxy_headers = None + + return urllib3.ProxyManager(proxy_url, + num_pools=num_pools, + maxsize=max_size, + headers=self.__headers, + proxy_headers=proxy_headers, + timeout=self.__timeout, + cert_reqs=cert_reqs, + ca_certs=ca_certs) + + +class DefaultSender(Sender): + + def __init__(self, name: str, config: Config, num_pools=1, max_size=10): + self.__http = build_http_factory(config).create_http_client(num_pools, max_size) + self.__retry_interval = config.events_retry_interval + self.__max_retries = config.events_max_retries + self.__name = name + + def postJson(self, url: str, json_str: str, fetch_response: bool = True) -> Optional[str]: + for i in range(self.__max_retries + 1): + try: + if i > 0: + sleep(self.__retry_interval) + response = self.__http.request('POST', url, body=json_str) + if response.status == 200: + log.debug('sending ok') + resp = response.data.decode('utf-8') + return resp if fetch_response else None + except Exception as e: + log.exception('FB Python SDK: sending error: %s' % str(e)) + return None + + def stop(self): + log.debug('%s sender is stopping...' 
% self.__name) + self.__http.clear() + + +class SendingJsonInfo: + def __init__(self, payloads: List[dict]) -> None: + self.__payloads = payloads + self.size = len(payloads) + + def is_contain_user(self, key: str) -> bool: + return any(payload['user']['keyId'] == key for payload in self.__payloads) + + +class MockSender(Sender): + def __init__(self, ready: Event): + self.__ready = ready + self.__lock = Condition(Lock()) + self.__buffer = Queue(maxsize=100) + self.thread_num = 0 + self.fake_error = None + self.fake_error_on_close = None + self.closed = False + + def postJson(self, url: str, json_str: str, fetch_response: bool = True) -> Optional[str]: + payloads = json.loads(json_str) + self.__buffer.put(SendingJsonInfo(payloads)) + if self.thread_num > 0 and not self.__ready.is_set(): + with self.__lock: + self.thread_num = self.thread_num - 1 + if self.thread_num <= 0: + self.__ready.set() + self.__lock.wait() + if self.fake_error is not None: + raise self.fake_error # type: ignore + return None + + def notify_locked_thread(self): + with self.__lock: + self.__lock.notify_all() + + def get_sending_json_info(self, timeout: float) -> Optional[SendingJsonInfo]: + try: + return self.__buffer.get(timeout=timeout) + except: + return None + + def stop(self): + self.closed = True + if self.fake_error_on_close is not None: + raise self.fake_error_on_close # type: ignore diff --git a/fbclient/utils/repeatable_task.py b/fbclient/utils/repeatable_task.py new file mode 100644 index 0000000..a878989 --- /dev/null +++ b/fbclient/utils/repeatable_task.py @@ -0,0 +1,37 @@ +""" +Internal helper class for a simple periodic task. 
+Based on https://medium.com/greedygame-engineering/an-elegant-way-to-run-periodic-tasks-in-python-61b7c477b679 +""" + +from threading import Event, Thread +from time import time +from typing import Callable + +from fbclient.utils import log + + +class RepeatableTask(Thread): + + def __init__(self, name: str, interval: float, callable: Callable, args=(), kwargs=None): + super().__init__(name=name, daemon=True) + self._interval = interval + self._callable = callable + self._stop = Event() + self._args = args + self._kwargs = {} if kwargs is None else kwargs + + def stop(self): + log.info("FB Python SDK: %s repeatable task is stopping..." % self.name) + self._stop.set() + + def run(self): + log.debug("%s repeatable task is starting..." % self.name) + stopped = self._stop.is_set() + while not stopped: + next_time = time() + self._interval + try: + self._callable(*self._args, **self._kwargs) + except Exception as e: + log.exception("FB Python SDK: unexpected exception on %s repeatable task: %s" % (self.name, str(e))) + delay = next_time - time() + stopped = self._stop.wait(delay) if delay > 0 else self._stop.is_set() diff --git a/fbclient/utils/rwlock.py b/fbclient/utils/rwlock.py new file mode 100644 index 0000000..9b25973 --- /dev/null +++ b/fbclient/utils/rwlock.py @@ -0,0 +1,39 @@ +""" +Internal helper class for Read-Write Lock. +Based on https://code.activestate.com/recipes/66426-readwritelock/ +""" + +import threading + + +class ReadWriteLock: + """ A lock object that allows many simultaneous "read locks", but + only one "write lock." """ + + def __init__(self): + self._read_ready = threading.Condition(threading.Lock()) + self._readers = 0 + + def read_lock(self): + """ Acquire a read lock. Blocks only if a thread has + acquired the write lock. """ + with self._read_ready: + self._readers += 1 + + def release_read_lock(self): + """ Release a read lock. 
""" + with self._read_ready: + self._readers = self._readers - 1 if self._readers > 0 else 0 + if self._readers == 0: + self._read_ready.notifyAll() + + def write_lock(self): + """ Acquire a write lock. Blocks until there are no + acquired read or write locks. """ + self._read_ready.acquire() + while self._readers > 0: + self._read_ready.wait() + + def release_write_lock(self): + """ Release a write lock. """ + self._read_ready.release() diff --git a/fbclient/utils/variation_splitting_algorithm.py b/fbclient/utils/variation_splitting_algorithm.py new file mode 100644 index 0000000..d00b9a0 --- /dev/null +++ b/fbclient/utils/variation_splitting_algorithm.py @@ -0,0 +1,25 @@ + +import hashlib +from typing import List + +__MIN_INT__ = -2147483648 + + +class VariationSplittingAlgorithm: + def __init__(self, key: str, percentage_range: List[float]): + self.__key = key + self.__percentage_range = percentage_range + + def is_key_belongs_to_percentage(self) -> bool: + try: + if self.__percentage_range[0] == 0 and self.__percentage_range[1] == 1: + return True + percentage = self.__percentage_of_key() + return percentage >= self.__percentage_range[0] and percentage < self.__percentage_range[1] + except: + return False + + def __percentage_of_key(self) -> float: + digest = hashlib.md5(self.__key.encode(encoding='ascii')).digest() + magic_num = int.from_bytes(digest[:4], byteorder='little', signed=True) + return abs(magic_num / __MIN_INT__) diff --git a/fbclient/version.py b/fbclient/version.py new file mode 100644 index 0000000..3277f64 --- /dev/null +++ b/fbclient/version.py @@ -0,0 +1 @@ +VERSION = "1.0.0" diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..fa7093a --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,3 @@ +[build-system] +requires = ["setuptools>=42"] +build-backend = "setuptools.build_meta" \ No newline at end of file diff --git a/pytest.ini b/pytest.ini new file mode 100644 index 0000000..2078a20 --- /dev/null +++ b/pytest.ini @@ 
-0,0 +1,5 @@ +[pytest] +log_format = %(asctime)s %(levelname)s %(message)s +log_date_format = %Y-%m-%d %H:%M:%S +testpaths = tests/ +addopts = -vs \ No newline at end of file diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..c5511ed --- /dev/null +++ b/requirements.txt @@ -0,0 +1,4 @@ +certifi>=2018.4.16 +urllib3>=1.22.0 +python-dateutil>=2.8.2 +websocket-client>=1.0.0 \ No newline at end of file diff --git a/setup.py b/setup.py new file mode 100644 index 0000000..9c40d89 --- /dev/null +++ b/setup.py @@ -0,0 +1,59 @@ +from setuptools import setup, find_packages + +version = {} + + +def last_version(): + with open("./fbclient/version.py") as fp: + exec(fp.read(), version) + return version['VERSION'] + + +fb_version = last_version() + + +def parse_requirements(filename): + lineiter = (line.strip() for line in open(filename)) + return [line for line in lineiter if line and not line.startswith("#")] + + +base_reqs = parse_requirements('./requirements.txt') +dev_reqs = parse_requirements('./dev-requirements.txt') + +with open('README.md') as f: + long_description = f.read() + +setup( + name='fb-python-sdk', + version=fb_version, + author='Dian SUN', + author_email='featbit.master@gmail.com', + packages=find_packages(), + url='https://github.com/featbit/featbit-python-sdk', + project_urls={ + 'Code': 'https://github.com/featbit/featbit-python-sdk', + 'Issue tracker': 'https://github.com/featbit/featbit/issues', + }, + description='A Python SDK for FeatBit platform', + long_description=long_description, + long_description_content_type='text/markdown', + install_requires=base_reqs, + classifiers=[ + 'Development Status :: 4 - Beta', + 'Intended Audience :: Developers', + 'License :: OSI Approved :: Apache Software License', + 'Operating System :: OS Independent', + 'Topic :: Software Development', + 'Topic :: Software Development :: Libraries', + 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', + 
'Programming Language :: Python :: 3.8', + 'Programming Language :: Python :: 3.9', + 'Programming Language :: Python :: 3.10', + ], + extras_require={ + "dev": dev_reqs + }, + tests_require=dev_reqs, + python_requires='>=3.6, <=3.10' +) diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/fbclient_test_data.json b/tests/fbclient_test_data.json new file mode 100644 index 0000000..afb86c9 --- /dev/null +++ b/tests/fbclient_test_data.json @@ -0,0 +1,804 @@ +{ + "messageType": "data-sync", + "data": { + "eventType": "full", + "featureFlags": [ + { + "envId": "fae873f1-fa02-4b13-b073-8771c7309655", + "name": "ff-test-seg", + "key": "ff-test-seg", + "variationType": "string", + "variations": [ + { + "id": "66b6a8f2-382a-4a9d-88c7-b15f61cd915a", + "value": "teamA" + }, + { + "id": "0ba8f62e-949f-4097-aac3-23b8edaa90a7", + "value": "teamB" + } + ], + "targetUsers": [ + { + "keyIds": [], + "variationId": "66b6a8f2-382a-4a9d-88c7-b15f61cd915a" + }, + { + "keyIds": [], + "variationId": "0ba8f62e-949f-4097-aac3-23b8edaa90a7" + } + ], + "rules": [ + { + "id": "a706bf5b-aa59-497f-b938-054d32f49261", + "name": "规则 1", + "includedInExpt": false, + "conditions": [ + { + "property": "User is in segment", + "op": null, + "value": "[\"a3ad3010-3bfa-415f-a49a-af39015670f7\"]" + } + ], + "variations": [ + { + "id": "66b6a8f2-382a-4a9d-88c7-b15f61cd915a", + "rollout": [ + 0, + 1 + ], + "exptRollout": 0 + } + ] + } + ], + "isEnabled": true, + "disabledVariationId": "0ba8f62e-949f-4097-aac3-23b8edaa90a7", + "fallthrough": { + "includedInExpt": true, + "variations": [ + { + "id": "0ba8f62e-949f-4097-aac3-23b8edaa90a7", + "rollout": [ + 0, + 1 + ], + "exptRollout": 0 + } + ] + }, + "exptIncludeAllTargets": true, + "isArchived": false, + "creatorId": "4526975f-4f6b-4420-9dde-84c276148832", + "updatorId": "4526975f-4f6b-4420-9dde-84c276148832", + "createdAt": "2022-10-25T20:51:09.334Z", + "updatedAt": "2022-10-25T20:53:17.405074Z", 
+ "id": "69185c5b-9f67-4981-94fa-af390157a3e0" + }, + { + "envId": "fae873f1-fa02-4b13-b073-8771c7309655", + "name": "ff-test-bool", + "key": "ff-test-bool", + "variationType": "boolean", + "variations": [ + { + "id": "62f6be32-1845-43fa-a8db-576cf63753c3", + "value": "true" + }, + { + "id": "59d44fad-88d8-40d9-bf46-79486e6be9b1", + "value": "false" + } + ], + "targetUsers": [ + { + "keyIds": [ + "test-user-1", + "test-user-2" + ], + "variationId": "62f6be32-1845-43fa-a8db-576cf63753c3" + }, + { + "keyIds": [], + "variationId": "59d44fad-88d8-40d9-bf46-79486e6be9b1" + } + ], + "rules": [], + "isEnabled": true, + "disabledVariationId": "59d44fad-88d8-40d9-bf46-79486e6be9b1", + "fallthrough": { + "includedInExpt": true, + "variations": [ + { + "id": "62f6be32-1845-43fa-a8db-576cf63753c3", + "rollout": [ + 0, + 0.5 + ], + "exptRollout": 0 + }, + { + "id": "59d44fad-88d8-40d9-bf46-79486e6be9b1", + "rollout": [ + 0.5, + 1 + ], + "exptRollout": 0 + } + ] + }, + "exptIncludeAllTargets": true, + "isArchived": false, + "creatorId": "4526975f-4f6b-4420-9dde-84c276148832", + "updatorId": "4526975f-4f6b-4420-9dde-84c276148832", + "createdAt": "2022-10-25T20:54:00.878Z", + "updatedAt": "2022-10-25T20:56:07.4183895Z", + "id": "1f517d5e-41f2-422b-8514-af3901586ce7" + }, + { + "envId": "fae873f1-fa02-4b13-b073-8771c7309655", + "name": "ff-test-number", + "key": "ff-test-number", + "variationType": "number", + "variations": [ + { + "id": "f5aebc05-1490-49e6-8c76-5e49fa113e09", + "value": "1" + }, + { + "id": "2ef08dd7-3f94-48b1-9a9c-0d38a80fdd87", + "value": "33" + }, + { + "id": "a569dae1-62b0-40ee-8c12-7f128725cd24", + "value": "86" + }, + { + "id": "a278f6b8-c105-4fb3-8ccc-27c27448f48c", + "value": "9999" + } + ], + "targetUsers": [ + { + "keyIds": [], + "variationId": "f5aebc05-1490-49e6-8c76-5e49fa113e09" + }, + { + "keyIds": [], + "variationId": "2ef08dd7-3f94-48b1-9a9c-0d38a80fdd87" + }, + { + "keyIds": [], + "variationId": "a569dae1-62b0-40ee-8c12-7f128725cd24" + }, + { + 
"keyIds": [], + "variationId": "a278f6b8-c105-4fb3-8ccc-27c27448f48c" + } + ], + "rules": [ + { + "id": "9600a0b4-4fe5-482e-80ed-3b87e42fd8b2", + "name": "规则 1", + "includedInExpt": false, + "conditions": [ + { + "property": "country", + "op": "Equal", + "value": "us" + } + ], + "variations": [ + { + "id": "f5aebc05-1490-49e6-8c76-5e49fa113e09", + "rollout": [ + 0, + 1 + ], + "exptRollout": 0 + } + ] + }, + { + "id": "68fe322c-d3d8-4107-88fa-a31de1a48547", + "name": "规则 2", + "includedInExpt": false, + "conditions": [ + { + "property": "country", + "op": "Equal", + "value": "fr" + } + ], + "variations": [ + { + "id": "2ef08dd7-3f94-48b1-9a9c-0d38a80fdd87", + "rollout": [ + 0, + 1 + ], + "exptRollout": 0 + } + ] + }, + { + "id": "c8584dbd-29a3-47ef-bbff-2f11e4d88c2b", + "name": "规则 3", + "includedInExpt": false, + "conditions": [ + { + "property": "country", + "op": "Equal", + "value": "cn" + } + ], + "variations": [ + { + "id": "a569dae1-62b0-40ee-8c12-7f128725cd24", + "rollout": [ + 0, + 1 + ], + "exptRollout": 0 + } + ] + } + ], + "isEnabled": true, + "disabledVariationId": "a278f6b8-c105-4fb3-8ccc-27c27448f48c", + "fallthrough": { + "includedInExpt": true, + "variations": [ + { + "id": "a278f6b8-c105-4fb3-8ccc-27c27448f48c", + "rollout": [ + 0, + 1 + ], + "exptRollout": 0 + } + ] + }, + "exptIncludeAllTargets": true, + "isArchived": false, + "creatorId": "4526975f-4f6b-4420-9dde-84c276148832", + "updatorId": "4526975f-4f6b-4420-9dde-84c276148832", + "createdAt": "2022-10-25T20:57:00.315Z", + "updatedAt": "2022-10-25T20:59:25.073141Z", + "id": "8d1b21eb-89f9-44b3-ac4c-af3901593f2e" + }, + { + "envId": "fae873f1-fa02-4b13-b073-8771c7309655", + "name": "ff-test-string", + "key": "ff-test-string", + "variationType": "string", + "variations": [ + { + "id": "5adef11d-0607-4688-967c-73a890572ee8", + "value": "email" + }, + { + "id": "a04b6251-bf4d-4868-a73d-e1d566b71cc9", + "value": "phone number" + }, + { + "id": "73009a42-d427-4d28-802a-15c40722bd8c", + "value": 
"others" + } + ], + "targetUsers": [ + { + "keyIds": [], + "variationId": "5adef11d-0607-4688-967c-73a890572ee8" + }, + { + "keyIds": [], + "variationId": "a04b6251-bf4d-4868-a73d-e1d566b71cc9" + }, + { + "keyIds": [], + "variationId": "73009a42-d427-4d28-802a-15c40722bd8c" + } + ], + "rules": [ + { + "id": "29f45488-b3cc-497d-8a80-16b2c3bb7b6a", + "name": "规则 1", + "includedInExpt": false, + "conditions": [ + { + "property": "keyId", + "op": "MatchRegex", + "value": "^[a-zA-Z0-9_-]+@[a-zA-Z0-9_-]+(\\.[a-zA-Z0-9_-]+)+$" + } + ], + "variations": [ + { + "id": "5adef11d-0607-4688-967c-73a890572ee8", + "rollout": [ + 0, + 1 + ], + "exptRollout": 0 + } + ] + }, + { + "id": "777f944d-c25b-44e6-b26c-d0cfc81d7b00", + "name": "规则 2", + "includedInExpt": false, + "conditions": [ + { + "property": "keyId", + "op": "MatchRegex", + "value": "^(\\+?0?86\\-?)?1[345789]\\d{9}$" + } + ], + "variations": [ + { + "id": "a04b6251-bf4d-4868-a73d-e1d566b71cc9", + "rollout": [ + 0, + 1 + ], + "exptRollout": 0 + } + ] + }, + { + "id": "20e1c7b3-e5c3-4ce6-9a02-ae299fdab77c", + "name": "规则 3", + "includedInExpt": false, + "conditions": [ + { + "property": "keyId", + "op": "MatchRegex", + "value": "^(\\+?33|0)[67]\\d{8}$" + } + ], + "variations": [ + { + "id": "a04b6251-bf4d-4868-a73d-e1d566b71cc9", + "rollout": [ + 0, + 1 + ], + "exptRollout": 0 + } + ] + } + ], + "isEnabled": true, + "disabledVariationId": "73009a42-d427-4d28-802a-15c40722bd8c", + "fallthrough": { + "includedInExpt": true, + "variations": [ + { + "id": "73009a42-d427-4d28-802a-15c40722bd8c", + "rollout": [ + 0, + 1 + ], + "exptRollout": 0 + } + ] + }, + "exptIncludeAllTargets": true, + "isArchived": false, + "creatorId": "4526975f-4f6b-4420-9dde-84c276148832", + "updatorId": "4526975f-4f6b-4420-9dde-84c276148832", + "createdAt": "2022-10-25T20:59:56.538Z", + "updatedAt": "2022-10-25T21:17:25.531934Z", + "id": "31f95f40-a97a-43ec-b856-af39015a0db1" + }, + { + "envId": "fae873f1-fa02-4b13-b073-8771c7309655", + "name": 
"ff-test-json", + "key": "ff-test-json", + "variationType": "json", + "variations": [ + { + "id": "b4c3a257-759f-429a-9bb8-0f30244a4aa5", + "value": "{\n \"code\": 200,\n \"reason\": \"you win 100 euros\"\n}" + }, + { + "id": "112a8d83-26df-4238-aadc-82836c671844", + "value": "{\n \"code\": 404,\n \"reason\": \"fail to win the lottery\"\n}" + } + ], + "targetUsers": [ + { + "keyIds": [], + "variationId": "b4c3a257-759f-429a-9bb8-0f30244a4aa5" + }, + { + "keyIds": [], + "variationId": "112a8d83-26df-4238-aadc-82836c671844" + } + ], + "rules": [], + "isEnabled": true, + "disabledVariationId": "112a8d83-26df-4238-aadc-82836c671844", + "fallthrough": { + "includedInExpt": true, + "variations": [ + { + "id": "b4c3a257-759f-429a-9bb8-0f30244a4aa5", + "rollout": [ + 0, + 0.25 + ], + "exptRollout": 0 + }, + { + "id": "112a8d83-26df-4238-aadc-82836c671844", + "rollout": [ + 0.25, + 1 + ], + "exptRollout": 0 + } + ] + }, + "exptIncludeAllTargets": true, + "isArchived": false, + "creatorId": "4526975f-4f6b-4420-9dde-84c276148832", + "updatorId": "4526975f-4f6b-4420-9dde-84c276148832", + "createdAt": "2022-10-25T21:18:27.175Z", + "updatedAt": "2022-10-25T21:31:05.4038972Z", + "id": "829845b2-300d-49e1-9c19-af39015f2338" + }, + { + "envId": "fae873f1-fa02-4b13-b073-8771c7309655", + "name": "ff-evaluation-test", + "key": "ff-evaluation-test", + "variationType": "string", + "variations": [ + { + "id": "7605a908-e7ec-450a-a72c-c10ad9395509", + "value": "teamA" + }, + { + "id": "e0d36071-89a1-452e-a881-8b5684753289", + "value": "teamB" + }, + { + "id": "aef687c3-41d8-40db-8710-4c1f9bac6560", + "value": "teamC" + }, + { + "id": "9253b858-c2b8-47cd-a056-14025959a1c5", + "value": "teamD" + }, + { + "id": "ef43195a-8874-44a0-aaef-f1d33c476799", + "value": "teamE" + }, + { + "id": "1b540abc-291f-4a4e-a129-19bf2101a6bc", + "value": "teamF" + }, + { + "id": "2b61a434-f29e-4913-9b4c-d22c7bf2c68d", + "value": "teamG" + }, + { + "id": "5a0fab50-c344-4b1e-b15f-c5bfae1d0087", + "value": 
"teamH" + }, + { + "id": "eacac1c1-190b-4822-897c-ec7867a4da85", + "value": "teamI" + } + ], + "targetUsers": [ + { + "keyIds": [], + "variationId": "7605a908-e7ec-450a-a72c-c10ad9395509" + }, + { + "keyIds": [ + "test-target-user" + ], + "variationId": "e0d36071-89a1-452e-a881-8b5684753289" + }, + { + "keyIds": [], + "variationId": "aef687c3-41d8-40db-8710-4c1f9bac6560" + }, + { + "keyIds": [], + "variationId": "9253b858-c2b8-47cd-a056-14025959a1c5" + }, + { + "keyIds": [], + "variationId": "ef43195a-8874-44a0-aaef-f1d33c476799" + }, + { + "keyIds": [], + "variationId": "1b540abc-291f-4a4e-a129-19bf2101a6bc" + }, + { + "keyIds": [], + "variationId": "2b61a434-f29e-4913-9b4c-d22c7bf2c68d" + }, + { + "keyIds": [], + "variationId": "5a0fab50-c344-4b1e-b15f-c5bfae1d0087" + }, + { + "keyIds": [], + "variationId": "eacac1c1-190b-4822-897c-ec7867a4da85" + } + ], + "rules": [ + { + "id": "e01374f3-c2bc-4ddd-9130-b87c24462899", + "name": "Test True Rule", + "includedInExpt": false, + "conditions": [ + { + "property": "graduated", + "op": "IsTrue", + "value": "IsTrue" + } + ], + "variations": [ + { + "id": "aef687c3-41d8-40db-8710-4c1f9bac6560", + "rollout": [ + 0, + 1 + ], + "exptRollout": 0 + } + ] + }, + { + "id": "dfb117f2-2fcc-46c8-a7da-f1bf8bdef438", + "name": "Test Equal Rule", + "includedInExpt": false, + "conditions": [ + { + "property": "country", + "op": "Equal", + "value": "CHN" + } + ], + "variations": [ + { + "id": "9253b858-c2b8-47cd-a056-14025959a1c5", + "rollout": [ + 0, + 1 + ], + "exptRollout": 0 + } + ] + }, + { + "id": "89bbebdb-39a6-4a61-8b52-91502de8a799", + "name": "Test Than Rule", + "includedInExpt": false, + "conditions": [ + { + "property": "salary", + "op": "BiggerEqualThan", + "value": "1000" + }, + { + "property": "salary", + "op": "LessEqualThan", + "value": "3000" + } + ], + "variations": [ + { + "id": "ef43195a-8874-44a0-aaef-f1d33c476799", + "rollout": [ + 0, + 1 + ], + "exptRollout": 0 + } + ] + }, + { + "id": 
"f0fe243f-8f3a-4e30-9aa9-a5663ff5d455", + "name": "Test Contain Rule", + "includedInExpt": false, + "conditions": [ + { + "property": "email", + "op": "Contains", + "value": "gmail" + } + ], + "variations": [ + { + "id": "1b540abc-291f-4a4e-a129-19bf2101a6bc", + "rollout": [ + 0, + 1 + ], + "exptRollout": 0 + } + ] + }, + { + "id": "d1d7df63-4040-4a34-92a6-12755a1c327b", + "name": "Test IsOneOf Rule", + "includedInExpt": false, + "conditions": [ + { + "property": "major", + "op": "IsOneOf", + "value": "[\"CS\",\"MATH\",\"PHYSICS\"]" + } + ], + "variations": [ + { + "id": "2b61a434-f29e-4913-9b4c-d22c7bf2c68d", + "rollout": [ + 0, + 1 + ], + "exptRollout": 0 + } + ] + }, + { + "id": "b3e61368-da26-4783-8dda-f6c971323f03", + "name": "Test StartEnd Rule", + "includedInExpt": false, + "conditions": [ + { + "property": "name", + "op": "StartsWith", + "value": "group" + }, + { + "property": "name", + "op": "EndsWith", + "value": "user" + } + ], + "variations": [ + { + "id": "5a0fab50-c344-4b1e-b15f-c5bfae1d0087", + "rollout": [ + 0, + 1 + ], + "exptRollout": 0 + } + ] + }, + { + "id": "15506c6d-3380-4019-9767-c14a142a652a", + "name": "Rule 7", + "includedInExpt": false, + "conditions": [ + { + "property": "phone", + "op": "MatchRegex", + "value": "^(\\+?0?86\\-?)?1[345789]\\d{9}$" + } + ], + "variations": [ + { + "id": "eacac1c1-190b-4822-897c-ec7867a4da85", + "rollout": [ + 0, + 1 + ], + "exptRollout": 0 + } + ] + } + ], + "isEnabled": true, + "disabledVariationId": "7605a908-e7ec-450a-a72c-c10ad9395509", + "fallthrough": { + "includedInExpt": true, + "variations": [ + { + "id": "7605a908-e7ec-450a-a72c-c10ad9395509", + "rollout": [ + 0, + 1 + ], + "exptRollout": 1 + } + ] + }, + "exptIncludeAllTargets": true, + "isArchived": false, + "creatorId": "4526975f-4f6b-4420-9dde-84c276148832", + "updatorId": "4526975f-4f6b-4420-9dde-84c276148832", + "createdAt": "2022-11-05T19:25:34.102Z", + "updatedAt": "2022-11-05T21:18:40.1500633Z", + "id": 
"5e8e5c37-9d69-432f-80ba-af4401402207" + }, + { + "envId": "fae873f1-fa02-4b13-b073-8771c7309655", + "name": "ff-test-off", + "key": "ff-test-off", + "variationType": "boolean", + "variations": [ + { + "id": "bf2ba738-0e1e-489e-89a4-d349bfe7a51f", + "value": "true" + }, + { + "id": "675dfd98-5d02-4d9b-8f12-84a717c6479f", + "value": "false" + } + ], + "targetUsers": [ + { + "keyIds": [], + "variationId": "bf2ba738-0e1e-489e-89a4-d349bfe7a51f" + }, + { + "keyIds": [], + "variationId": "675dfd98-5d02-4d9b-8f12-84a717c6479f" + } + ], + "rules": [], + "isEnabled": false, + "disabledVariationId": "675dfd98-5d02-4d9b-8f12-84a717c6479f", + "fallthrough": { + "includedInExpt": true, + "variations": [ + { + "id": "bf2ba738-0e1e-489e-89a4-d349bfe7a51f", + "rollout": [ + 0, + 1 + ], + "exptRollout": 1 + } + ] + }, + "exptIncludeAllTargets": true, + "isArchived": false, + "creatorId": "4526975f-4f6b-4420-9dde-84c276148832", + "updatorId": "4526975f-4f6b-4420-9dde-84c276148832", + "createdAt": "2022-10-27T13:29:50.696Z", + "updatedAt": "2022-11-05T21:30:28.7248857Z", + "id": "f682adb3-b041-4795-a114-af3b00de6e3c" + } + ], + "segments": [ + { + "envId": "fae873f1-fa02-4b13-b073-8771c7309655", + "name": "team-seg", + "description": "test segment", + "included": [ + "test-user-1" + ], + "excluded": [ + "test-user-2" + ], + "rules": [ + { + "id": "0a693f94-befd-4b2a-8f6a-bbeac5344d9b", + "name": "规则 1", + "conditions": [ + { + "property": "major", + "op": "IsOneOf", + "value": "[\"math\",\"cs\"]" + } + ] + } + ], + "isArchived": false, + "createdAt": "2022-10-25T20:46:47.436Z", + "updatedAt": "2022-10-25T20:50:06.603475Z", + "id": "a3ad3010-3bfa-415f-a49a-af39015670f7" + } + ] + } +} \ No newline at end of file diff --git a/tests/test_data_storage.py b/tests/test_data_storage.py new file mode 100644 index 0000000..33c559d --- /dev/null +++ b/tests/test_data_storage.py @@ -0,0 +1,100 @@ +import pytest + +from fbclient.category import DATATEST +from fbclient.data_storage import 
InMemoryDataStorage + + +@pytest.fixture +def data_storage(): + return InMemoryDataStorage() + + +@pytest.fixture +def items(): + items = {} + items["id_1"] = {"id": "id_1", "timestamp": 1, "isArchived": True, "name": "name_1"} + items["id_2"] = {"id": "id_2", "timestamp": 2, "isArchived": False, "name": "name_2"} + items["id_3"] = {"id": "id_3", "timestamp": 3, "isArchived": False, "name": "name_3"} + return items + + +def test_default_version(data_storage): + assert data_storage.latest_version == 0 + assert not data_storage.initialized + + +def test_init(data_storage, items): + all_data = {DATATEST: items} + data_storage.init(all_data, 3) + assert data_storage.latest_version == 3 + assert data_storage.initialized + assert data_storage.get(DATATEST, "id_1") is None + item = data_storage.get(DATATEST, "id_2") + assert item is not None + assert not item["isArchived"] + assert item["name"] == "name_2" + assert len(data_storage.get_all(DATATEST)) == 2 + + +def test_invalid_init(data_storage, items): + all_data = {DATATEST: items} + data_storage.init(None, 3) + assert data_storage.latest_version == 0 + assert not data_storage.initialized + data_storage.init(DATATEST, None) + assert data_storage.latest_version == 0 + assert not data_storage.initialized + data_storage.init(all_data, 0) + assert data_storage.latest_version == 0 + assert not data_storage.initialized + data_storage.init(all_data, 3) + data_storage.init(all_data, 2) + assert data_storage.latest_version == 3 + assert data_storage.initialized + + +def test_upsert(data_storage): + item_1 = {"id": "id_1", "timestamp": 1, "isArchived": True, "name": "name_1"} + item_2 = {"id": "id_2", "timestamp": 2, "isArchived": False, "name": "name_2"} + item_3 = {"id": "id_3", "timestamp": 3, "isArchived": False, "name": "name_3"} + data_storage.upsert(DATATEST, "id_1", item_1, 1) + data_storage.upsert(DATATEST, "id_2", item_2, 2) + data_storage.upsert(DATATEST, "id_3", item_3, 3) + assert data_storage.latest_version == 3 + 
assert data_storage.initialized + assert data_storage.get(DATATEST, "id_1") is None + item = data_storage.get(DATATEST, "id_2") + assert item is not None + assert not item["isArchived"] + assert item["name"] == "name_2" + assert len(data_storage.get_all(DATATEST)) == 2 + item_2 = {"id": "id_2", "timestamp": 4, "isArchived": False, "name": "name_2_2"} + data_storage.upsert(DATATEST, "id_2", item_2, 4) + item = data_storage.get(DATATEST, "id_2") + assert item is not None + assert not item["isArchived"] + assert item["name"] == "name_2_2" + + +def test_invalid_upsert(data_storage): + item_1 = {"id": "id_1", "timestamp": 1, "isArchived": False, "name": "name_1"} + item_2 = {"id": "id_2", "timestamp": 2, "isArchived": False, "name": "name_2"} + data_storage.upsert(None, "id_1", item_1, 1) + assert data_storage.latest_version == 0 + assert not data_storage.initialized + data_storage.upsert(DATATEST, None, item_1, 1) + assert data_storage.latest_version == 0 + assert not data_storage.initialized + data_storage.upsert(DATATEST, "id_1", None, 1) + assert data_storage.latest_version == 0 + assert not data_storage.initialized + data_storage.upsert(DATATEST, "id_1", item_1, None) + assert data_storage.latest_version == 0 + assert not data_storage.initialized + data_storage.upsert(DATATEST, "id_1", item_1, 0) + assert data_storage.latest_version == 0 + assert not data_storage.initialized + data_storage.upsert(DATATEST, "id_1", item_1, 1) + data_storage.upsert(DATATEST, "id_2", item_2, 1) + assert data_storage.latest_version == 1 + assert data_storage.initialized diff --git a/tests/test_data_update_status_provider.py b/tests/test_data_update_status_provider.py new file mode 100644 index 0000000..fc028f5 --- /dev/null +++ b/tests/test_data_update_status_provider.py @@ -0,0 +1,120 @@ +import threading +from time import sleep, time +from unittest.mock import patch + +import pytest + +from fbclient.category import DATATEST +from fbclient.data_storage import InMemoryDataStorage +from 
fbclient.status import DataUpdateStatusProviderIml +from fbclient.status_types import State, StateType + + +@pytest.fixture +def items(): + items = {} + items["id_1"] = {"id": "id_1", "timestamp": 1, "isArchived": True, "name": "name_1"} + items["id_2"] = {"id": "id_2", "timestamp": 2, "isArchived": False, "name": "name_2"} + items["id_3"] = {"id": "id_3", "timestamp": 3, "isArchived": False, "name": "name_3"} + return items + + +@pytest.fixture +def data_storage(): + return InMemoryDataStorage() + + +@pytest.fixture +def data_updator(data_storage): + return DataUpdateStatusProviderIml(data_storage) + + +def test_init_data_storage(data_updator, data_storage, items): + all_data = {DATATEST: items} + if data_updator.init(all_data, 3): + data_updator.update_state(State.ok_state()) + assert data_updator.latest_version == 3 + assert data_updator.initialized + assert data_updator.current_state.state_type == StateType.OK + item = data_storage.get(DATATEST, "id_2") + assert item["name"] == "name_2" + + +def test_upsert_data_storage(data_updator, data_storage, items): + all_data = {DATATEST: items} + if data_updator.init(all_data, 3): + data_updator.update_state(State.ok_state()) + item_2 = {"id": "id_2", "timestamp": 4, "isArchived": False, "name": "name_2_2"} + if data_updator.upsert(DATATEST, "id_2", item_2, 4): + data_updator.update_state(State.ok_state()) + assert data_updator.latest_version == 4 + assert data_updator.initialized + item = data_storage.get(DATATEST, "id_2") + assert item["name"] == "name_2_2" + assert data_updator.current_state.state_type == StateType.OK + + +@patch.object(InMemoryDataStorage, "init") +def test_init_data_storage_unexptected_error(mock_init_method, data_updator, data_storage, items): + mock_init_method.side_effect = RuntimeError("test exception") + all_data = {DATATEST: items} + if data_updator.init(all_data, 3): + data_updator.update_state(State.ok_state()) + assert data_updator.latest_version == 0 + assert not data_updator.initialized 
+ assert data_updator.current_state.state_type == StateType.INITIALIZING + assert len(data_storage.get_all(DATATEST)) == 0 + + +@patch.object(InMemoryDataStorage, "upsert") +def test_upsert_data_storage_unexpected_error(mock_upsert_method, data_updator, data_storage, items): + mock_upsert_method.side_effect = RuntimeError("test exception") + all_data = {DATATEST: items} + if data_updator.init(all_data, 3): + data_updator.update_state(State.ok_state()) + assert data_updator.latest_version == 3 + assert data_updator.initialized + assert data_updator.current_state.state_type == StateType.OK + item_2 = {"id": "id_2", "timestamp": 4, "isArchived": False, "name": "name_2_2"} + if data_updator.upsert(DATATEST, "id_2", item_2, 4): + data_updator.update_state(State.ok_state()) + assert data_updator.latest_version == 3 + assert data_updator.initialized + assert data_updator.current_state.state_type == StateType.INTERRUPTED + item_2 = data_storage.get(DATATEST, "id_2") + assert item_2["name"] == "name_2" + + +def test_update_state(data_updator): + data_updator.update_state(State.interrupted_state("some type", "some reason")) + assert data_updator.current_state.state_type == StateType.INITIALIZING + data_updator.update_state(State.ok_state()) + data_updator.update_state(State.interrupted_state("some type", "some reason")) + assert data_updator.current_state.state_type == StateType.INTERRUPTED + + +def test_wait_for_OKState(data_updator): + assert not data_updator.wait_for_OKState(timeout=0.1) + data_updator.update_state(State.ok_state()) + assert data_updator.wait_for_OKState(timeout=0.1) + + +def test_wait_for_OKState_in_thread(data_updator): + def dummy(): + sleep(0.05) + data_updator.update_state(State.ok_state()) + + t = threading.Thread(target=dummy) + t.start() + time_1 = time() + assert data_updator.wait_for_OKState(timeout=0.1) + time_2 = time() + assert time_2 - time_1 < 0.1 + assert time_2 - time_1 >= 0.05 + + +def test_wait_for_timeout(data_updator): + time_1 = 
time() + assert not data_updator.wait_for_OKState(timeout=0.1) + time_2 = time() + assert time_2 - time_1 >= 0.1 diff --git a/tests/test_evaluator.py b/tests/test_evaluator.py new file mode 100644 index 0000000..9576066 --- /dev/null +++ b/tests/test_evaluator.py @@ -0,0 +1,138 @@ +import json +from pathlib import Path + +import pytest + +from fbclient.category import FEATURE_FLAGS, SEGMENTS +from fbclient.common_types import FBUser +from fbclient.data_storage import InMemoryDataStorage +from fbclient.evaluator import (REASON_FALLTHROUGH, REASON_FLAG_OFF, + REASON_RULE_MATCH, REASON_TARGET_MATCH, + Evaluator) +from fbclient.event_types import FlagEvent +from fbclient.streaming import _data_to_dict +from fbclient.utils import valide_all_data + +USER_1 = {"key": "test-user-1", "name": "test-user-1", "country": "us"} +USER_2 = {"key": "test-target-user", "name": "test-target-user"} +USER_3 = {"key": "test-true-user", "name": "test-true-user", "graduated": "true"} +USER_4 = {"key": "test-equal-user", "name": "test-equal-user", "country": "CHN"} +USER_5 = {"key": "test-than-user", "name": "test-than-user", "salary": "2500"} +USER_6 = {"key": "test-contain-user", "name": "test-contain-user", "email": "test-contain-user@gmail.com"} +USER_7 = {"key": "test-isoneof-user", "name": "test-isoneof-user", "major": "CS"} +USER_8 = {"key": "group-admin-user", "name": "group-admin-user"} +USER_9 = {"key": "test-regex-user", "name": "test-regex-user", "phone": "18555358000"} +USER_10 = {"key": "test-fallthrough-user", "name": "test-fallthrough-user"} + + +@pytest.fixture +def data_storage(): + json_str = Path('tests/fbclient_test_data.json').read_text() + data_storage = InMemoryDataStorage() + if json_str: + all_data = json.loads(json_str) + if valide_all_data(all_data): + version, data = _data_to_dict(all_data['data']) + data_storage.init(data, version) + return data_storage + + +@pytest.fixture +def evaluator(data_storage): + def flag_getter(key): + return 
data_storage.get(FEATURE_FLAGS, key) + + def segment_getter(key): + return data_storage.get(SEGMENTS, key) + + return Evaluator(flag_getter, segment_getter) + + +@pytest.fixture +def disable_flag(data_storage): + return data_storage.get(FEATURE_FLAGS, "ff-test-off") + + +@pytest.fixture +def flag(data_storage): + return data_storage.get(FEATURE_FLAGS, "ff-evaluation-test") + + +def test_evaluation_when_disable_flag(evaluator, disable_flag): + user = FBUser.from_dict(USER_1) + event = FlagEvent(user) + er = evaluator.evaluate(disable_flag, user, event) + assert er.value == "false" + assert er.reason == REASON_FLAG_OFF + + +def test_evaluation_when_match_target_user(evaluator, flag): + user = FBUser.from_dict(USER_2) + event = FlagEvent(user) + er = evaluator.evaluate(flag, user, event) + assert er.value == "teamB" + assert er.reason == REASON_TARGET_MATCH + + +def test_evaluation_when_match_true_condition(evaluator, flag): + user = FBUser.from_dict(USER_3) + event = FlagEvent(user) + er = evaluator.evaluate(flag, user, event) + assert er.value == "teamC" + assert er.reason == REASON_RULE_MATCH + + +def test_evaluation_when_match_equal_condition(evaluator, flag): + user = FBUser.from_dict(USER_4) + event = FlagEvent(user) + er = evaluator.evaluate(flag, user, event) + assert er.value == "teamD" + assert er.reason == REASON_RULE_MATCH + + +def test_evaluation_when_match_than_condition(evaluator, flag): + user = FBUser.from_dict(USER_5) + event = FlagEvent(user) + er = evaluator.evaluate(flag, user, event) + assert er.value == "teamE" + assert er.reason == REASON_RULE_MATCH + + +def test_evaluation_when_match_contain_condition(evaluator, flag): + user = FBUser.from_dict(USER_6) + event = FlagEvent(user) + er = evaluator.evaluate(flag, user, event) + assert er.value == "teamF" + assert er.reason == REASON_RULE_MATCH + + +def test_evaluation_when_match_isoneof_condition(evaluator, flag): + user = FBUser.from_dict(USER_7) + event = FlagEvent(user) + er = 
evaluator.evaluate(flag, user, event) + assert er.value == "teamG" + assert er.reason == REASON_RULE_MATCH + + +def test_evaluation_when_match_startend_condition(evaluator, flag): + user = FBUser.from_dict(USER_8) + event = FlagEvent(user) + er = evaluator.evaluate(flag, user, event) + assert er.value == "teamH" + assert er.reason == REASON_RULE_MATCH + + +def test_evaluation_when_match_regex_condition(evaluator, flag): + user = FBUser.from_dict(USER_9) + event = FlagEvent(user) + er = evaluator.evaluate(flag, user, event) + assert er.value == "teamI" + assert er.reason == REASON_RULE_MATCH + + +def test_evaluation_when_match_fallthroug_condition(evaluator, flag): + user = FBUser.from_dict(USER_10) + event = FlagEvent(user) + er = evaluator.evaluate(flag, user, event) + assert er.value == "teamA" + assert er.reason == REASON_FALLTHROUGH diff --git a/tests/test_event_processor.py b/tests/test_event_processor.py new file mode 100644 index 0000000..81eb212 --- /dev/null +++ b/tests/test_event_processor.py @@ -0,0 +1,124 @@ +import base64 +import threading +from time import sleep +import pytest +from fbclient.common_types import FBUser + +from fbclient.config import Config +from fbclient.event_processor import DefaultEventProcessor +from fbclient.event_types import UserEvent +from fbclient.utils.http_client import MockSender + +FAKE_URL = "http://fake" +FAKE_ENV_SECRET = base64.b64encode(b"fake_env_secret").decode() +USER_1 = {"key": "test-user-1", "name": "test-user-1"} +USER_2 = {"key": "test-user-2", "name": "test-user-2"} +USER_3 = {"key": "test-user-3", "name": "test-user-3"} + + +@pytest.fixture +def ready_event(): + return threading.Event() + + +@pytest.fixture +def mock_sender(ready_event): + return MockSender(ready_event) + + +@pytest.fixture +def event_processor(mock_sender): + config = Config(FAKE_ENV_SECRET, + event_url=FAKE_URL, + streaming_url=FAKE_URL, + events_flush_interval=0.1, + events_max_in_queue=100) + return DefaultEventProcessor(config, 
mock_sender) + + +def test_event_processor_start_and_stop(event_processor, mock_sender): + with event_processor: + assert mock_sender.get_sending_json_info(timeout=0.2) is None + assert mock_sender.closed + + +def test_event_processor_can_gracefully_close_if_sender_error_on_close(event_processor, mock_sender): + with event_processor: + mock_sender.fake_error_on_close = RuntimeError("test exception") + assert mock_sender.get_sending_json_info(timeout=0.2) is None + assert mock_sender.closed + + +def test_event_processor_send_auto_flush(event_processor, mock_sender): + with event_processor as ep: + ep.send_event(UserEvent(FBUser.from_dict(USER_1))) + ep.send_event(UserEvent(FBUser.from_dict(USER_2))) + info = mock_sender.get_sending_json_info(timeout=0.2) + if info.size == 1: + assert info.is_contain_user("test-user-1") + else: + assert info.is_contain_user("test-user-1") + assert info.is_contain_user("test-user-2") + assert mock_sender.closed + + +def test_event_processor_send_manuel_flush(event_processor, mock_sender): + with event_processor as ep: + ep.send_event(UserEvent(FBUser.from_dict(USER_1))) + ep.flush() + info = mock_sender.get_sending_json_info(timeout=0.2) + assert info.size == 1 + assert info.is_contain_user("test-user-1") + ep.send_event(UserEvent(FBUser.from_dict(USER_2))) + ep.flush() + info = mock_sender.get_sending_json_info(timeout=0.2) + assert info.size == 1 + assert info.is_contain_user("test-user-2") + assert mock_sender.closed + + +def test_event_processor_can_work_if_sender_error(event_processor, mock_sender): + with event_processor as ep: + mock_sender.fake_error = RuntimeError("test exception") + ep.send_event(UserEvent(FBUser.from_dict(USER_1))) + ep.flush() + mock_sender.get_sending_json_info(timeout=0.2) + + mock_sender.fake_error = None + ep.send_event(UserEvent(FBUser.from_dict(USER_2))) + ep.flush() + info = mock_sender.get_sending_json_info(timeout=0.2) + assert info.size == 1 + assert info.is_contain_user("test-user-2") + assert 
mock_sender.closed + + +def test_event_processor_cannot_send_anything_after_close(event_processor, mock_sender): + assert mock_sender.get_sending_json_info(timeout=0.2) is None + event_processor.stop() + assert mock_sender.closed + event_processor.send_event(UserEvent(FBUser.from_dict(USER_1))) + event_processor.flush() + assert mock_sender.get_sending_json_info(timeout=0.2) is None + + +def test_event_processor_events_keep_in_buffer_if_all_flush_payload_runner_are_busy(event_processor, mock_sender, ready_event): + with event_processor as ep: + mock_sender.thread_num = 5 + for _ in range(5): + ep.send_event(UserEvent(FBUser.from_dict(USER_1))) + ep.flush() + mock_sender.get_sending_json_info(timeout=0.2) + ready_event.wait() + mock_sender.thread_num = 0 + ep.send_event(UserEvent(FBUser.from_dict(USER_2))) + ep.flush() + ep.send_event(UserEvent(FBUser.from_dict(USER_3))) + ep.flush() + sleep(0.1) + mock_sender.notify_locked_thread() + info = mock_sender.get_sending_json_info(timeout=0.2) + assert info.size == 2 + assert info.is_contain_user("test-user-2") + assert info.is_contain_user("test-user-3") + assert mock_sender.closed diff --git a/tests/test_fbclient.py b/tests/test_fbclient.py new file mode 100644 index 0000000..638cd9c --- /dev/null +++ b/tests/test_fbclient.py @@ -0,0 +1,250 @@ +import base64 +from pathlib import Path +from unittest.mock import patch + +import pytest + +from fbclient.client import FBClient +from fbclient.config import Config +from fbclient.data_storage import InMemoryDataStorage +from fbclient.evaluator import (REASON_CLIENT_NOT_READY, REASON_ERROR, + REASON_FALLTHROUGH, REASON_FLAG_NOT_FOUND, + REASON_RULE_MATCH, REASON_TARGET_MATCH, + REASON_USER_NOT_SPECIFIED) +from fbclient.event_processor import NullEventProcessor +from fbclient.update_processor import NullUpdateProcessor + +FAKE_ENV_SECRET = base64.b64encode(b"fake_env_secret").decode() + +FAKE_URL = "http://fake" + +USER_1 = {"key": "test-user-1", "name": "test-user-1", "country": 
"us"} +USER_2 = {"key": "test-user-2", "name": "test-user-2", "country": "fr"} +USER_3 = {"key": "test-user-3", "name": "test-user-3", "country": "cn", "major": "cs"} +USER_4 = {"key": "test-user-4", "name": "test-user-4", "country": "uk", "major": "physics"} +USER_CN_PHONE_NUM = {"key": "18555358000", "name": "test-user-5"} +USER_FR_PHONE_NUM = {"key": "0603111111", "name": "test-user-6"} +USER_EMAIL = {"key": "test-user-7@featbit.com", "name": "test-user-7"} +DUMMY_USER = {"key": "12345", "name": "dummy"} + + +def make_fb_client(update_processor_imp, event_processor_imp, start_wait=15.): + config = Config(FAKE_ENV_SECRET, + event_url=FAKE_URL, + streaming_url=FAKE_URL, + update_processor_imp=update_processor_imp, + event_processor_imp=event_processor_imp) + return FBClient(config, start_wait=start_wait) + + +def make_fb_client_offline(start_wait=15.): + json = Path('tests/fbclient_test_data.json').read_text() + config = Config(FAKE_ENV_SECRET, + event_url=FAKE_URL, + streaming_url=FAKE_URL, + offline=True) + client = FBClient(config, start_wait=start_wait) + client.initialize_from_external_json(json) + return client + + +def test_construct_null_config(): + with pytest.raises(ValueError) as exc_info: + FBClient(None) # type: ignore + assert exc_info.value.args[0] == "Config is not valid" + + +def test_construct_empty_envsecret(): + with pytest.raises(ValueError) as exc_info: + FBClient(Config("", event_url=FAKE_URL, streaming_url=FAKE_URL)) + assert exc_info.value.args[0] == "env secret is invalid" + + +def test_construct_illegal_envsecret(): + with pytest.raises(ValueError) as exc_info: + FBClient(Config(FAKE_ENV_SECRET + "©öäü£", event_url=FAKE_URL, streaming_url=FAKE_URL)) + assert exc_info.value.args[0] == "env secret is invalid" + + +def test_construct_empty_url(): + with pytest.raises(ValueError) as exc_info: + FBClient(Config(FAKE_ENV_SECRET, event_url="", streaming_url="")) + assert exc_info.value.args[0] == "streaming or event url is invalid" + + +def 
test_construct_invalid_url(): + with pytest.raises(ValueError) as exc_info: + FBClient(Config(FAKE_ENV_SECRET, event_url="mailto:John.Doe@example.com", streaming_url="urn:isbn:0-294-56559-3")) + assert exc_info.value.args[0] == "streaming or event url is invalid" + + +def test_start_and_wait(): + with make_fb_client(NullUpdateProcessor, NullEventProcessor, start_wait=0.1) as client: + assert client.initialize + + +@patch.object(NullUpdateProcessor, "start") +def test_start_and_timeout(mock_start_method): + def start(): + pass + mock_start_method.side_effect = start + with make_fb_client(NullUpdateProcessor, NullEventProcessor, start_wait=0.1) as client: + assert not client.initialize + + +@patch.object(NullUpdateProcessor, "start") +def test_start_and_nowait(mock_start_method): + def start(): + pass + mock_start_method.side_effect = start + with make_fb_client(NullUpdateProcessor, NullEventProcessor, start_wait=0) as client: + assert not client.update_status_provider.wait_for_OKState(timeout=0.1) + + +@patch.object(NullUpdateProcessor, "start") +def test_variation_when_client_not_initialized(mock_start_method): + def start(): + pass + mock_start_method.side_effect = start + with make_fb_client(NullUpdateProcessor, NullEventProcessor, start_wait=0.1) as client: + assert not client.initialize + flag_state = client.variation_detail("ff-test-bool", USER_1, False) + assert not flag_state.success + assert not flag_state.data.variation + assert flag_state.data.reason == REASON_CLIENT_NOT_READY + all_states = client.get_all_latest_flag_variations(USER_1) # type: ignore + assert not all_states.success + assert all_states.message == REASON_CLIENT_NOT_READY + + +def test_bool_variation(): + with make_fb_client_offline() as client: + assert client.initialize + assert client.is_enabled("ff-test-bool", USER_1) + assert client.variation("ff-test-bool", USER_1, False) + flag_state = client.variation_detail("ff-test-bool", USER_2, False) + assert flag_state.success + assert 
flag_state.data.variation + assert flag_state.data.reason == REASON_TARGET_MATCH + assert not client.is_enabled("ff-test-bool", USER_3) + flag_state = client.variation_detail("ff-test-bool", USER_4, False) + assert flag_state.success + assert flag_state.data.variation + assert flag_state.data.reason == REASON_FALLTHROUGH + + +def test_numeric_variation(): + with make_fb_client_offline() as client: + assert client.initialize + assert client.variation("ff-test-number", USER_1, -1) == 1 + flag_state = client.variation_detail("ff-test-number", USER_2, -1) + assert flag_state.success + assert flag_state.data.variation == 33 + assert flag_state.data.reason == REASON_RULE_MATCH + assert client.variation("ff-test-number", USER_3, -1) == 86 + flag_state = client.variation_detail("ff-test-number", USER_4, -1) + assert flag_state.success + assert flag_state.data.variation == 9999 + assert flag_state.data.reason == REASON_FALLTHROUGH + + +def test_string_variation(): + with make_fb_client_offline() as client: + assert client.initialize + assert client.variation("ff-test-string", USER_CN_PHONE_NUM, 'error') == 'phone number' + flag_state = client.variation_detail("ff-test-string", USER_FR_PHONE_NUM, 'error') + assert flag_state.success + assert flag_state.data.variation == 'phone number' + assert flag_state.data.reason == REASON_RULE_MATCH + assert client.variation("ff-test-string", USER_EMAIL, 'error') == 'email' + flag_state = client.variation_detail("ff-test-string", USER_1, 'error') + assert flag_state.success + assert flag_state.data.variation == 'others' + assert flag_state.data.reason == REASON_FALLTHROUGH + + +def test_segment(): + with make_fb_client_offline() as client: + assert client.initialize + assert client.variation("ff-test-seg", USER_1, 'error') == 'teamA' + assert client.variation("ff-test-seg", USER_2, 'error') == 'teamB' + flag_state = client.variation_detail("ff-test-seg", USER_3, 'error') + assert flag_state.success + assert flag_state.data.variation == 
'teamA' + assert flag_state.data.reason == REASON_RULE_MATCH + flag_state = client.variation_detail("ff-test-seg", USER_4, 'error') + assert flag_state.success + assert flag_state.data.variation == 'teamB' + assert flag_state.data.reason == REASON_FALLTHROUGH + + +def test_json_variation(): + with make_fb_client_offline() as client: + assert client.initialize + json_object = client.variation("ff-test-json", USER_1, {}) + assert json_object["code"] == 404 + assert json_object["reason"] == "fail to win the lottery" + flag_state = client.variation_detail("ff-test-json", DUMMY_USER, {}) + assert flag_state.success + assert flag_state.data.variation["code"] == 200 + assert flag_state.data.variation["reason"] == "you win 100 euros" + assert flag_state.data.reason == REASON_FALLTHROUGH + + +def test_flag_known(): + with make_fb_client_offline() as client: + assert client.initialize + assert client.is_flag_known("ff-test-bool") + assert client.is_flag_known("ff-test-number") + assert client.is_flag_known("ff-test-string") + assert client.is_flag_known("ff-test-seg") + assert client.is_flag_known("ff-test-json") + assert not client.is_flag_known("ff-not-existed") + + +def test_get_all_latest_flag_variations(): + with make_fb_client_offline() as client: + assert client.initialize + all_states = client.get_all_latest_flag_variations(USER_1) + ed = all_states.get("ff-test-bool") + assert ed is not None and ed.variation + ed = all_states.get("ff-test-number") + assert ed is not None and ed.variation == 1 + ed = all_states.get("ff-test-string") + assert ed is not None and ed.variation == "others" + ed = all_states.get("ff-test-seg") + assert ed is not None and ed.variation == "teamA" + ed = all_states.get("ff-test-json") + assert ed is not None and ed.variation["code"] == 404 + + +def test_variation_argument_error(): + with make_fb_client_offline() as client: + assert client.initialize + flag_state = client.variation_detail("ff-not-existed", USER_1, False) + assert not 
flag_state.success + assert not flag_state.data.variation + assert flag_state.data.reason == REASON_FLAG_NOT_FOUND + flag_state = client.variation_detail("ff-test-bool", None, False) # type: ignore + assert not flag_state.success + assert not flag_state.data.variation + assert flag_state.data.reason == REASON_USER_NOT_SPECIFIED + all_states = client.get_all_latest_flag_variations(None) # type: ignore + assert not all_states.success + assert all_states.message == REASON_USER_NOT_SPECIFIED + + +@patch.object(InMemoryDataStorage, "get_all") +@patch.object(InMemoryDataStorage, "get") +def test_variation_unexpected_error(mock_get_method, mock_get_all_method): + mock_get_method.side_effect = RuntimeError('test exception') + mock_get_all_method.side_effect = RuntimeError('test exception') + with make_fb_client_offline() as client: + assert client.initialize + flag_state = client.variation_detail("ff-test-bool", USER_1, False) + assert not flag_state.success + assert not flag_state.data.variation + assert flag_state.data.reason == REASON_ERROR + all_states = client.get_all_latest_flag_variations(USER_1) + assert not all_states.success + assert all_states.message == REASON_ERROR