Skip to content

Commit

Permalink
Merge branch 'main' into 191_add_new_constraint_operators
Browse files Browse the repository at this point in the history
  • Loading branch information
ivanklee86 committed Mar 4, 2022
2 parents 08a3f40 + 01d7d2d commit c8ae1ac
Show file tree
Hide file tree
Showing 14 changed files with 214 additions and 103 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -1,5 +1,6 @@
## Next version
* (Major) Support new constraint operators.
* (Major) Add cache abstraction. Thanks @walison17!
* (Minor) Refactor `unleash-client-python` to modernize tooling (`setuptools_scm` and centralizing tool config in `pyproject.toml`).
* (Minor) Migrate documentation to Sphinx.

Expand Down
24 changes: 14 additions & 10 deletions UnleashClient/__init__.py
Expand Up @@ -2,7 +2,6 @@
import warnings
from datetime import datetime, timezone
from typing import Callable, Optional
from fcache.cache import FileCache
from apscheduler.job import Job
from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.triggers.interval import IntervalTrigger
Expand All @@ -13,6 +12,7 @@
from UnleashClient.constants import METRIC_LAST_SENT_TIME, DISABLED_VARIATION, ETAG
from .utils import LOGGER
from .deprecation_warnings import strategy_v2xx_deprecation_check
from .cache import BaseCache, FileCache

# pylint: disable=dangerous-default-value
class UnleashClient:
Expand All @@ -33,6 +33,7 @@ class UnleashClient:
:param custom_strategies: Dictionary of custom strategy names : custom strategy objects.
:param cache_directory: Location of the cache directory. When unset, fcache will determine the location.
:param verbose_log_level: Numerical log level (https://docs.python.org/3/library/logging.html#logging-levels) for cases where checking a feature flag fails.
:param cache: Custom cache implementation that extends UnleashClient.cache.BaseCache. When unset, UnleashClient will use fcache.
"""
def __init__(self,
url: str,
Expand All @@ -48,9 +49,10 @@ def __init__(self,
custom_headers: Optional[dict] = None,
custom_options: Optional[dict] = None,
custom_strategies: Optional[dict] = None,
cache_directory: str = None,
cache_directory: Optional[str] = None,
project_name: str = None,
verbose_log_level: int = 30) -> None:
verbose_log_level: int = 30,
cache: Optional[BaseCache] = None) -> None:
custom_headers = custom_headers or {}
custom_options = custom_options or {}
custom_strategies = custom_strategies or {}
Expand All @@ -76,14 +78,16 @@ def __init__(self,
self.unleash_verbose_log_level = verbose_log_level

# Class objects
self.cache = FileCache(self.unleash_instance_id, app_cache_dir=cache_directory)
self.features: dict = {}
self.scheduler = BackgroundScheduler()
self.fl_job: Job = None
self.metric_job: Job = None
self.cache[METRIC_LAST_SENT_TIME] = datetime.now(timezone.utc)
self.cache[ETAG] = ''
self.cache.sync()

self.cache = cache or FileCache(self.unleash_app_name, directory=cache_directory)
self.cache.mset({
METRIC_LAST_SENT_TIME: datetime.now(timezone.utc),
ETAG: ''
})

# Mappings
default_strategy_mapping = {
Expand Down Expand Up @@ -149,7 +153,7 @@ def initialize_client(self) -> None:
"custom_headers": self.unleash_custom_headers,
"custom_options": self.unleash_custom_options,
"features": self.features,
"ondisk_cache": self.cache
"cache": self.cache
}

# Register app
Expand All @@ -158,7 +162,7 @@ def initialize_client(self) -> None:
self.unleash_metrics_interval, self.unleash_custom_headers,
self.unleash_custom_options, self.strategy_mapping)

fetch_and_load_features(**fl_args)
fetch_and_load_features(**fl_args) # type: ignore

# Start periodic jobs
self.scheduler.start()
Expand Down Expand Up @@ -196,7 +200,7 @@ def destroy(self) -> None:
if self.metric_job:
self.metric_job.remove()
self.scheduler.shutdown()
self.cache.delete()
self.cache.destroy()

@staticmethod
def _get_fallback_value(fallback_function: Callable, feature_name: str, context: dict) -> bool:
Expand Down
48 changes: 48 additions & 0 deletions UnleashClient/cache.py
@@ -0,0 +1,48 @@
import abc
from typing import Any, Optional

from fcache.cache import FileCache as _FileCache


class BaseCache(abc.ABC):
    """
    Abstract interface for UnleashClient caches.

    Custom cache implementations passed to ``UnleashClient`` must subclass
    this and implement every method below.
    """

    @abc.abstractmethod
    def set(self, key: str, value: Any) -> None:
        """Store ``value`` under ``key``, overwriting any existing entry."""
        pass

    @abc.abstractmethod
    def mset(self, data: dict) -> None:
        """Store every key/value pair in ``data`` in one operation."""
        pass

    @abc.abstractmethod
    def get(self, key: str, default: Optional[Any] = None) -> Optional[Any]:
        """Return the value stored under ``key``, or ``default`` if absent."""
        pass

    @abc.abstractmethod
    def exists(self, key: str) -> bool:
        """Return whether ``key`` is present in the cache."""
        pass

    @abc.abstractmethod
    def destroy(self) -> None:
        """Tear down the cache and release any backing storage."""
        pass


class FileCache(BaseCache):
    """
    Default cache implementation, persisting entries to disk via ``fcache``.

    :param name: Name of the cache (used by fcache to pick the file name).
    :param directory: Cache directory; when ``None``, fcache chooses a
        platform-specific default location.
    """

    def __init__(self, name: str, directory: Optional[str] = None):
        self._store = _FileCache(name, app_cache_dir=directory)

    def set(self, key: str, value: Any):
        """Write a single entry and flush it to disk."""
        self._store[key] = value
        self._store.sync()

    def mset(self, data: dict):
        """Write several entries at once, flushing to disk a single time."""
        self._store.update(data)
        self._store.sync()

    def get(self, key: str, default: Optional[Any] = None):
        """Return the stored value, or ``default`` when ``key`` is absent."""
        return self._store.get(key, default)

    def exists(self, key: str):
        """Return whether ``key`` is present in the cache."""
        return key in self._store

    def destroy(self):
        """Delete the cache's backing storage entirely."""
        return self._store.delete()
92 changes: 46 additions & 46 deletions UnleashClient/loader.py
@@ -1,14 +1,14 @@
from fcache.cache import FileCache
from UnleashClient.features.Feature import Feature
from UnleashClient.variants.Variants import Variants
from UnleashClient.constants import FEATURES_URL, FAILED_STRATEGIES
from UnleashClient.utils import LOGGER
from UnleashClient.cache import BaseCache


# pylint: disable=broad-except
def _create_strategies(provisioning: dict,
strategy_mapping: dict,
cache: FileCache) -> list:
cache: BaseCache) -> list:
feature_strategies = []

for strategy in provisioning["strategies"]:
Expand All @@ -27,19 +27,20 @@ def _create_strategies(provisioning: dict,
constraints=constraint_provisioning, parameters=strategy_provisioning
))
except Exception as excep:
if FAILED_STRATEGIES not in cache.keys():
cache[FAILED_STRATEGIES] = [] # Initialize cache key only if failures exist.
strategies = cache.get(FAILED_STRATEGIES, [])

if strategy['name'] not in cache[FAILED_STRATEGIES]:
if strategy['name'] not in strategies:
LOGGER.warning("Failed to load strategy. This may be a problem with a custom strategy. Exception: %s", excep)
cache[FAILED_STRATEGIES].append(strategy['name'])
strategies.append(strategy['name'])

cache.set(FAILED_STRATEGIES, strategies)

return feature_strategies


def _create_feature(provisioning: dict,
strategy_mapping: dict,
cache: FileCache) -> Feature:
cache: BaseCache) -> Feature:
if "strategies" in provisioning.keys():
parsed_strategies = _create_strategies(provisioning, strategy_mapping, cache)
else:
Expand All @@ -57,7 +58,7 @@ def _create_feature(provisioning: dict,
)


def load_features(cache: FileCache,
def load_features(cache: BaseCache,
feature_toggles: dict,
strategy_mapping: dict) -> None:
"""
Expand All @@ -69,43 +70,42 @@ def load_features(cache: FileCache,
:return:
"""
# Pull raw provisioning from cache.
try:
feature_provisioning = cache[FEATURES_URL]

# Parse provisioning
parsed_features = {}
feature_names = [d["name"] for d in feature_provisioning["features"]]

for provisioning in feature_provisioning["features"]:
parsed_features[provisioning["name"]] = provisioning

# Delete old features/cache
for feature in list(feature_toggles.keys()):
if feature not in feature_names:
del feature_toggles[feature]

# Update existing objects
for feature in feature_toggles.keys():
feature_for_update = feature_toggles[feature]
strategies = parsed_features[feature]["strategies"]

feature_for_update.enabled = parsed_features[feature]["enabled"]
if strategies:
parsed_strategies = _create_strategies(parsed_features[feature], strategy_mapping, cache)
feature_for_update.strategies = parsed_strategies

if 'variants' in parsed_features[feature]:
feature_for_update.variants = Variants(
parsed_features[feature]['variants'],
parsed_features[feature]['name']
)

# Handle creation or deletions
new_features = list(set(feature_names) - set(feature_toggles.keys()))

for feature in new_features:
feature_toggles[feature] = _create_feature(parsed_features[feature], strategy_mapping, cache)
except KeyError as cache_exception:
LOGGER.warning("Cache Exception: %s", cache_exception)
feature_provisioning = cache.get(FEATURES_URL)
if not feature_provisioning:
LOGGER.warning("Unleash client does not have cached features. "
"Please make sure client can communicate with Unleash server!")
return

# Parse provisioning
parsed_features = {}
feature_names = [d["name"] for d in feature_provisioning["features"]]

for provisioning in feature_provisioning["features"]:
parsed_features[provisioning["name"]] = provisioning

# Delete old features/cache
for feature in list(feature_toggles.keys()):
if feature not in feature_names:
del feature_toggles[feature]

# Update existing objects
for feature in feature_toggles.keys():
feature_for_update = feature_toggles[feature]
strategies = parsed_features[feature]["strategies"]

feature_for_update.enabled = parsed_features[feature]["enabled"]
if strategies:
parsed_strategies = _create_strategies(parsed_features[feature], strategy_mapping, cache)
feature_for_update.strategies = parsed_strategies

if 'variants' in parsed_features[feature]:
feature_for_update.variants = Variants(
parsed_features[feature]['variants'],
parsed_features[feature]['name']
)

# Handle creation or deletions
new_features = list(set(feature_names) - set(feature_toggles.keys()))

for feature in new_features:
feature_toggles[feature] = _create_feature(parsed_features[feature], strategy_mapping, cache)
12 changes: 5 additions & 7 deletions UnleashClient/periodic_tasks/fetch_and_load.py
@@ -1,16 +1,16 @@
from fcache.cache import FileCache
from UnleashClient.api import get_feature_toggles
from UnleashClient.loader import load_features
from UnleashClient.constants import FEATURES_URL, ETAG
from UnleashClient.utils import LOGGER
from UnleashClient.cache import BaseCache


def fetch_and_load_features(url: str,
app_name: str,
instance_id: str,
custom_headers: dict,
custom_options: dict,
cache: FileCache,
cache: BaseCache,
features: dict,
strategy_mapping: dict,
project: str = None) -> None:
Expand All @@ -21,17 +21,15 @@ def fetch_and_load_features(url: str,
custom_headers,
custom_options,
project,
cache[ETAG]
cache.get(ETAG)
)

if feature_provisioning:
cache[FEATURES_URL] = feature_provisioning
cache.sync()
cache.set(FEATURES_URL, feature_provisioning)
else:
LOGGER.warning("Unable to get feature flag toggles, using cached provisioning.")

if etag:
cache[ETAG] = etag
cache.sync()
cache.set(ETAG, etag)

load_features(cache, features, strategy_mapping)
9 changes: 4 additions & 5 deletions UnleashClient/periodic_tasks/send_metrics.py
@@ -1,9 +1,9 @@
from collections import ChainMap
from datetime import datetime, timezone
from fcache.cache import FileCache
from UnleashClient.api import send_metrics
from UnleashClient.constants import METRIC_LAST_SENT_TIME
from UnleashClient.utils import LOGGER
from UnleashClient.cache import BaseCache


def aggregate_and_send_metrics(url: str,
Expand All @@ -12,7 +12,7 @@ def aggregate_and_send_metrics(url: str,
custom_headers: dict,
custom_options: dict,
features: dict,
ondisk_cache: FileCache
cache: BaseCache
) -> None:
feature_stats_list = []

Expand All @@ -34,15 +34,14 @@ def aggregate_and_send_metrics(url: str,
"appName": app_name,
"instanceId": instance_id,
"bucket": {
"start": ondisk_cache[METRIC_LAST_SENT_TIME].isoformat(),
"start": cache.get(METRIC_LAST_SENT_TIME).isoformat(),
"stop": datetime.now(timezone.utc).isoformat(),
"toggles": dict(ChainMap(*feature_stats_list))
}
}

if feature_stats_list:
send_metrics(url, metrics_request, custom_headers, custom_options)
ondisk_cache[METRIC_LAST_SENT_TIME] = datetime.now(timezone.utc)
ondisk_cache.sync()
cache.set(METRIC_LAST_SENT_TIME, datetime.now(timezone.utc))
else:
LOGGER.debug("No feature flags with metrics, skipping metrics submission.")
15 changes: 15 additions & 0 deletions docs/basecache.rst
@@ -0,0 +1,15 @@
****************************************
Cache
****************************************

.. autoclass:: UnleashClient.cache.BaseCache

.. automethod:: set

.. automethod:: mset

.. automethod:: get

.. automethod:: exists

.. automethod:: destroy

0 comments on commit c8ae1ac

Please sign in to comment.