diff --git a/CHANGELOG.md b/CHANGELOG.md
index 89786a6d..e20d6fbc 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,6 @@
 ## Next version
 * (Major) Support new constraint operators.
+* (Major) Add cache abstraction. Thanks @walison17!
 * (Minor) Refactor `unleash-client-python` to modernize tooling (`setuptools_scm` and centralizing tool config in `pyproject.toml`).
 * (Minor) Migrate documentation to Sphinx.
 
diff --git a/UnleashClient/__init__.py b/UnleashClient/__init__.py
index 61e41a7c..3dc1cab8 100644
--- a/UnleashClient/__init__.py
+++ b/UnleashClient/__init__.py
@@ -2,7 +2,6 @@
 import warnings
 from datetime import datetime, timezone
 from typing import Callable, Optional
-from fcache.cache import FileCache
 from apscheduler.job import Job
 from apscheduler.schedulers.background import BackgroundScheduler
 from apscheduler.triggers.interval import IntervalTrigger
@@ -13,6 +12,7 @@
 from UnleashClient.constants import METRIC_LAST_SENT_TIME, DISABLED_VARIATION, ETAG
 from .utils import LOGGER
 from .deprecation_warnings import strategy_v2xx_deprecation_check
+from .cache import BaseCache, FileCache
 
 # pylint: disable=dangerous-default-value
 class UnleashClient:
@@ -33,6 +33,7 @@ class UnleashClient:
     :param custom_strategies: Dictionary of custom strategy names : custom strategy objects.
     :param cache_directory: Location of the cache directory. When unset, FCache will determine the location.
     :param verbose_log_level: Numerical log level (https://docs.python.org/3/library/logging.html#logging-levels) for cases where checking a feature flag fails.
+    :param cache: Custom cache implementation that extends UnleashClient.cache.BaseCache. When unset, UnleashClient will use FCache.
     """
     def __init__(self,
                  url: str,
@@ -48,9 +49,10 @@ def __init__(self,
                  custom_headers: Optional[dict] = None,
                  custom_options: Optional[dict] = None,
                  custom_strategies: Optional[dict] = None,
-                 cache_directory: str = None,
+                 cache_directory: Optional[str] = None,
                  project_name: str = None,
-                 verbose_log_level: int = 30) -> None:
+                 verbose_log_level: int = 30,
+                 cache: Optional[BaseCache] = None) -> None:
         custom_headers = custom_headers or {}
         custom_options = custom_options or {}
         custom_strategies = custom_strategies or {}
@@ -76,14 +78,16 @@ def __init__(self,
         self.unleash_verbose_log_level = verbose_log_level
 
         # Class objects
-        self.cache = FileCache(self.unleash_instance_id, app_cache_dir=cache_directory)
         self.features: dict = {}
         self.scheduler = BackgroundScheduler()
         self.fl_job: Job = None
         self.metric_job: Job = None
-        self.cache[METRIC_LAST_SENT_TIME] = datetime.now(timezone.utc)
-        self.cache[ETAG] = ''
-        self.cache.sync()
+
+        self.cache = cache or FileCache(self.unleash_app_name, directory=cache_directory)
+        self.cache.mset({
+            METRIC_LAST_SENT_TIME: datetime.now(timezone.utc),
+            ETAG: ''
+        })
 
         # Mappings
         default_strategy_mapping = {
@@ -149,7 +153,7 @@ def initialize_client(self) -> None:
             "custom_headers": self.unleash_custom_headers,
             "custom_options": self.unleash_custom_options,
             "features": self.features,
-            "ondisk_cache": self.cache
+            "cache": self.cache
         }
 
         # Register app
@@ -158,7 +162,7 @@
                             self.unleash_metrics_interval, self.unleash_custom_headers,
                             self.unleash_custom_options, self.strategy_mapping)
 
-        fetch_and_load_features(**fl_args)
+        fetch_and_load_features(**fl_args)  # type: ignore
 
         # Start periodic jobs
         self.scheduler.start()
@@ -196,7 +200,7 @@ def destroy(self) -> None:
         if self.metric_job:
             self.metric_job.remove()
         self.scheduler.shutdown()
-        self.cache.delete()
+        self.cache.destroy()
 
     @staticmethod
     def _get_fallback_value(fallback_function: Callable, feature_name: str, context: dict) -> bool:
diff --git a/UnleashClient/cache.py b/UnleashClient/cache.py
new file mode 100644
index 00000000..51c603c7
--- /dev/null
+++ b/UnleashClient/cache.py
@@ -0,0 +1,48 @@
+import abc
+from typing import Any, Optional
+
+from fcache.cache import FileCache as _FileCache
+
+
+class BaseCache(abc.ABC):
+    @abc.abstractmethod
+    def set(self, key: str, value: Any):
+        pass
+
+    @abc.abstractmethod
+    def mset(self, data: dict):
+        pass
+
+    @abc.abstractmethod
+    def get(self, key: str, default: Optional[Any] = None):
+        pass
+
+    @abc.abstractmethod
+    def exists(self, key: str):
+        pass
+
+    @abc.abstractmethod
+    def destroy(self):
+        pass
+
+
+class FileCache(BaseCache):
+    def __init__(self, name: str, directory: Optional[str] = None):
+        self._cache = _FileCache(name, app_cache_dir=directory)
+
+    def set(self, key: str, value: Any):
+        self._cache[key] = value
+        self._cache.sync()
+
+    def mset(self, data: dict):
+        self._cache.update(data)
+        self._cache.sync()
+
+    def get(self, key: str, default: Optional[Any] = None):
+        return self._cache.get(key, default)
+
+    def exists(self, key: str):
+        return key in self._cache
+
+    def destroy(self):
+        return self._cache.delete()
diff --git a/UnleashClient/loader.py b/UnleashClient/loader.py
index eecbf70d..c6575a40 100644
--- a/UnleashClient/loader.py
+++ b/UnleashClient/loader.py
@@ -1,14 +1,14 @@
-from fcache.cache import FileCache
 from UnleashClient.features.Feature import Feature
 from UnleashClient.variants.Variants import Variants
 from UnleashClient.constants import FEATURES_URL, FAILED_STRATEGIES
 from UnleashClient.utils import LOGGER
+from UnleashClient.cache import BaseCache
 
 
 # pylint: disable=broad-except
 def _create_strategies(provisioning: dict,
                        strategy_mapping: dict,
-                       cache: FileCache) -> list:
+                       cache: BaseCache) -> list:
     feature_strategies = []
 
     for strategy in provisioning["strategies"]:
@@ -27,19 +27,20 @@ def _create_strategies(provisioning: dict,
                     constraints=constraint_provisioning,
                     parameters=strategy_provisioning
                 ))
         except Exception as excep:
-            if FAILED_STRATEGIES not in cache.keys():
-                cache[FAILED_STRATEGIES] = []  # Initialize cache key only if failures exist.
+            strategies = cache.get(FAILED_STRATEGIES, [])
 
-            if strategy['name'] not in cache[FAILED_STRATEGIES]:
+            if strategy['name'] not in strategies:
                 LOGGER.warning("Failed to load strategy. This may be a problem with a custom strategy. Exception: %s", excep)
-                cache[FAILED_STRATEGIES].append(strategy['name'])
+                strategies.append(strategy['name'])
+
+            cache.set(FAILED_STRATEGIES, strategies)
 
     return feature_strategies
 
 
 def _create_feature(provisioning: dict,
                     strategy_mapping: dict,
-                    cache: FileCache) -> Feature:
+                    cache: BaseCache) -> Feature:
     if "strategies" in provisioning.keys():
         parsed_strategies = _create_strategies(provisioning, strategy_mapping, cache)
     else:
@@ -57,7 +58,7 @@
     )
 
 
-def load_features(cache: FileCache,
+def load_features(cache: BaseCache,
                   feature_toggles: dict,
                   strategy_mapping: dict) -> None:
     """
@@ -69,43 +70,42 @@ def load_features(cache: FileCache,
     :return:
     """
     # Pull raw provisioning from cache.
-    try:
-        feature_provisioning = cache[FEATURES_URL]
-
-        # Parse provisioning
-        parsed_features = {}
-        feature_names = [d["name"] for d in feature_provisioning["features"]]
-
-        for provisioning in feature_provisioning["features"]:
-            parsed_features[provisioning["name"]] = provisioning
-
-        # Delete old features/cache
-        for feature in list(feature_toggles.keys()):
-            if feature not in feature_names:
-                del feature_toggles[feature]
-
-        # Update existing objects
-        for feature in feature_toggles.keys():
-            feature_for_update = feature_toggles[feature]
-            strategies = parsed_features[feature]["strategies"]
-
-            feature_for_update.enabled = parsed_features[feature]["enabled"]
-            if strategies:
-                parsed_strategies = _create_strategies(parsed_features[feature], strategy_mapping, cache)
-                feature_for_update.strategies = parsed_strategies
-
-            if 'variants' in parsed_features[feature]:
-                feature_for_update.variants = Variants(
-                    parsed_features[feature]['variants'],
-                    parsed_features[feature]['name']
-                )
-
-        # Handle creation or deletions
-        new_features = list(set(feature_names) - set(feature_toggles.keys()))
-
-        for feature in new_features:
-            feature_toggles[feature] = _create_feature(parsed_features[feature], strategy_mapping, cache)
-    except KeyError as cache_exception:
-        LOGGER.warning("Cache Exception: %s", cache_exception)
+    feature_provisioning = cache.get(FEATURES_URL)
+    if not feature_provisioning:
         LOGGER.warning("Unleash client does not have cached features. "
                        "Please make sure client can communicate with Unleash server!")
+        return
+
+    # Parse provisioning
+    parsed_features = {}
+    feature_names = [d["name"] for d in feature_provisioning["features"]]
+
+    for provisioning in feature_provisioning["features"]:
+        parsed_features[provisioning["name"]] = provisioning
+
+    # Delete old features/cache
+    for feature in list(feature_toggles.keys()):
+        if feature not in feature_names:
+            del feature_toggles[feature]
+
+    # Update existing objects
+    for feature in feature_toggles.keys():
+        feature_for_update = feature_toggles[feature]
+        strategies = parsed_features[feature]["strategies"]
+
+        feature_for_update.enabled = parsed_features[feature]["enabled"]
+        if strategies:
+            parsed_strategies = _create_strategies(parsed_features[feature], strategy_mapping, cache)
+            feature_for_update.strategies = parsed_strategies
+
+        if 'variants' in parsed_features[feature]:
+            feature_for_update.variants = Variants(
+                parsed_features[feature]['variants'],
+                parsed_features[feature]['name']
+            )
+
+    # Handle creation or deletions
+    new_features = list(set(feature_names) - set(feature_toggles.keys()))
+
+    for feature in new_features:
+        feature_toggles[feature] = _create_feature(parsed_features[feature], strategy_mapping, cache)
diff --git a/UnleashClient/periodic_tasks/fetch_and_load.py b/UnleashClient/periodic_tasks/fetch_and_load.py
index 39be5f70..dabe55c1 100644
--- a/UnleashClient/periodic_tasks/fetch_and_load.py
+++ b/UnleashClient/periodic_tasks/fetch_and_load.py
@@ -1,8 +1,8 @@
-from fcache.cache import FileCache
 from UnleashClient.api import get_feature_toggles
 from UnleashClient.loader import load_features
 from UnleashClient.constants import FEATURES_URL, ETAG
 from UnleashClient.utils import LOGGER
+from UnleashClient.cache import BaseCache
 
 
 def fetch_and_load_features(url: str,
@@ -10,7 +10,7 @@
                             instance_id: str,
                             custom_headers: dict,
                             custom_options: dict,
-                            cache: FileCache,
+                            cache: BaseCache,
                             features: dict,
                             strategy_mapping: dict,
                             project: str = None) -> None:
@@ -21,17 +21,15 @@ def fetch_and_load_features(url: str,
         custom_headers,
         custom_options,
         project,
-        cache[ETAG]
+        cache.get(ETAG)
     )
 
     if feature_provisioning:
-        cache[FEATURES_URL] = feature_provisioning
-        cache.sync()
+        cache.set(FEATURES_URL, feature_provisioning)
    else:
         LOGGER.warning("Unable to get feature flag toggles, using cached provisioning.")
 
     if etag:
-        cache[ETAG] = etag
-        cache.sync()
+        cache.set(ETAG, etag)
 
     load_features(cache, features, strategy_mapping)
diff --git a/UnleashClient/periodic_tasks/send_metrics.py b/UnleashClient/periodic_tasks/send_metrics.py
index 956d073d..c03c7a5f 100644
--- a/UnleashClient/periodic_tasks/send_metrics.py
+++ b/UnleashClient/periodic_tasks/send_metrics.py
@@ -1,9 +1,9 @@
 from collections import ChainMap
 from datetime import datetime, timezone
-from fcache.cache import FileCache
 from UnleashClient.api import send_metrics
 from UnleashClient.constants import METRIC_LAST_SENT_TIME
 from UnleashClient.utils import LOGGER
+from UnleashClient.cache import BaseCache
 
 
 def aggregate_and_send_metrics(url: str,
@@ -12,7 +12,7 @@
                                custom_headers: dict,
                                custom_options: dict,
                                features: dict,
-                               ondisk_cache: FileCache
+                               cache: BaseCache
                                ) -> None:
     feature_stats_list = []
 
@@ -34,7 +34,7 @@ def aggregate_and_send_metrics(url: str,
         "appName": app_name,
         "instanceId": instance_id,
         "bucket": {
-            "start": ondisk_cache[METRIC_LAST_SENT_TIME].isoformat(),
+            "start": cache.get(METRIC_LAST_SENT_TIME).isoformat(),
             "stop": datetime.now(timezone.utc).isoformat(),
             "toggles": dict(ChainMap(*feature_stats_list))
         }
@@ -42,7 +42,6 @@
 
     if feature_stats_list:
         send_metrics(url, metrics_request, custom_headers, custom_options)
-        ondisk_cache[METRIC_LAST_SENT_TIME] = datetime.now(timezone.utc)
-        ondisk_cache.sync()
+        cache.set(METRIC_LAST_SENT_TIME, datetime.now(timezone.utc))
     else:
         LOGGER.debug("No feature flags with metrics, skipping metrics submission.")
diff --git a/docs/basecache.rst b/docs/basecache.rst
new file mode 100644
index 00000000..d1c37550
--- /dev/null
+++ b/docs/basecache.rst
@@ -0,0 +1,15 @@
+****************************************
+Cache
+****************************************
+
+.. autoclass:: UnleashClient.cache.BaseCache
+
+    .. automethod:: set
+
+    .. automethod:: mset
+
+    .. automethod:: get
+
+    .. automethod:: exists
+
+    .. automethod:: destroy
diff --git a/docs/customcache.rst b/docs/customcache.rst
new file mode 100644
index 00000000..e051f66c
--- /dev/null
+++ b/docs/customcache.rst
@@ -0,0 +1,40 @@
+****************************************
+Custom Cache
+****************************************
+
+Implementing a custom cache
+#######################################
+
+- Create a custom cache object by sub-classing the BaseCache object.
+- Override all the methods from the base class.
+
+.. code-block:: python
+
+    from typing import Any, Optional
+
+    from UnleashClient.cache import BaseCache
+    from fcache.cache import FileCache as _FileCache
+
+    class FileCache(BaseCache):
+        # This is specific for FileCache. Depending on the cache you're using, this may look different!
+        def __init__(self, name: str, directory: Optional[str] = None):
+            self._cache = _FileCache(name, app_cache_dir=directory)
+
+        def set(self, key: str, value: Any):
+            self._cache[key] = value
+            self._cache.sync()
+
+        def mset(self, data: dict):
+            self._cache.update(data)
+            self._cache.sync()
+
+        def get(self, key: str, default: Optional[Any] = None):
+            return self._cache.get(key, default)
+
+        def exists(self, key: str):
+            return key in self._cache
+
+        def destroy(self):
+            return self._cache.delete()
+
+- Initialize your custom cache object and pass it to ``UnleashClient`` using the ``cache`` argument.
diff --git a/docs/index.rst b/docs/index.rst
index feef19ba..0b295335 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -20,8 +20,9 @@ Documentation for Unleash's Python client. See the sidebar for more topics!
    :hidden:
 
    customstrategies
-   wsgi
+   customcache
    development
+   wsgi
 
 .. toctree::
    :caption: API Documentation
@@ -30,6 +31,7 @@ Documentation for Unleash's Python client. See the sidebar for more topics!
 
    unleashclient
    strategy
+   basecache
 
 .. toctree::
    :caption: Changelog
diff --git a/tests/utilities/decorators.py b/tests/conftest.py
similarity index 53%
rename from tests/utilities/decorators.py
rename to tests/conftest.py
index e9bf1e45..d556f62c 100644
--- a/tests/utilities/decorators.py
+++ b/tests/conftest.py
@@ -1,40 +1,44 @@
 import pytest
 import uuid
 from datetime import datetime, timezone
-from fcache.cache import FileCache
 from UnleashClient.constants import FEATURES_URL, METRIC_LAST_SENT_TIME, ETAG
 from tests.utilities.mocks import MOCK_ALL_FEATURES, MOCK_CUSTOM_STRATEGY
+from UnleashClient.cache import FileCache
 
 
 @pytest.fixture()
 def cache_empty():
     cache_name = 'pytest_%s' % uuid.uuid4()
     temporary_cache = FileCache(cache_name)
-    temporary_cache[METRIC_LAST_SENT_TIME] = datetime.now(timezone.utc)
-    temporary_cache[ETAG] = ''
+    temporary_cache.mset({
+        METRIC_LAST_SENT_TIME: datetime.now(timezone.utc),
+        ETAG: ''
+    })
     yield temporary_cache
-    temporary_cache.delete()
+    temporary_cache.destroy()
 
 
 @pytest.fixture()
 def cache_full():
     cache_name = 'pytest_%s' % uuid.uuid4()
     temporary_cache = FileCache(cache_name)
-    temporary_cache[FEATURES_URL] = MOCK_ALL_FEATURES
-    temporary_cache[METRIC_LAST_SENT_TIME] = datetime.now(timezone.utc)
-    temporary_cache[ETAG] = ''
-    temporary_cache.sync()
+    temporary_cache.mset({
+        FEATURES_URL: MOCK_ALL_FEATURES,
+        METRIC_LAST_SENT_TIME: datetime.now(timezone.utc),
+        ETAG: ''
+    })
     yield temporary_cache
-    temporary_cache.delete()
+    temporary_cache.destroy()
 
 
 @pytest.fixture()
 def cache_custom():
     cache_name = 'pytest_%s' % uuid.uuid4()
     temporary_cache = FileCache(cache_name)
-    temporary_cache[FEATURES_URL] = MOCK_CUSTOM_STRATEGY
-    temporary_cache[METRIC_LAST_SENT_TIME] = datetime.now(timezone.utc)
-    temporary_cache[ETAG] = ''
-    temporary_cache.sync()
+    temporary_cache.mset({
+        FEATURES_URL: MOCK_CUSTOM_STRATEGY,
+        METRIC_LAST_SENT_TIME: datetime.now(timezone.utc),
+        ETAG: ''
+    })
     yield temporary_cache
-    temporary_cache.delete()
+    temporary_cache.destroy()
diff --git a/tests/unit_tests/periodic/test_aggregate_and_send_metrics.py b/tests/unit_tests/periodic/test_aggregate_and_send_metrics.py
index 51cff150..d5c7e046 100644
--- a/tests/unit_tests/periodic/test_aggregate_and_send_metrics.py
+++ b/tests/unit_tests/periodic/test_aggregate_and_send_metrics.py
@@ -1,12 +1,12 @@
 import json
 from datetime import datetime, timezone, timedelta
 import responses
-from fcache.cache import FileCache
 from tests.utilities.testing_constants import URL, APP_NAME, INSTANCE_ID, \
     CUSTOM_HEADERS, CUSTOM_OPTIONS, IP_LIST
 from UnleashClient.constants import METRICS_URL, METRIC_LAST_SENT_TIME
 from UnleashClient.periodic_tasks import aggregate_and_send_metrics
 from UnleashClient.features import Feature
 from UnleashClient.strategies import RemoteAddress, Default
+from UnleashClient.cache import FileCache
 
 FULL_METRICS_URL = URL + METRICS_URL
@@ -19,7 +19,7 @@ def test_aggregate_and_send_metrics():
     start_time = datetime.now(timezone.utc) - timedelta(seconds=60)
 
     cache = FileCache("TestCache")
-    cache[METRIC_LAST_SENT_TIME] = start_time
+    cache.set(METRIC_LAST_SENT_TIME, start_time)
     strategies = [RemoteAddress(parameters={"IPs": IP_LIST}), Default()]
     my_feature1 = Feature("My Feature1", True, strategies)
     my_feature1.yes_count = 1
@@ -44,7 +44,7 @@ def test_aggregate_and_send_metrics():
     assert request['bucket']["toggles"]["My Feature1"]["yes"] == 1
     assert request['bucket']["toggles"]["My Feature1"]["no"] == 1
     assert "My Feature3" not in request['bucket']["toggles"].keys()
-    assert cache[METRIC_LAST_SENT_TIME] > start_time
+    assert cache.get(METRIC_LAST_SENT_TIME) > start_time
 
 
 @responses.activate
@@ -53,7 +53,7 @@ def test_no_metrics():
     start_time = datetime.now(timezone.utc) - timedelta(seconds=60)
 
     cache = FileCache("TestCache")
-    cache[METRIC_LAST_SENT_TIME] = start_time
+    cache.set(METRIC_LAST_SENT_TIME, start_time)
 
     strategies = [RemoteAddress(parameters={"IPs": IP_LIST}), Default()]
     my_feature1 = Feature("My Feature1", True, strategies)
diff --git a/tests/unit_tests/periodic/test_fetch_and_load.py b/tests/unit_tests/periodic/test_fetch_and_load.py
index 9c9d13a6..725176f5 100644
--- a/tests/unit_tests/periodic/test_fetch_and_load.py
+++ b/tests/unit_tests/periodic/test_fetch_and_load.py
@@ -4,7 +4,6 @@
 from UnleashClient.features import Feature
 from tests.utilities.mocks.mock_features import MOCK_FEATURE_RESPONSE, MOCK_FEATURE_RESPONSE_PROJECT
 from tests.utilities.testing_constants import URL, APP_NAME, INSTANCE_ID, CUSTOM_HEADERS, CUSTOM_OPTIONS, DEFAULT_STRATEGY_MAPPING, PROJECT_URL, PROJECT_NAME, ETAG_VALUE
-from tests.utilities.decorators import cache_empty  # noqa: F401
 
 
 FULL_FEATURE_URL = URL + FEATURES_URL
@@ -27,7 +26,7 @@ def test_fetch_and_load(cache_empty):  # noqa: F811
                             DEFAULT_STRATEGY_MAPPING)
 
     assert isinstance(in_memory_features["testFlag"], Feature)
-    assert temp_cache[ETAG] == ETAG_VALUE
+    assert temp_cache.get(ETAG) == ETAG_VALUE
 
 
 @responses.activate
diff --git a/tests/unit_tests/test_client.py b/tests/unit_tests/test_client.py
index 62e536a1..cf58e922 100644
--- a/tests/unit_tests/test_client.py
+++ b/tests/unit_tests/test_client.py
@@ -13,6 +13,7 @@
 from tests.utilities.mocks.mock_features import MOCK_FEATURE_RESPONSE, MOCK_FEATURE_RESPONSE_PROJECT
 from tests.utilities.mocks.mock_all_features import MOCK_ALL_FEATURES
 from UnleashClient.constants import REGISTER_URL, FEATURES_URL, METRICS_URL
+from UnleashClient.cache import FileCache
 
 
 class EnvironmentStrategy(Strategy):
@@ -33,27 +34,32 @@ def apply(self, context: dict = None) -> bool:
         return default_value
 
 
+@pytest.fixture
+def cache(tmpdir):
+    return FileCache(APP_NAME, directory=tmpdir.dirname)
+
+
 @pytest.fixture()
-def unleash_client(tmpdir):
+def unleash_client(cache):
     unleash_client = UnleashClient(
         URL,
         APP_NAME,
         refresh_interval=REFRESH_INTERVAL,
         metrics_interval=METRICS_INTERVAL,
-        cache_directory=tmpdir.dirname
+        cache=cache
     )
     yield unleash_client
     unleash_client.destroy()
 
 
 @pytest.fixture()
-def unleash_client_project(tmpdir):
+def unleash_client_project(cache):
     unleash_client = UnleashClient(
         URL,
         APP_NAME,
         refresh_interval=REFRESH_INTERVAL,
         metrics_interval=METRICS_INTERVAL,
-        cache_directory=tmpdir.dirname,
+        cache=cache,
         project_name=PROJECT_NAME
     )
     yield unleash_client
@@ -61,19 +67,19 @@
 
 
 @pytest.fixture()
-def unleash_client_nodestroy(tmpdir):
+def unleash_client_nodestroy(cache):
     unleash_client = UnleashClient(
         URL,
         APP_NAME,
         refresh_interval=REFRESH_INTERVAL,
         metrics_interval=METRICS_INTERVAL,
-        cache_directory=tmpdir.dirname
+        cache=cache
     )
     yield unleash_client
 
 
 @pytest.fixture()
-def unleash_client_toggle_only(tmpdir):
+def unleash_client_toggle_only(cache):
     unleash_client = UnleashClient(
         URL,
         APP_NAME,
@@ -81,7 +87,7 @@
         metrics_interval=METRICS_INTERVAL,
         disable_registration=True,
         disable_metrics=True,
-        cache_directory=str(tmpdir)
+        cache=cache
     )
     yield unleash_client
     unleash_client.destroy()
diff --git a/tests/unit_tests/test_loader.py b/tests/unit_tests/test_loader.py
index 9354dc1c..2bb8e53c 100644
--- a/tests/unit_tests/test_loader.py
+++ b/tests/unit_tests/test_loader.py
@@ -6,7 +6,6 @@
 from UnleashClient.constants import FEATURES_URL, FAILED_STRATEGIES
 from tests.utilities.mocks import MOCK_ALL_FEATURES
 from tests.utilities.testing_constants import DEFAULT_STRATEGY_MAPPING
-from tests.utilities.decorators import cache_full, cache_custom  # noqa: F401
 
 
 def test_loader_initialization(cache_full):  # noqa: F811
@@ -48,13 +47,12 @@ def test_loader_refresh_strategies(cache_full):  # noqa: F811
     # Simulate update mutation
     mock_updated = copy.deepcopy(MOCK_ALL_FEATURES)
     mock_updated["features"][4]["strategies"][0]["parameters"]["percentage"] = 60
-    temp_cache[FEATURES_URL] = mock_updated
-    temp_cache.sync()
+    temp_cache.set(FEATURES_URL, mock_updated)
 
     load_features(temp_cache, in_memory_features, DEFAULT_STRATEGY_MAPPING)
 
     assert in_memory_features["GradualRolloutUserID"].strategies[0].parameters["percentage"] == 60
-    assert len(temp_cache[FAILED_STRATEGIES]) == 1
+    assert len(temp_cache.get(FAILED_STRATEGIES)) == 1
 
 
 def test_loader_refresh_variants(cache_full):  # noqa: F811
@@ -67,8 +65,7 @@ def test_loader_refresh_variants(cache_full):  # noqa: F811
     # Simulate update mutation
     mock_updated = copy.deepcopy(MOCK_ALL_FEATURES)
     mock_updated["features"][8]["variants"][0]["name"] = "VariantA"
-    temp_cache[FEATURES_URL] = mock_updated
-    temp_cache.sync()
+    temp_cache.set(FEATURES_URL, mock_updated)
 
     load_features(temp_cache, in_memory_features, DEFAULT_STRATEGY_MAPPING)
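
Usage note (not part of the diff above): the change swaps the hard dependency on fcache for the BaseCache interface, so any object that implements set/mset/get/exists/destroy can back the client. The sketch below shows how a consumer of this change could plug in a custom cache via the new cache argument; the InMemoryCache class, the server URL, and the app name are illustrative assumptions, not code from this change.

    from typing import Any, Optional

    from UnleashClient import UnleashClient
    from UnleashClient.cache import BaseCache


    class InMemoryCache(BaseCache):
        # Illustrative cache: keeps everything in a plain dict, so nothing survives a restart.
        def __init__(self) -> None:
            self._data: dict = {}

        def set(self, key: str, value: Any):
            self._data[key] = value

        def mset(self, data: dict):
            self._data.update(data)

        def get(self, key: str, default: Optional[Any] = None):
            return self._data.get(key, default)

        def exists(self, key: str):
            return key in self._data

        def destroy(self):
            # Called from UnleashClient.destroy(); drop everything.
            self._data = {}


    # Placeholder URL and app name, for illustration only.
    client = UnleashClient(
        url="https://unleash.example.com/api",
        app_name="my-app",
        cache=InMemoryCache(),
    )
    client.initialize_client()

Because the client only stores the feature payload, the ETag, and the metrics timestamp through these five methods, the same pattern extends to Redis, memcached, or any other backend, provided the implementation can persist the values it is handed (they include dicts and datetime objects, not just strings).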