From d91d2d53c10b5d2db25922588826acab3749ab27 Mon Sep 17 00:00:00 2001 From: Henry Jewkes Date: Wed, 3 Mar 2021 19:55:39 -0800 Subject: [PATCH 01/17] Updating logging on full impressions queue --- splitio/storage/inmemmory.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/splitio/storage/inmemmory.py b/splitio/storage/inmemmory.py index 344af7f3..3cc8a3c7 100644 --- a/splitio/storage/inmemmory.py +++ b/splitio/storage/inmemmory.py @@ -321,8 +321,8 @@ def put(self, impressions): if self._queue_full_hook is not None and callable(self._queue_full_hook): self._queue_full_hook() _LOGGER.warning( - 'Event queue is full, failing to add more events. \n' - 'Consider increasing parameter `eventQueueSize` in configuration' + 'Impression queue is full, failing to add more impressions. \n' + 'Consider increasing parameter `impressionsQueueSize` in configuration' ) return False From 57563377a07645aabe3ff98ac2fb1e535b66c73e Mon Sep 17 00:00:00 2001 From: Matias Melograno Date: Mon, 5 Apr 2021 15:05:21 -0300 Subject: [PATCH 02/17] removed six --- setup.py | 1 - splitio/api/telemetry.py | 7 +++---- splitio/client/client.py | 3 +-- splitio/client/input_validator.py | 16 +++++++-------- splitio/client/listener.py | 4 +--- splitio/engine/evaluator.py | 3 +-- splitio/engine/hashfns/murmur3py.py | 3 --- splitio/engine/impressions.py | 4 +--- splitio/models/grammar/condition.py | 3 +-- splitio/models/grammar/matchers/base.py | 5 +---- splitio/models/grammar/matchers/misc.py | 3 +-- splitio/models/grammar/matchers/numeric.py | 3 +-- splitio/models/grammar/matchers/string.py | 9 ++++----- splitio/push/parser.py | 15 +++++--------- splitio/push/processor.py | 4 +--- splitio/push/splitsse.py | 5 ++--- splitio/push/status_tracker.py | 3 +-- splitio/recorder/recorder.py | 6 +----- splitio/storage/__init__.py | 23 +++++++--------------- splitio/storage/adapters/cache_trait.py | 4 +--- splitio/storage/adapters/redis.py | 9 ++++----- splitio/storage/adapters/util.py | 4 +--- splitio/storage/inmemmory.py | 2 +- splitio/sync/event.py | 2 +- splitio/sync/impression.py | 2 +- splitio/sync/synchronizer.py | 4 +--- splitio/sync/telemetry.py | 1 - splitio/tasks/events_sync.py | 2 -- splitio/tasks/impressions_sync.py | 2 -- splitio/tasks/util/asynctask.py | 3 +-- splitio/tasks/util/workerpool.py | 2 +- tests/engine/test_hashfns.py | 2 -- 32 files changed, 51 insertions(+), 108 deletions(-) diff --git a/setup.py b/setup.py index a1372a5e..6470793e 100644 --- a/setup.py +++ b/setup.py @@ -18,7 +18,6 @@ 'pyyaml>=5.1', 'future>=0.15.2', 'docopt>=0.6.2', - 'six>=1.10.0', 'enum34;python_version<"3.4"', 'futures>=3.0.5;python_version<"3"' ] diff --git a/splitio/api/telemetry.py b/splitio/api/telemetry.py index edf7f22d..395c0179 100644 --- a/splitio/api/telemetry.py +++ b/splitio/api/telemetry.py @@ -1,7 +1,6 @@ """Telemetry API Module.""" import logging -import six from future.utils import raise_from from splitio.api import APIException, headers_from_metadata @@ -39,7 +38,7 @@ def _build_latencies(latencies): """ return [ {'name': name, 'latencies': latencies_list} - for name, latencies_list in six.iteritems(latencies) + for name, latencies_list in latencies.items() ] def flush_latencies(self, latencies): @@ -77,7 +76,7 @@ def _build_gauges(gauges): """ return [ {'name': name, 'value': value} - for name, value in six.iteritems(gauges) + for name, value in gauges.items() ] def flush_gauges(self, gauges): @@ -115,7 +114,7 @@ def _build_counters(counters): """ return [ {'name': name, 'delta': value} - for name, 
value in six.iteritems(counters) + for name, value in counters.items() ] def flush_counters(self, counters): diff --git a/splitio/client/client.py b/splitio/client/client.py index 2d9b9169..1eb7cfcc 100644 --- a/splitio/client/client.py +++ b/splitio/client/client.py @@ -4,7 +4,6 @@ import logging import time -import six from splitio.engine.evaluator import Evaluator, CONTROL from splitio.engine.splitters import Splitter from splitio.models.impressions import Impression, Label @@ -309,7 +308,7 @@ def get_treatments(self, key, features, attributes=None): """ with_config = self._make_evaluations(key, features, attributes, 'get_treatments', self._METRIC_GET_TREATMENTS) - return {feature: result[0] for (feature, result) in six.iteritems(with_config)} + return {feature: result[0] for (feature, result) in with_config.items()} def _build_impression( # pylint: disable=too-many-arguments self, diff --git a/splitio/client/input_validator.py b/splitio/client/input_validator.py index 7d9598ea..7050c63b 100644 --- a/splitio/client/input_validator.py +++ b/splitio/client/input_validator.py @@ -7,8 +7,6 @@ import re import math -import six - from splitio.api import APIException from splitio.client.key import Key from splitio.engine.evaluator import CONTROL @@ -53,7 +51,7 @@ def _check_is_string(value, name, operation): :return: The result of validation :rtype: True|False """ - if isinstance(value, six.string_types) is False: + if isinstance(value, str) is False: _LOGGER.error( '%s: you passed an invalid %s, %s must be a non-empty string.', operation, name, name @@ -121,7 +119,7 @@ def _check_can_convert(value, name, operation): :return: The result of validation :rtype: None|string """ - if isinstance(value, six.string_types): + if isinstance(value, str): return value else: # check whether if isnan and isinf are really necessary @@ -173,7 +171,7 @@ def _check_valid_object_key(key, name, operation): '%s: you passed a null %s, %s must be a non-empty string.', operation, name, name) return None - if isinstance(key, six.string_types): + if isinstance(key, str): if not _check_string_not_empty(key, name, operation): return None key_str = _check_can_convert(key, name, operation) @@ -515,8 +513,8 @@ def valid_properties(properties): valid_properties = dict() - for property, element in six.iteritems(properties): - if not isinstance(property, six.string_types): # Exclude property if is not string + for property, element in properties.items(): + if not isinstance(property, str): # Exclude property if is not string continue valid_properties[property] = None @@ -525,14 +523,14 @@ def valid_properties(properties): if element is None: continue - if not isinstance(element, six.string_types) and not isinstance(element, Number) \ + if not isinstance(element, str) and not isinstance(element, Number) \ and not isinstance(element, bool): _LOGGER.warning('Property %s is of invalid type. 
Setting value to None', element) element = None valid_properties[property] = element - if isinstance(element, six.string_types): + if isinstance(element, str): size += len(element) if size > MAX_PROPERTIES_LENGTH_BYTES: diff --git a/splitio/client/listener.py b/splitio/client/listener.py index 1ab61e30..7ab09e99 100644 --- a/splitio/client/listener.py +++ b/splitio/client/listener.py @@ -2,7 +2,6 @@ import abc -from six import add_metaclass from future.utils import raise_from @@ -56,8 +55,7 @@ def log_impression(self, impression, attributes=None): exc ) -@add_metaclass(abc.ABCMeta) #pylint: disable=too-few-public-methods -class ImpressionListener(object): +class ImpressionListener(object, metaclass=abc.ABCMeta): """Impression listener interface.""" @abc.abstractmethod diff --git a/splitio/engine/evaluator.py b/splitio/engine/evaluator.py index d31e94bd..489c9ba2 100644 --- a/splitio/engine/evaluator.py +++ b/splitio/engine/evaluator.py @@ -1,6 +1,5 @@ """Split evaluator module.""" import logging -import six from splitio.models.grammar.condition import ConditionType from splitio.models.impressions import Label @@ -135,7 +134,7 @@ def evaluate_features(self, features, matching_key, bucketing_key, attributes=No return { feature: self._evaluate_treatment(feature, matching_key, bucketing_key, attributes, split) - for (feature, split) in six.iteritems(self._split_storage.fetch_many(features)) + for (feature, split) in self._split_storage.fetch_many(features).items() } def _get_treatment_for_split(self, split, matching_key, bucketing_key, attributes=None): diff --git a/splitio/engine/hashfns/murmur3py.py b/splitio/engine/hashfns/murmur3py.py index 8f9028af..a768a872 100644 --- a/splitio/engine/hashfns/murmur3py.py +++ b/splitio/engine/hashfns/murmur3py.py @@ -3,9 +3,6 @@ from __future__ import absolute_import, division, print_function, \ unicode_literals -from six.moves import range - - def murmur32_py(key, seed=0x0): """ Pure python implementation of murmur32 hash. 
diff --git a/splitio/engine/impressions.py b/splitio/engine/impressions.py index 91b2eecf..a1184867 100644 --- a/splitio/engine/impressions.py +++ b/splitio/engine/impressions.py @@ -4,8 +4,6 @@ from collections import defaultdict, namedtuple from enum import Enum -import six - from splitio.models.impressions import Impression from splitio.engine.hashfns import murmur_128 from splitio.engine.cache.lru import SimpleLruCache @@ -151,7 +149,7 @@ def pop_all(self): self._data = defaultdict(lambda: 0) return [Counter.CountPerFeature(k.feature, k.timeframe, v) - for (k, v) in six.iteritems(old)] + for (k, v) in old.items()] class Manager(object): # pylint:disable=too-few-public-methods diff --git a/splitio/models/grammar/condition.py b/splitio/models/grammar/condition.py index 961d5f54..d7ef66a4 100644 --- a/splitio/models/grammar/condition.py +++ b/splitio/models/grammar/condition.py @@ -2,7 +2,6 @@ from enum import Enum from future.utils import python_2_unicode_compatible -import six from splitio.models.grammar import matchers from splitio.models.grammar import partitions @@ -103,7 +102,7 @@ def to_json(self): 'label': self._label, 'matcherGroup': { 'combiner': next( - (k, v) for k, v in six.iteritems(_MATCHER_COMBINERS) if v == self._combiner + (k, v) for k, v in _MATCHER_COMBINERS.items() if v == self._combiner )[0], 'matchers': [m.to_json() for m in self.matchers] }, diff --git a/splitio/models/grammar/matchers/base.py b/splitio/models/grammar/matchers/base.py index d7d818d9..0040d700 100644 --- a/splitio/models/grammar/matchers/base.py +++ b/splitio/models/grammar/matchers/base.py @@ -1,13 +1,10 @@ """Abstract matcher module.""" import abc -from six import add_metaclass - from splitio.client.key import Key -@add_metaclass(abc.ABCMeta) -class Matcher(object): +class Matcher(object, metaclass=abc.ABCMeta): """Matcher abstract class.""" def __init__(self, raw_matcher): diff --git a/splitio/models/grammar/matchers/misc.py b/splitio/models/grammar/matchers/misc.py index 335160ed..3c7b1713 100644 --- a/splitio/models/grammar/matchers/misc.py +++ b/splitio/models/grammar/matchers/misc.py @@ -4,7 +4,6 @@ import json from future.utils import python_2_unicode_compatible -from six import string_types from splitio.models.grammar.matchers.base import Matcher @@ -85,7 +84,7 @@ def _match(self, key, attributes=None, context=None): return False if isinstance(matching_data, bool): decoded = matching_data - elif isinstance(matching_data, string_types): + elif isinstance(matching_data, str): try: decoded = json.loads(matching_data.lower()) if not isinstance(decoded, bool): diff --git a/splitio/models/grammar/matchers/numeric.py b/splitio/models/grammar/matchers/numeric.py index fa63a8ea..b912ff7e 100644 --- a/splitio/models/grammar/matchers/numeric.py +++ b/splitio/models/grammar/matchers/numeric.py @@ -3,7 +3,6 @@ import logging from future.utils import python_2_unicode_compatible -from six import string_types from splitio.models.grammar.matchers.base import Matcher from splitio.models import datatypes @@ -32,7 +31,7 @@ def ensure_int(cls, data): if isinstance(data, numbers.Integral) and not isinstance(data, bool): return data - if not isinstance(data, string_types): + if not isinstance(data, str): _LOGGER.error('Cannot convert %s to int. 
Failing.', type(data)) return None diff --git a/splitio/models/grammar/matchers/string.py b/splitio/models/grammar/matchers/string.py index 6043bb09..80f62de9 100644 --- a/splitio/models/grammar/matchers/string.py +++ b/splitio/models/grammar/matchers/string.py @@ -6,7 +6,6 @@ import json import re from future.utils import python_2_unicode_compatible -from six import string_types from splitio.models.grammar.matchers.base import Matcher @@ -31,7 +30,7 @@ def ensure_string(cls, data): if data is None: # Failed to fetch attribute. no need to convert. return None - if isinstance(data, string_types): + if isinstance(data, str): return data _LOGGER.warning( @@ -120,7 +119,7 @@ def _match(self, key, attributes=None, context=None): matching_data = Sanitizer.ensure_string(self._get_matcher_input(key, attributes)) if matching_data is None: return False - return (isinstance(key, string_types) and + return (isinstance(key, str) and any(matching_data.startswith(s) for s in self._whitelist)) def _add_matcher_specific_properties_to_json(self): @@ -168,7 +167,7 @@ def _match(self, key, attributes=None, context=None): matching_data = Sanitizer.ensure_string(self._get_matcher_input(key, attributes)) if matching_data is None: return False - return (isinstance(key, string_types) and + return (isinstance(key, str) and any(matching_data.endswith(s) for s in self._whitelist)) def _add_matcher_specific_properties_to_json(self): @@ -216,7 +215,7 @@ def _match(self, key, attributes=None, context=None): matching_data = Sanitizer.ensure_string(self._get_matcher_input(key, attributes)) if matching_data is None: return False - return (isinstance(matching_data, string_types) and + return (isinstance(matching_data, str) and any(s in matching_data for s in self._whitelist)) def _add_matcher_specific_properties_to_json(self): diff --git a/splitio/push/parser.py b/splitio/push/parser.py index fddd0a39..9b51097a 100644 --- a/splitio/push/parser.py +++ b/splitio/push/parser.py @@ -4,7 +4,6 @@ from enum import Enum from future.utils import raise_from -from six import add_metaclass from splitio.util.decorators import abstract_property from splitio.util import utctime_ms @@ -51,12 +50,11 @@ class EventParsingException(Exception): pass -@add_metaclass(abc.ABCMeta) #pylint:disable=too-few-public-methods -class BaseEvent(object): +class BaseEvent(object, metaclass=abc.ABCMeta): """Base event that reqiures subclasses tu have a type.""" @abstract_property - def event_type(self): #pylint:disable=no-self-use + def event_type(self): # pylint:disable=no-self-use """ Return the event type. @@ -92,7 +90,7 @@ def __init__(self, code, status_code, message, href): self._timestamp = utctime_ms() @property - def event_type(self): #pylint:disable=no-self-use + def event_type(self): # pylint:disable=no-self-use """ Return the event type. @@ -160,7 +158,6 @@ def should_be_ignored(self): """ return self._code < 40000 or self._code > 49999 - def is_retryable(self): """ Return whether this error is retryable or not. 
@@ -176,8 +173,7 @@ def __str__(self): (self.code, self.status_code, self.message, self.href) -@add_metaclass(abc.ABCMeta) -class BaseMessage(BaseEvent): +class BaseMessage(BaseEvent, metaclass=abc.ABCMeta): """Message type event.""" def __init__(self, channel, timestamp): @@ -282,8 +278,7 @@ def __str__(self): return "Occupancy - channel=%s, publishers=%d" % (self.channel, self.publishers) -@add_metaclass(abc.ABCMeta) -class BaseUpdate(BaseMessage): +class BaseUpdate(BaseMessage, metaclass=abc.ABCMeta): """Split data update notification.""" def __init__(self, channel, timestamp, change_number): diff --git a/splitio/push/processor.py b/splitio/push/processor.py index 189e7a9a..39329b6b 100644 --- a/splitio/push/processor.py +++ b/splitio/push/processor.py @@ -2,8 +2,6 @@ from queue import Queue -from six import raise_from - from splitio.push.parser import UpdateType from splitio.push.splitworker import SplitWorker from splitio.push.segmentworker import SegmentWorker @@ -83,7 +81,7 @@ def handle(self, event): try: handle = self._handlers[event.update_type] except KeyError as exc: - raise_from('no handler for notification type: %s' % event.update_type, exc) + raise Exception('no handler for notification type: %s' % event.update_type) from exc handle(event) diff --git a/splitio/push/splitsse.py b/splitio/push/splitsse.py index 3f3236ed..e6a60324 100644 --- a/splitio/push/splitsse.py +++ b/splitio/push/splitsse.py @@ -2,7 +2,6 @@ import logging import threading from enum import Enum -import six from splitio.push.sse import SSEClient, SSE_EVENT_ERROR from splitio.util.threadutil import EventGroup @@ -75,9 +74,9 @@ def _format_channels(channels): :returns: channels as a list of strings. :rtype: list[str] """ - regular = [k for (k, v) in six.iteritems(channels) if v == ['subscribe']] + regular = [k for (k, v) in channels.items() if v == ['subscribe']] occupancy = ['[?occupancy=metrics.publishers]' + k - for (k, v) in six.iteritems(channels) + for (k, v) in channels.items() if 'channel-metadata:publishers' in v] return regular + occupancy diff --git a/splitio/push/status_tracker.py b/splitio/push/status_tracker.py index 170b3084..6acd5d95 100644 --- a/splitio/push/status_tracker.py +++ b/splitio/push/status_tracker.py @@ -1,7 +1,6 @@ """NotificationManagerKeeper implementation.""" from enum import Enum import logging -import six from splitio.push.parser import ControlType @@ -195,4 +194,4 @@ def _occupancy_ok(self): :returns: True if publisher count is enough. 
False otherwise :rtype: bool """ - return any(count > 0 for (chan, count) in six.iteritems(self._publishers)) + return any(count > 0 for (chan, count) in self._publishers.items()) diff --git a/splitio/recorder/recorder.py b/splitio/recorder/recorder.py index 7583e2e5..c009e1eb 100644 --- a/splitio/recorder/recorder.py +++ b/splitio/recorder/recorder.py @@ -3,14 +3,10 @@ import logging -from six import add_metaclass - - _LOGGER = logging.getLogger(__name__) -@add_metaclass(abc.ABCMeta) -class StatsRecorder(object): +class StatsRecorder(object, metaclass=abc.ABCMeta): """StatsRecorder interface.""" @abc.abstractmethod diff --git a/splitio/storage/__init__.py b/splitio/storage/__init__.py index 10f2ade3..a40bc155 100644 --- a/splitio/storage/__init__.py +++ b/splitio/storage/__init__.py @@ -3,11 +3,8 @@ import abc -from six import add_metaclass - -@add_metaclass(abc.ABCMeta) -class SplitStorage(object): +class SplitStorage(object, metaclass=abc.ABCMeta): """Split storage interface implemented as an abstract class.""" @abc.abstractmethod @@ -133,8 +130,7 @@ def kill_locally(self, split_name, default_treatment, change_number): pass -@add_metaclass(abc.ABCMeta) -class SegmentStorage(object): +class SegmentStorage(object, metaclass=abc.ABCMeta): """Segment storage interface implemented as an abstract class.""" @abc.abstractmethod @@ -213,8 +209,7 @@ def segment_contains(self, segment_name, key): pass -@add_metaclass(abc.ABCMeta) -class ImpressionStorage(object): +class ImpressionStorage(object, metaclass=abc.ABCMeta): """Impressions storage interface.""" @abc.abstractmethod @@ -245,8 +240,7 @@ def clear(self): pass -@add_metaclass(abc.ABCMeta) -class ImpressionPipelinedStorage(object): +class ImpressionPipelinedStorage(object, metaclass=abc.ABCMeta): """Impression Pipelined Storage interface.""" @abc.abstractmethod @@ -262,8 +256,7 @@ def add_impressions_to_pipe(self, impressions, pipe): pass -@add_metaclass(abc.ABCMeta) -class EventStorage(object): +class EventStorage(object, metaclass=abc.ABCMeta): """Events storage interface.""" @abc.abstractmethod @@ -294,8 +287,7 @@ def clear(self): pass -@add_metaclass(abc.ABCMeta) -class TelemetryStorage(object): +class TelemetryStorage(object, metaclass=abc.ABCMeta): """Telemetry storage interface.""" @abc.abstractmethod @@ -368,8 +360,7 @@ def clear(self): pass -@add_metaclass(abc.ABCMeta) -class TelemetryPipelinedStorage(object): +class TelemetryPipelinedStorage(object, metaclass=abc.ABCMeta): """Telemetry Pipelined Storage interface.""" @abc.abstractmethod diff --git a/splitio/storage/adapters/cache_trait.py b/splitio/storage/adapters/cache_trait.py index 840befc8..d3db3b67 100644 --- a/splitio/storage/adapters/cache_trait.py +++ b/splitio/storage/adapters/cache_trait.py @@ -4,8 +4,6 @@ import time from functools import update_wrapper -import six - DEFAULT_MAX_AGE = 5 DEFAULT_MAX_SIZE = 100 @@ -90,7 +88,7 @@ def remove_expired(self): """Remove expired elements.""" with self._lock: self._data = { - key: value for (key, value) in six.iteritems(self._data) + key: value for (key, value) in self._data.items() if not self._is_expired(value) } diff --git a/splitio/storage/adapters/redis.py b/splitio/storage/adapters/redis.py index 4052cf0c..19c3a3be 100644 --- a/splitio/storage/adapters/redis.py +++ b/splitio/storage/adapters/redis.py @@ -3,7 +3,6 @@ unicode_literals from builtins import str -from six import string_types, binary_type from future.utils import raise_from try: @@ -78,15 +77,15 @@ def add_prefix(self, k): :returns: Key(s) with prefix if 
applicable """ if self._prefix: - if isinstance(k, string_types): + if isinstance(k, str): return '{prefix}.{key}'.format(prefix=self._prefix, key=k) elif isinstance(k, list) and k: - if isinstance(k[0], binary_type): + if isinstance(k[0], bytes): return [ '{prefix}.{key}'.format(prefix=self._prefix, key=key.decode("utf8")) for key in k ] - elif isinstance(k[0], string_types): + elif isinstance(k[0], str): return [ '{prefix}.{key}'.format(prefix=self._prefix, key=key) for key in k @@ -109,7 +108,7 @@ def remove_prefix(self, k): :returns: prefix-less key(s) """ if self._prefix: - if isinstance(k, string_types): + if isinstance(k, str): return k[len(self._prefix)+1:] elif isinstance(k, list): return [key[len(self._prefix)+1:] for key in k] diff --git a/splitio/storage/adapters/util.py b/splitio/storage/adapters/util.py index d1dc9c19..f8602602 100644 --- a/splitio/storage/adapters/util.py +++ b/splitio/storage/adapters/util.py @@ -1,7 +1,5 @@ """Custom utilities.""" -import six - class DynamicDecorator(object): #pylint: disable=too-few-public-methods """ @@ -82,7 +80,7 @@ def __init__(self, *args, **kwargs): positional = [pos_func(*args, **kwargs) for pos_func in positional_args_lambdas] keyword = { key: func(*args, **kwargs) - for (key, func) in six.iteritems(keyword_args_lambdas) + for (key, func) in keyword_args_lambdas.items() } # call original class constructor diff --git a/splitio/storage/inmemmory.py b/splitio/storage/inmemmory.py index 344af7f3..e5fedc63 100644 --- a/splitio/storage/inmemmory.py +++ b/splitio/storage/inmemmory.py @@ -3,9 +3,9 @@ import logging import threading +import queue from collections import Counter -from six.moves import queue from splitio.models.segments import Segment from splitio.storage import SplitStorage, SegmentStorage, ImpressionStorage, EventStorage, \ TelemetryStorage diff --git a/splitio/sync/event.py b/splitio/sync/event.py index 44fc0d82..06c944b0 100644 --- a/splitio/sync/event.py +++ b/splitio/sync/event.py @@ -1,5 +1,5 @@ import logging -from six.moves import queue +import queue from splitio.api import APIException diff --git a/splitio/sync/impression.py b/splitio/sync/impression.py index a46ef2f6..51505d1c 100644 --- a/splitio/sync/impression.py +++ b/splitio/sync/impression.py @@ -1,5 +1,5 @@ import logging -from six.moves import queue +import queue from splitio.api import APIException diff --git a/splitio/sync/synchronizer.py b/splitio/sync/synchronizer.py index 5e70d84a..b27e9146 100644 --- a/splitio/sync/synchronizer.py +++ b/splitio/sync/synchronizer.py @@ -4,7 +4,6 @@ import logging import threading -from six import add_metaclass from future.utils import raise_from from splitio.api import APIException @@ -130,8 +129,7 @@ def impressions_count_task(self): return self._impressions_count_task -@add_metaclass(abc.ABCMeta) -class BaseSynchronizer(object): +class BaseSynchronizer(object, metaclass=abc.ABCMeta): """Synchronizer interface.""" @abc.abstractmethod diff --git a/splitio/sync/telemetry.py b/splitio/sync/telemetry.py index 82a64a37..f0e48613 100644 --- a/splitio/sync/telemetry.py +++ b/splitio/sync/telemetry.py @@ -1,5 +1,4 @@ import logging -from six.moves import queue from splitio.api import APIException diff --git a/splitio/tasks/events_sync.py b/splitio/tasks/events_sync.py index 4b13a351..45d426cd 100644 --- a/splitio/tasks/events_sync.py +++ b/splitio/tasks/events_sync.py @@ -4,8 +4,6 @@ import logging -from six.moves import queue -from splitio.api import APIException from splitio.tasks import BaseSynchronizationTask from 
splitio.tasks.util.asynctask import AsyncTask diff --git a/splitio/tasks/impressions_sync.py b/splitio/tasks/impressions_sync.py index ea9c07ca..9ffaa37b 100644 --- a/splitio/tasks/impressions_sync.py +++ b/splitio/tasks/impressions_sync.py @@ -4,8 +4,6 @@ import logging -from six.moves import queue - from splitio.tasks import BaseSynchronizationTask from splitio.tasks.util.asynctask import AsyncTask diff --git a/splitio/tasks/util/asynctask.py b/splitio/tasks/util/asynctask.py index cbeb09ac..63a9f3fc 100644 --- a/splitio/tasks/util/asynctask.py +++ b/splitio/tasks/util/asynctask.py @@ -1,8 +1,7 @@ """Asynchronous tasks that can be controlled.""" import threading import logging - -from six.moves import queue +import queue __TASK_STOP__ = 0 diff --git a/splitio/tasks/util/workerpool.py b/splitio/tasks/util/workerpool.py index ee9c13ca..32957ee6 100644 --- a/splitio/tasks/util/workerpool.py +++ b/splitio/tasks/util/workerpool.py @@ -2,7 +2,7 @@ import logging from threading import Thread, Event -from six.moves import queue +import queue _LOGGER = logging.getLogger(__name__) diff --git a/tests/engine/test_hashfns.py b/tests/engine/test_hashfns.py index c4f87592..36252fc7 100644 --- a/tests/engine/test_hashfns.py +++ b/tests/engine/test_hashfns.py @@ -5,8 +5,6 @@ import os import pytest -import six - from splitio.engine import hashfns, splitters from splitio.engine.hashfns.murmur3py import hash128_x64 as murmur3_128_py from splitio.models import splits From 1ae4b26371b22be2cb1dc70f7836727888619556 Mon Sep 17 00:00:00 2001 From: Matias Melograno Date: Mon, 5 Apr 2021 15:10:47 -0300 Subject: [PATCH 03/17] updated travis --- .travis.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 36ec7ea3..907676c5 100644 --- a/.travis.yml +++ b/.travis.yml @@ -5,7 +5,6 @@ git: matrix: include: - - python: '2.7' - python: '3.6' after_success: - bash sonar-scanner.sh From de1ada6b2df9e81f0dafcc0e0ac1d0f498157533 Mon Sep 17 00:00:00 2001 From: Matias Melograno Date: Mon, 5 Apr 2021 16:49:45 -0300 Subject: [PATCH 04/17] removed future --- setup.py | 8 +-- splitio/__init__.py | 3 - splitio/api/auth.py | 4 +- splitio/api/client.py | 7 +- splitio/api/events.py | 4 +- splitio/api/impressions.py | 6 +- splitio/api/segments.py | 4 +- splitio/api/splits.py | 4 +- splitio/api/telemetry.py | 8 +-- splitio/client/client.py | 3 - splitio/client/config.py | 2 - splitio/client/factory.py | 3 - splitio/client/input_validator.py | 3 - splitio/client/key.py | 2 - splitio/client/listener.py | 12 ++-- splitio/client/localhost.py | 8 --- splitio/client/manager.py | 2 - splitio/engine/hashfns/__init__.py | 12 ++-- splitio/engine/hashfns/legacy.py | 2 - splitio/engine/hashfns/murmur3py.py | 73 ++++++++++----------- splitio/engine/splitters.py | 3 - splitio/exceptions.py | 2 - splitio/models/events.py | 2 - splitio/models/grammar/condition.py | 14 +--- splitio/models/grammar/matchers/__init__.py | 3 - splitio/models/grammar/matchers/keys.py | 15 +---- splitio/models/grammar/matchers/misc.py | 4 -- splitio/models/grammar/matchers/numeric.py | 7 -- splitio/models/grammar/matchers/sets.py | 32 --------- splitio/models/grammar/matchers/string.py | 32 --------- splitio/models/grammar/partitions.py | 8 --- splitio/models/impressions.py | 3 - splitio/models/splits.py | 14 ---- splitio/push/parser.py | 28 ++++---- splitio/push/splitsse.py | 2 +- splitio/push/sse.py | 4 +- splitio/storage/__init__.py | 2 - splitio/storage/adapters/redis.py | 52 +++++++-------- splitio/storage/adapters/uwsgi_cache.py | 9 ++- 
splitio/storage/inmemmory.py | 2 - splitio/storage/redis.py | 3 - splitio/sync/split.py | 12 +--- splitio/sync/synchronizer.py | 5 +- splitio/tasks/events_sync.py | 3 - splitio/tasks/impressions_sync.py | 3 - tests/client/test_input_validator.py | 5 -- 46 files changed, 110 insertions(+), 329 deletions(-) diff --git a/setup.py b/setup.py index 6470793e..46e3e2e0 100644 --- a/setup.py +++ b/setup.py @@ -1,5 +1,5 @@ """Setup module.""" -#!/usr/bin/env python +# !/usr/bin/env python from os import path from setuptools import setup, find_packages @@ -16,10 +16,8 @@ INSTALL_REQUIRES = [ 'requests>=2.9.1', 'pyyaml>=5.1', - 'future>=0.15.2', 'docopt>=0.6.2', 'enum34;python_version<"3.4"', - 'futures>=3.0.5;python_version<"3"' ] with open(path.join(path.abspath(path.dirname(__file__)), 'splitio', 'version.py')) as f: @@ -27,12 +25,12 @@ setup( name='splitio_client', - version=__version__, # pylint: disable=undefined-variable + version=__version__, # pylint: disable=undefined-variable description='Split.io Python Client', author='Patricio Echague, Sebastian Arrubia', author_email='pato@split.io, sebastian@split.io', url='https://github.com/splitio/python-client', - download_url=('https://github.com/splitio/python-client/tarball/' + __version__), # pylint: disable=undefined-variable + download_url=('https://github.com/splitio/python-client/tarball/' + __version__), # pylint: disable=undefined-variable license='Apache License 2.0', install_requires=INSTALL_REQUIRES, tests_require=TESTS_REQUIRES, diff --git a/splitio/__init__.py b/splitio/__init__.py index b5cea2b5..aced4602 100644 --- a/splitio/__init__.py +++ b/splitio/__init__.py @@ -1,6 +1,3 @@ -from __future__ import absolute_import, division, print_function, \ - unicode_literals - from splitio.client.factory import get_factory from splitio.client.key import Key from splitio.version import __version__ diff --git a/splitio/api/auth.py b/splitio/api/auth.py index b7fd412e..652c241c 100644 --- a/splitio/api/auth.py +++ b/splitio/api/auth.py @@ -3,8 +3,6 @@ import logging import json -from future.utils import raise_from - from splitio.api import APIException, headers_from_metadata from splitio.api.client import HttpClientException from splitio.models.token import from_raw @@ -53,4 +51,4 @@ def authenticate(self): except HttpClientException as exc: _LOGGER.error('Exception raised while authenticating') _LOGGER.debug('Exception information: ', exc_info=True) - raise_from(APIException('Could not perform authentication.'), exc) + raise APIException('Could not perform authentication.') from exc diff --git a/splitio/api/client.py b/splitio/api/client.py index abafaee4..505547e5 100644 --- a/splitio/api/client.py +++ b/splitio/api/client.py @@ -1,9 +1,6 @@ """Synchronous HTTP Client for split API.""" -from __future__ import division - from collections import namedtuple -from future.utils import raise_from import requests HttpResponse = namedtuple('HttpResponse', ['status_code', 'body']) @@ -107,7 +104,7 @@ def get(self, server, path, apikey, query=None, extra_headers=None): # pylint: ) return HttpResponse(response.status_code, response.text) except Exception as exc: # pylint: disable=broad-except - raise_from(HttpClientException('requests library is throwing exceptions'), exc) + raise HttpClientException('requests library is throwing exceptions') from exc def post(self, server, path, apikey, body, query=None, extra_headers=None): # pylint: disable=too-many-arguments """ @@ -144,4 +141,4 @@ def post(self, server, path, apikey, body, query=None, 
extra_headers=None): # p ) return HttpResponse(response.status_code, response.text) except Exception as exc: # pylint: disable=broad-except - raise_from(HttpClientException('requests library is throwing exceptions'), exc) + raise HttpClientException('requests library is throwing exceptions') from exc diff --git a/splitio/api/events.py b/splitio/api/events.py index 941b08c9..84b0f52c 100644 --- a/splitio/api/events.py +++ b/splitio/api/events.py @@ -1,8 +1,6 @@ """Events API module.""" import logging -from future.utils import raise_from - from splitio.api import APIException, headers_from_metadata from splitio.api.client import HttpClientException @@ -75,4 +73,4 @@ def flush_events(self, events): except HttpClientException as exc: _LOGGER.error('Error posting events because an exception was raised by the HTTPClient') _LOGGER.debug('Error: ', exc_info=True) - raise_from(APIException('Events not flushed properly.'), exc) + raise APIException('Events not flushed properly.') from exc diff --git a/splitio/api/impressions.py b/splitio/api/impressions.py index 9dc3e94e..ffc1452c 100644 --- a/splitio/api/impressions.py +++ b/splitio/api/impressions.py @@ -3,8 +3,6 @@ import logging from itertools import groupby -from future.utils import raise_from - from splitio.api import APIException, headers_from_metadata from splitio.api.client import HttpClientException from splitio.engine.impressions import ImpressionsMode @@ -107,7 +105,7 @@ def flush_impressions(self, impressions): 'Error posting impressions because an exception was raised by the HTTPClient' ) _LOGGER.debug('Error: ', exc_info=True) - raise_from(APIException('Impressions not flushed properly.'), exc) + raise APIException('Impressions not flushed properly.') from exc def flush_counters(self, counters): """ @@ -133,4 +131,4 @@ def flush_counters(self, counters): 'HTTPClient' ) _LOGGER.debug('Error: ', exc_info=True) - raise_from(APIException('Impressions not flushed properly.'), exc) + raise APIException('Impressions not flushed properly.') from exc diff --git a/splitio/api/segments.py b/splitio/api/segments.py index c4a6d6c3..3221d0d1 100644 --- a/splitio/api/segments.py +++ b/splitio/api/segments.py @@ -3,8 +3,6 @@ import json import logging -from future.utils import raise_from - from splitio.api import APIException, headers_from_metadata from splitio.api.client import HttpClientException @@ -62,4 +60,4 @@ def fetch_segment(self, segment_name, change_number): segment_name ) _LOGGER.debug('Error: ', exc_info=True) - raise_from(APIException('Segments not fetched properly.'), exc) + raise APIException('Segments not fetched properly.') from exc diff --git a/splitio/api/splits.py b/splitio/api/splits.py index a1fb5404..84e3dcfc 100644 --- a/splitio/api/splits.py +++ b/splitio/api/splits.py @@ -3,8 +3,6 @@ import logging import json -from future.utils import raise_from - from splitio.api import APIException, headers_from_metadata from splitio.api.client import HttpClientException @@ -55,4 +53,4 @@ def fetch_splits(self, change_number): except HttpClientException as exc: _LOGGER.error('Error fetching splits because an exception was raised by the HTTPClient') _LOGGER.debug('Error: ', exc_info=True) - raise_from(APIException('Splits not fetched correctly.'), exc) + raise APIException('Splits not fetched correctly.') from exc diff --git a/splitio/api/telemetry.py b/splitio/api/telemetry.py index 395c0179..db4ae682 100644 --- a/splitio/api/telemetry.py +++ b/splitio/api/telemetry.py @@ -1,8 +1,6 @@ """Telemetry API Module.""" import logging -from 
future.utils import raise_from - from splitio.api import APIException, headers_from_metadata from splitio.api.client import HttpClientException @@ -64,7 +62,7 @@ def flush_latencies(self, latencies): 'Error posting latencies because an exception was raised by the HTTPClient' ) _LOGGER.debug('Error: ', exc_info=True) - raise_from(APIException('Latencies not flushed correctly.'), exc) + raise APIException('Latencies not flushed correctly.') from exc @staticmethod def _build_gauges(gauges): @@ -102,7 +100,7 @@ def flush_gauges(self, gauges): 'Error posting gauges because an exception was raised by the HTTPClient' ) _LOGGER.debug('Error: ', exc_info=True) - raise_from(APIException('Gauges not flushed correctly.'), exc) + raise APIException('Gauges not flushed correctly.') from exc @staticmethod def _build_counters(counters): @@ -140,4 +138,4 @@ def flush_counters(self, counters): 'Error posting counters because an exception was raised by the HTTPClient' ) _LOGGER.debug('Error: ', exc_info=True) - raise_from(APIException('Counters not flushed correctly.'), exc) + raise APIException('Counters not flushed correctly.') from exc diff --git a/splitio/client/client.py b/splitio/client/client.py index 1eb7cfcc..d2e8fa87 100644 --- a/splitio/client/client.py +++ b/splitio/client/client.py @@ -1,7 +1,4 @@ """A module for Split.io SDK API clients.""" -from __future__ import absolute_import, division, print_function, \ - unicode_literals - import logging import time from splitio.engine.evaluator import Evaluator, CONTROL diff --git a/splitio/client/config.py b/splitio/client/config.py index 9f7efe98..b051a2c0 100644 --- a/splitio/client/config.py +++ b/splitio/client/config.py @@ -1,6 +1,4 @@ """Default settings for the Split.IO SDK Python client.""" -from __future__ import absolute_import, division, print_function, unicode_literals - import os.path import logging diff --git a/splitio/client/factory.py b/splitio/client/factory.py index f9ea77d2..28389094 100644 --- a/splitio/client/factory.py +++ b/splitio/client/factory.py @@ -1,7 +1,4 @@ """A module for Split.io Factories.""" -from __future__ import absolute_import, division, print_function, unicode_literals - - import logging import threading from collections import Counter diff --git a/splitio/client/input_validator.py b/splitio/client/input_validator.py index 7050c63b..b2478c6c 100644 --- a/splitio/client/input_validator.py +++ b/splitio/client/input_validator.py @@ -1,7 +1,4 @@ """Input validation module.""" -from __future__ import absolute_import, division, print_function, \ - unicode_literals - from numbers import Number import logging import re diff --git a/splitio/client/key.py b/splitio/client/key.py index e50d43ba..a21e8574 100644 --- a/splitio/client/key.py +++ b/splitio/client/key.py @@ -1,6 +1,4 @@ """A module for Split.io SDK API clients.""" -from __future__ import absolute_import, division, print_function, \ - unicode_literals class Key(object): diff --git a/splitio/client/listener.py b/splitio/client/listener.py index 7ab09e99..3d2ea62c 100644 --- a/splitio/client/listener.py +++ b/splitio/client/listener.py @@ -2,8 +2,6 @@ import abc -from future.utils import raise_from - class ImpressionListenerException(Exception): """Custom Exception for Impression Listener.""" @@ -11,7 +9,7 @@ class ImpressionListenerException(Exception): pass -class ImpressionListenerWrapper(object): #pylint: disable=too-few-public-methods +class ImpressionListenerWrapper(object): # pylint: disable=too-few-public-methods """ Impression listener safe-execution 
wrapper. @@ -49,11 +47,9 @@ def log_impression(self, impression, attributes=None): data['instance-id'] = self._metadata.instance_name try: self.impression_listener.log_impression(data) - except Exception as exc: #pylint: disable=broad-except - raise_from( - ImpressionListenerException('Error in log_impression user\'s method is throwing exceptions'), - exc - ) + except Exception as exc: # pylint: disable=broad-except + raise ImpressionListenerException('Error in log_impression user\'s method is throwing exceptions') from exc + class ImpressionListener(object, metaclass=abc.ABCMeta): """Impression listener interface.""" diff --git a/splitio/client/localhost.py b/splitio/client/localhost.py index f691d531..99dba5fd 100644 --- a/splitio/client/localhost.py +++ b/splitio/client/localhost.py @@ -1,16 +1,8 @@ """Localhost client mocked components.""" - -import itertools import logging import re -from future.utils import raise_from -import yaml - -from splitio.models import splits from splitio.storage import ImpressionStorage, EventStorage, TelemetryStorage -from splitio.tasks import BaseSynchronizationTask -from splitio.tasks.util import asynctask _LEGACY_COMMENT_LINE_RE = re.compile(r'^#.*$') _LEGACY_DEFINITION_LINE_RE = re.compile(r'^(?<![^#])(?P<feature>[\w_-]+)\s+(?P<treatment>[\w_-]+)$') diff --git a/splitio/client/manager.py b/splitio/client/manager.py index 74c83a3e..dfb09f5a 100644 --- a/splitio/client/manager.py +++ b/splitio/client/manager.py @@ -1,6 +1,4 @@ """A module for Split.io Managers.""" -from __future__ import absolute_import, division, print_function, unicode_literals - import logging from . import input_validator diff --git a/splitio/engine/hashfns/__init__.py b/splitio/engine/hashfns/__init__.py index 8e581502..31c14a00 100644 --- a/splitio/engine/hashfns/__init__.py +++ b/splitio/engine/hashfns/__init__.py @@ -5,9 +5,6 @@ as well as the optional import (if installed) of a C compiled murmur hash function with python bindings. 
""" -from __future__ import absolute_import, division, print_function, \ - unicode_literals - from splitio.models.splits import HashAlgorithm from splitio.engine.hashfns import legacy @@ -23,9 +20,9 @@ def _murmur_hash128(key, seed): except ImportError: # Fallback to interpreted python hash algoritm (slower) - from splitio.engine.hashfns import murmur3py #pylint: disable=ungrouped-imports - _murmur_hash = murmur3py.murmur32_py #pylint: disable=invalid-name - _murmur_hash128 = lambda k, s: murmur3py.hash128_x64(k, s)[0] #pylint: disable=invalid-name + from splitio.engine.hashfns import murmur3py # pylint: disable=ungrouped-imports + _murmur_hash = murmur3py.murmur32_py # pylint: disable=invalid-name + _murmur_hash128 = lambda k, s: murmur3py.hash128_x64(k, s)[0] # pylint: disable=invalid-name _HASH_ALGORITHMS = { @@ -33,7 +30,8 @@ def _murmur_hash128(key, seed): HashAlgorithm.MURMUR: _murmur_hash } -murmur_128 = _murmur_hash128 #pylint: disable=invalid-name +murmur_128 = _murmur_hash128 # pylint: disable=invalid-name + def get_hash_fn(algo): """ diff --git a/splitio/engine/hashfns/legacy.py b/splitio/engine/hashfns/legacy.py index 1eb4397c..1a2dc267 100644 --- a/splitio/engine/hashfns/legacy.py +++ b/splitio/engine/hashfns/legacy.py @@ -1,6 +1,4 @@ """Legacy hash function module.""" -from __future__ import absolute_import, division, print_function, \ - unicode_literals def as_int32(value): diff --git a/splitio/engine/hashfns/murmur3py.py b/splitio/engine/hashfns/murmur3py.py index a768a872..858427c0 100644 --- a/splitio/engine/hashfns/murmur3py.py +++ b/splitio/engine/hashfns/murmur3py.py @@ -1,7 +1,5 @@ """MurmurHash3 hash module.""" -from __future__ import absolute_import, division, print_function, \ - unicode_literals def murmur32_py(key, seed=0x0): """ @@ -72,6 +70,7 @@ def fmix(current_hash): unsigned_val = fmix(hash1 ^ length) return unsigned_val + def hash128_x64(key, seed): """ Pure python implementation of murmurhash3-128. @@ -82,9 +81,9 @@ def hash128_x64(key, seed): def fmix(k): k ^= k >> 33 - k = (k * 0xff51afd7ed558ccd) & 0xFFFFFFFFFFFFFFFF + k = (k * 0xff51afd7ed558ccd) & 0xFFFFFFFFFFFFFFFF k ^= k >> 33 - k = (k * 0xc4ceb9fe1a85ec53) & 0xFFFFFFFFFFFFFFFF + k = (k * 0xc4ceb9fe1a85ec53) & 0xFFFFFFFFFFFFFFFF k ^= k >> 33 return k @@ -97,7 +96,7 @@ def fmix(k): c1 = 0x87c37b91114253d5 c2 = 0x4cf5ad432745937f - #body + # body for block_start in range(0, nblocks * 8, 8): # ??? big endian? 
k1 = key[2 * block_start + 7] << 56 | \ @@ -106,37 +105,37 @@ def fmix(k): key[2 * block_start + 4] << 32 | \ key[2 * block_start + 3] << 24 | \ key[2 * block_start + 2] << 16 | \ - key[2 * block_start + 1] << 8 | \ + key[2 * block_start + 1] << 8 | \ key[2 * block_start + 0] k2 = key[2 * block_start + 15] << 56 | \ - key[2 * block_start + 14] << 48 | \ - key[2 * block_start + 13] << 40 | \ - key[2 * block_start + 12] << 32 | \ - key[2 * block_start + 11] << 24 | \ - key[2 * block_start + 10] << 16 | \ - key[2 * block_start + 9] << 8 | \ - key[2 * block_start + 8] - - k1 = (c1 * k1) & 0xFFFFFFFFFFFFFFFF - k1 = (k1 << 31 | k1 >> 33) & 0xFFFFFFFFFFFFFFFF # inlined ROTL64 - k1 = (c2 * k1) & 0xFFFFFFFFFFFFFFFF + key[2 * block_start + 14] << 48 | \ + key[2 * block_start + 13] << 40 | \ + key[2 * block_start + 12] << 32 | \ + key[2 * block_start + 11] << 24 | \ + key[2 * block_start + 10] << 16 | \ + key[2 * block_start + 9] << 8 | \ + key[2 * block_start + 8] + + k1 = (c1 * k1) & 0xFFFFFFFFFFFFFFFF + k1 = (k1 << 31 | k1 >> 33) & 0xFFFFFFFFFFFFFFFF # inlined ROTL64 + k1 = (c2 * k1) & 0xFFFFFFFFFFFFFFFF h1 ^= k1 - h1 = (h1 << 27 | h1 >> 37) & 0xFFFFFFFFFFFFFFFF # inlined ROTL64 + h1 = (h1 << 27 | h1 >> 37) & 0xFFFFFFFFFFFFFFFF # inlined ROTL64 h1 = (h1 + h2) & 0xFFFFFFFFFFFFFFFF h1 = (h1 * 5 + 0x52dce729) & 0xFFFFFFFFFFFFFFFF - k2 = (c2 * k2) & 0xFFFFFFFFFFFFFFFF - k2 = (k2 << 33 | k2 >> 31) & 0xFFFFFFFFFFFFFFFF # inlined ROTL64 - k2 = (c1 * k2) & 0xFFFFFFFFFFFFFFFF + k2 = (c2 * k2) & 0xFFFFFFFFFFFFFFFF + k2 = (k2 << 33 | k2 >> 31) & 0xFFFFFFFFFFFFFFFF # inlined ROTL64 + k2 = (c1 * k2) & 0xFFFFFFFFFFFFFFFF h2 ^= k2 - h2 = (h2 << 31 | h2 >> 33) & 0xFFFFFFFFFFFFFFFF # inlined ROTL64 + h2 = (h2 << 31 | h2 >> 33) & 0xFFFFFFFFFFFFFFFF # inlined ROTL64 h2 = (h1 + h2) & 0xFFFFFFFFFFFFFFFF h2 = (h2 * 5 + 0x38495ab5) & 0xFFFFFFFFFFFFFFFF - #tail + # tail tail_index = nblocks * 16 k1 = 0 k2 = 0 @@ -154,13 +153,13 @@ def fmix(k): k2 ^= key[tail_index + 10] << 16 if tail_size >= 10: k2 ^= key[tail_index + 9] << 8 - if tail_size >= 9: + if tail_size >= 9: k2 ^= key[tail_index + 8] if tail_size > 8: - k2 = (k2 * c2) & 0xFFFFFFFFFFFFFFFF - k2 = (k2 << 33 | k2 >> 31) & 0xFFFFFFFFFFFFFFFF # inlined ROTL64 - k2 = (k2 * c1) & 0xFFFFFFFFFFFFFFFF + k2 = (k2 * c2) & 0xFFFFFFFFFFFFFFFF + k2 = (k2 << 33 | k2 >> 31) & 0xFFFFFFFFFFFFFFFF # inlined ROTL64 + k2 = (k2 * c1) & 0xFFFFFFFFFFFFFFFF h2 ^= k2 if tail_size >= 8: @@ -181,22 +180,22 @@ def fmix(k): k1 ^= key[tail_index + 0] if tail_size > 0: - k1 = (k1 * c1) & 0xFFFFFFFFFFFFFFFF - k1 = (k1 << 31 | k1 >> 33) & 0xFFFFFFFFFFFFFFFF # inlined ROTL64 - k1 = (k1 * c2) & 0xFFFFFFFFFFFFFFFF + k1 = (k1 * c1) & 0xFFFFFFFFFFFFFFFF + k1 = (k1 << 31 | k1 >> 33) & 0xFFFFFFFFFFFFFFFF # inlined ROTL64 + k1 = (k1 * c2) & 0xFFFFFFFFFFFFFFFF h1 ^= k1 - #finalization + # finalization h1 ^= length h2 ^= length - h1 = (h1 + h2) & 0xFFFFFFFFFFFFFFFF - h2 = (h1 + h2) & 0xFFFFFFFFFFFFFFFF + h1 = (h1 + h2) & 0xFFFFFFFFFFFFFFFF + h2 = (h1 + h2) & 0xFFFFFFFFFFFFFFFF - h1 = fmix(h1) - h2 = fmix(h2) + h1 = fmix(h1) + h2 = fmix(h2) - h1 = (h1 + h2) & 0xFFFFFFFFFFFFFFFF - h2 = (h1 + h2) & 0xFFFFFFFFFFFFFFFF + h1 = (h1 + h2) & 0xFFFFFFFFFFFFFFFF + h2 = (h1 + h2) & 0xFFFFFFFFFFFFFFFF return [h1, h2] diff --git a/splitio/engine/splitters.py b/splitio/engine/splitters.py index c7e585bc..e11a2681 100644 --- a/splitio/engine/splitters.py +++ b/splitio/engine/splitters.py @@ -1,7 +1,4 @@ """A module for implementation of the Splitter engine.""" -from __future__ import absolute_import, division, 
print_function, unicode_literals - - from splitio.engine.evaluator import CONTROL from splitio.engine.hashfns import get_hash_fn diff --git a/splitio/exceptions.py b/splitio/exceptions.py index 0c633d33..f466d2f7 100644 --- a/splitio/exceptions.py +++ b/splitio/exceptions.py @@ -1,5 +1,3 @@ """This module contains everything related to split.io exceptions""" -from __future__ import absolute_import, division, print_function, unicode_literals - from splitio.client.factory import TimeoutException from splitio.storage.adapters.redis import SentinelConfigurationException diff --git a/splitio/models/events.py b/splitio/models/events.py index 2d3ba797..b924417b 100644 --- a/splitio/models/events.py +++ b/splitio/models/events.py @@ -3,8 +3,6 @@ The dto is implemented as a namedtuple for performance matters. """ - -from __future__ import print_function from collections import namedtuple diff --git a/splitio/models/grammar/condition.py b/splitio/models/grammar/condition.py index d7ef66a4..6f49a638 100644 --- a/splitio/models/grammar/condition.py +++ b/splitio/models/grammar/condition.py @@ -1,7 +1,6 @@ """Split conditions module.""" from enum import Enum -from future.utils import python_2_unicode_compatible from splitio.models.grammar import matchers from splitio.models.grammar import partitions @@ -21,7 +20,7 @@ class ConditionType(Enum): class Condition(object): """Condition object class.""" - def __init__( #pylint: disable=too-many-arguments + def __init__( # pylint: disable=too-many-arguments self, matcher_list, combiner, parts, label, @@ -82,19 +81,10 @@ def get_segment_names(self): :rtype: list(str) """ return [ - matcher._segment_name for matcher in self.matchers #pylint: disable=protected-access + matcher._segment_name for matcher in self.matchers # pylint: disable=protected-access if isinstance(matcher, matchers.UserDefinedSegmentMatcher) ] - @python_2_unicode_compatible - def __str__(self): - """Return the string representation of the condition.""" - return '{matcher} then split {parts}'.format( - matcher=self._matchers, parts=','.join( - '{size}:{treatment}'.format(size=partition.size, - treatment=partition.treatment) - for partition in self._partitions)) - def to_json(self): """Return the JSON representation of this condition.""" return { diff --git a/splitio/models/grammar/matchers/__init__.py b/splitio/models/grammar/matchers/__init__.py index f61eb6be..bab9abad 100644 --- a/splitio/models/grammar/matchers/__init__.py +++ b/splitio/models/grammar/matchers/__init__.py @@ -1,7 +1,4 @@ """Matchers entrypoint module.""" -from __future__ import absolute_import, division, print_function, \ - unicode_literals - from splitio.models.grammar.matchers.keys import AllKeysMatcher, UserDefinedSegmentMatcher from splitio.models.grammar.matchers.numeric import BetweenMatcher, EqualToMatcher, \ GreaterThanOrEqualMatcher, LessThanOrEqualMatcher diff --git a/splitio/models/grammar/matchers/keys.py b/splitio/models/grammar/matchers/keys.py index 6fcc2584..3df39062 100644 --- a/splitio/models/grammar/matchers/keys.py +++ b/splitio/models/grammar/matchers/keys.py @@ -1,8 +1,7 @@ """Keys matchers module.""" - -from future.utils import python_2_unicode_compatible from splitio.models.grammar.matchers.base import Matcher + class AllKeysMatcher(Matcher): """A matcher that always returns True.""" @@ -31,11 +30,6 @@ def _match(self, key, attributes=None, context=None): """ return key is not None - @python_2_unicode_compatible - def __str__(self): - """Return string Representation.""" - return 'in segment all' - 
def _add_matcher_specific_properties_to_json(self): """Add matcher specific properties to base dict before returning it.""" return {} @@ -83,10 +77,3 @@ def _add_matcher_specific_properties_to_json(self): 'segmentName': self._segment_name } } - - @python_2_unicode_compatible - def __str__(self): - """Return string Representation.""" - return 'in segment {segment_name}'.format( - segment_name=self._segment_name - ) diff --git a/splitio/models/grammar/matchers/misc.py b/splitio/models/grammar/matchers/misc.py index 3c7b1713..a484db07 100644 --- a/splitio/models/grammar/matchers/misc.py +++ b/splitio/models/grammar/matchers/misc.py @@ -1,9 +1,5 @@ """Miscelaneous matchers that don't fall into other categories.""" -from __future__ import absolute_import, division, print_function, \ - unicode_literals - import json -from future.utils import python_2_unicode_compatible from splitio.models.grammar.matchers.base import Matcher diff --git a/splitio/models/grammar/matchers/numeric.py b/splitio/models/grammar/matchers/numeric.py index b912ff7e..4ca3c7b0 100644 --- a/splitio/models/grammar/matchers/numeric.py +++ b/splitio/models/grammar/matchers/numeric.py @@ -1,8 +1,6 @@ """Numeric & Date based matchers.""" import numbers - import logging -from future.utils import python_2_unicode_compatible from splitio.models.grammar.matchers.base import Matcher from splitio.models import datatypes @@ -110,11 +108,6 @@ def _match(self, key, attributes=None, context=None): return False return self._lower <= self.input_parsers[self._data_type](matching_data) <= self._upper - @python_2_unicode_compatible - def __str__(self): - """Return string Representation.""" - return 'between {start} and {end}'.format(start=self._lower, end=self._upper) - def _add_matcher_specific_properties_to_json(self): """Return BetweenMatcher specific properties.""" return { diff --git a/splitio/models/grammar/matchers/sets.py b/splitio/models/grammar/matchers/sets.py index 7c8dfa77..6fe7e3f4 100644 --- a/splitio/models/grammar/matchers/sets.py +++ b/splitio/models/grammar/matchers/sets.py @@ -1,8 +1,4 @@ """Set based matchers module.""" -from __future__ import absolute_import, division, print_function, \ - unicode_literals -from future.utils import python_2_unicode_compatible - from splitio.models.grammar.matchers.base import Matcher @@ -49,13 +45,6 @@ def _add_matcher_specific_properties_to_json(self): } } - @python_2_unicode_compatible - def __str__(self): - """Return string Representation.""" - return 'contains all of the following set: [{whitelist}]'.format( - whitelist=','.join('"{}"'.format(item) for item in self._whitelist) - ) - class ContainsAnyOfSetMatcher(Matcher): """Matcher that returns true if the intersection of both sets is not empty.""" @@ -99,13 +88,6 @@ def _add_matcher_specific_properties_to_json(self): } } - @python_2_unicode_compatible - def __str__(self): - """Return string Representation.""" - return 'contains on of the following se: [{whitelist}]'.format( - whitelist=','.join('"{}"'.format(item) for item in self._whitelist) - ) - class EqualToSetMatcher(Matcher): """Matcher that returns true if the set provided by the user is equal to the matcher's one.""" @@ -149,13 +131,6 @@ def _add_matcher_specific_properties_to_json(self): } } - @python_2_unicode_compatible - def __str__(self): - """Return string Representation.""" - return 'equals the following set: [{whitelist}]'.format( - whitelist=','.join('"{}"'.format(item) for item in self._whitelist) - ) - class PartOfSetMatcher(Matcher): """a.""" @@ -199,10 +174,3 @@ 
def _add_matcher_specific_properties_to_json(self): 'whitelist': list(self._whitelist) } } - - @python_2_unicode_compatible - def __str__(self): - """Return string Representation.""" - return 'is a subset of the following set: [{whitelist}]'.format( - whitelist=','.join('"{}"'.format(item) for item in self._whitelist) - ) diff --git a/splitio/models/grammar/matchers/string.py b/splitio/models/grammar/matchers/string.py index 80f62de9..e32c4004 100644 --- a/splitio/models/grammar/matchers/string.py +++ b/splitio/models/grammar/matchers/string.py @@ -1,11 +1,7 @@ """String matchers module.""" -from __future__ import absolute_import, division, print_function, \ - unicode_literals - import logging import json import re -from future.utils import python_2_unicode_compatible from splitio.models.grammar.matchers.base import Matcher @@ -82,13 +78,6 @@ def _add_matcher_specific_properties_to_json(self): } } - @python_2_unicode_compatible - def __str__(self): - """Return string Representation.""" - return 'in whitelist [{whitelist}]'.format( - whitelist=','.join('"{}"'.format(item) for item in self._whitelist) - ) - class StartsWithMatcher(Matcher): """Matcher that returns true if the key is a prefix of the stored value.""" @@ -130,13 +119,6 @@ def _add_matcher_specific_properties_to_json(self): } } - @python_2_unicode_compatible - def __str__(self): - """Return string Representation.""" - return 'has one of the following prefixes [{whitelist}]'.format( - whitelist=','.join('"{}"'.format(item) for item in self._whitelist) - ) - class EndsWithMatcher(Matcher): """Matcher that returns true if the key ends with the suffix stored in matcher data.""" @@ -178,13 +160,6 @@ def _add_matcher_specific_properties_to_json(self): } } - @python_2_unicode_compatible - def __str__(self): - """Return string Representation.""" - return 'has one of the following suffixes [{whitelist}]'.format( - whitelist=','.join('"{}"'.format(item) for item in self._whitelist) - ) - class ContainsStringMatcher(Matcher): """Matcher that returns true if the input key is part of the string in matcher data.""" @@ -226,13 +201,6 @@ def _add_matcher_specific_properties_to_json(self): } } - @python_2_unicode_compatible - def __str__(self): - """Return string Representation.""" - return 'contains one of the following string: [{whitelist}]'.format( - whitelist=','.join('"{}"'.format(item) for item in self._whitelist) - ) - class RegexMatcher(Matcher): """Matcher that returns true if the user input matches the regex stored in the matcher.""" diff --git a/splitio/models/grammar/partitions.py b/splitio/models/grammar/partitions.py index e38d5d98..518af6d3 100644 --- a/splitio/models/grammar/partitions.py +++ b/splitio/models/grammar/partitions.py @@ -1,7 +1,5 @@ """Split partition module.""" -from future.utils import python_2_unicode_compatible - class Partition(object): """Partition object class.""" @@ -38,12 +36,6 @@ def to_json(self): 'size': self._size } - @python_2_unicode_compatible - def __str__(self): - """Return string representation of a partition.""" - return '{size}%:{treatment}'.format(size=self._size, - treatment=self._treatment) - def from_raw(raw_partition): """ diff --git a/splitio/models/impressions.py b/splitio/models/impressions.py index ff753693..b08d31fb 100644 --- a/splitio/models/impressions.py +++ b/splitio/models/impressions.py @@ -1,7 +1,4 @@ """Impressions model module.""" -from __future__ import absolute_import, division, print_function, \ - unicode_literals - from collections import namedtuple diff --git 
a/splitio/models/splits.py b/splitio/models/splits.py index a2863016..84402ed0 100644 --- a/splitio/models/splits.py +++ b/splitio/models/splits.py @@ -1,9 +1,6 @@ """Splits module.""" -from __future__ import absolute_import, division, print_function, unicode_literals - from enum import Enum from collections import namedtuple -from future.utils import python_2_unicode_compatible from splitio.models.grammar import condition @@ -208,17 +205,6 @@ def local_kill(self, default_treatment, change_number): self._change_number = change_number self._killed = True - @python_2_unicode_compatible - def __str__(self): - """Return string representation.""" - return 'name: {name}, seed: {seed}, killed: {killed}, ' \ - 'default treatment: {default_treatment}, ' \ - 'conditions: {conditions}'.format( - name=self._name, seed=self._seed, killed=self._killed, - default_treatment=self._default_treatment, - conditions=','.join(map(str, self._conditions)) - ) - def from_raw(raw_split): """ diff --git a/splitio/push/parser.py b/splitio/push/parser.py index 9b51097a..7d44096b 100644 --- a/splitio/push/parser.py +++ b/splitio/push/parser.py @@ -3,8 +3,6 @@ import json from enum import Enum -from future.utils import raise_from - from splitio.util.decorators import abstract_property from splitio.util import utctime_ms from splitio.push.sse import SSE_EVENT_ERROR, SSE_EVENT_MESSAGE @@ -207,7 +205,7 @@ def timestamp(self): return self._timestamp @property - def event_type(self): #pylint:disable=no-self-use + def event_type(self): # pylint:disable=no-self-use """ Return the event type. @@ -217,7 +215,7 @@ def event_type(self): #pylint:disable=no-self-use return EventType.MESSAGE @abstract_property - def message_type(self): #pylint:disable=no-self-use + def message_type(self): # pylint:disable=no-self-use """ Return the message type. @@ -244,7 +242,7 @@ def __init__(self, channel, timestamp, publishers): self._publishers = publishers @property - def message_type(self): #pylint:disable=no-self-use + def message_type(self): # pylint:disable=no-self-use """ Return the message type. @@ -295,7 +293,7 @@ def __init__(self, channel, timestamp, change_number): self._change_number = change_number @abstract_property - def update_type(self): #pylint:disable=no-self-use + def update_type(self): # pylint:disable=no-self-use """ Return the message type. @@ -305,7 +303,7 @@ def update_type(self): #pylint:disable=no-self-use pass @property - def message_type(self): #pylint:disable=no-self-use + def message_type(self): # pylint:disable=no-self-use """ Return the message type. @@ -333,7 +331,7 @@ def __init__(self, channel, timestamp, change_number): BaseUpdate.__init__(self, channel, timestamp, change_number) @property - def update_type(self): #pylint:disable=no-self-use + def update_type(self): # pylint:disable=no-self-use """ Return the message type. @@ -350,14 +348,14 @@ def __str__(self): class SplitKillUpdate(BaseUpdate): """Split Kill notification.""" - def __init__(self, channel, timestamp, change_number, split_name, default_treatment): #pylint:disable=too-many-arguments + def __init__(self, channel, timestamp, change_number, split_name, default_treatment): # pylint:disable=too-many-arguments """Class constructor.""" BaseUpdate.__init__(self, channel, timestamp, change_number) self._split_name = split_name self._default_treatment = default_treatment @property - def update_type(self): #pylint:disable=no-self-use + def update_type(self): # pylint:disable=no-self-use """ Return the message type. 
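# The remaining parser.py hunks below replace future.utils.raise_from with native
# Python 3 exception chaining. A minimal, self-contained sketch of the idiom; the
# EventParsingException name mirrors the SDK's exception, while the helper function
# itself is only illustrative and not part of this patch:

import json


class EventParsingException(Exception):
    """Raised when an incoming SSE event cannot be parsed."""


def parse_event_data(raw_data):
    """Parse an SSE event payload, chaining the original error on failure."""
    try:
        return json.loads(raw_data)
    except ValueError as exc:
        # `raise ... from exc` stores the original error in __cause__, which is what
        # raise_from() emulated; the resulting traceback shows both exceptions.
        raise EventParsingException('Error parsing json') from exc

# Usage: parse_event_data('{"type": "CONTROL"}') returns a dict, while
# parse_event_data('not json') raises EventParsingException chained to the JSONDecodeError.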
@@ -401,7 +399,7 @@ def __init__(self, channel, timestamp, change_number, segment_name): self._segment_name = segment_name @property - def update_type(self): #pylint:disable=no-self-use + def update_type(self): # pylint:disable=no-self-use """ Return the message type. @@ -434,7 +432,7 @@ def __init__(self, channel, timestamp, control_type): self._control_type = ControlType(control_type) @property - def message_type(self): #pylint:disable=no-self-use + def message_type(self): # pylint:disable=no-self-use """ Return the message type. @@ -536,13 +534,13 @@ def parse_incoming_event(raw_event): try: parsed_data = json.loads(raw_event.data) - except Exception as exc: #pylint:disable=broad-except - raise_from(EventParsingException('Error parsing json'), exc) + except Exception as exc: # pylint:disable=broad-except + raise EventParsingException('Error parsing json') from exc try: event_type = EventType(raw_event.event) except ValueError as exc: - raise_from(exc, 'unknown event type %s' % raw_event.event) + raise Exception('unknown event type %s' % raw_event.event) from exc return { EventType.ERROR: _parse_error, diff --git a/splitio/push/splitsse.py b/splitio/push/splitsse.py index e6a60324..52459378 100644 --- a/splitio/push/splitsse.py +++ b/splitio/push/splitsse.py @@ -133,7 +133,7 @@ def connect(url): def stop(self, blocking=False, timeout=None): """Abort the ongoing connection.""" if self._status == SplitSSEClient._Status.IDLE: - _LOGGER.warn('sse already closed. ignoring') + _LOGGER.warning('sse already closed. ignoring') return self._client.shutdown() diff --git a/splitio/push/sse.py b/splitio/push/sse.py index 01c7de7a..02e0bed7 100644 --- a/splitio/push/sse.py +++ b/splitio/push/sse.py @@ -160,11 +160,11 @@ def start(self, url, extra_headers=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT) def shutdown(self): """Shutdown the current connection.""" if self._conn is None or self._conn.sock is None: - _LOGGER.warn("no sse connection has been started on this SSEClient instance. Ignoring") + _LOGGER.warning("no sse connection has been started on this SSEClient instance. 
Ignoring") return if self._shutdown_requested: - _LOGGER.warn("shutdown already requested") + _LOGGER.warning("shutdown already requested") return self._shutdown_requested = True diff --git a/splitio/storage/__init__.py b/splitio/storage/__init__.py index a40bc155..8d5a04a6 100644 --- a/splitio/storage/__init__.py +++ b/splitio/storage/__init__.py @@ -1,6 +1,4 @@ """Base storage interfaces.""" -from __future__ import absolute_import - import abc diff --git a/splitio/storage/adapters/redis.py b/splitio/storage/adapters/redis.py index 19c3a3be..9c338d88 100644 --- a/splitio/storage/adapters/redis.py +++ b/splitio/storage/adapters/redis.py @@ -1,9 +1,5 @@ """Redis client wrapper with prefix support.""" -from __future__ import absolute_import, division, print_function, \ - unicode_literals - from builtins import str -from future.utils import raise_from try: from redis import StrictRedis @@ -149,7 +145,7 @@ def keys(self, pattern): for key in self._prefix_helper.remove_prefix(self._decorated.keys(self._prefix_helper.add_prefix(pattern))) ] except RedisError as exc: - raise_from(RedisAdapterException('Failed to execute keys operation'), exc) + raise RedisAdapterException('Failed to execute keys operation') from exc def set(self, name, value, *args, **kwargs): """Mimic original redis function but using user custom prefix.""" @@ -158,42 +154,42 @@ def set(self, name, value, *args, **kwargs): self._prefix_helper.add_prefix(name), value, *args, **kwargs ) except RedisError as exc: - raise_from(RedisAdapterException('Failed to execute set operation'), exc) + raise RedisAdapterException('Failed to execute set operation') from exc def get(self, name): """Mimic original redis function but using user custom prefix.""" try: return _bytes_to_string(self._decorated.get(self._prefix_helper.add_prefix(name))) except RedisError as exc: - raise_from(RedisAdapterException('Error executing get operation'), exc) + raise RedisAdapterException('Error executing get operation') from exc def setex(self, name, time, value): """Mimic original redis function but using user custom prefix.""" try: return self._decorated.setex(self._prefix_helper.add_prefix(name), time, value) except RedisError as exc: - raise_from(RedisAdapterException('Error executing setex operation'), exc) + raise RedisAdapterException('Error executing setex operation') from exc def delete(self, *names): """Mimic original redis function but using user custom prefix.""" try: return self._decorated.delete(*self._prefix_helper.add_prefix(list(names))) except RedisError as exc: - raise_from(RedisAdapterException('Error executing delete operation'), exc) + raise RedisAdapterException('Error executing delete operation') from exc def exists(self, name): """Mimic original redis function but using user custom prefix.""" try: return self._decorated.exists(self._prefix_helper.add_prefix(name)) except RedisError as exc: - raise_from(RedisAdapterException('Error executing exists operation'), exc) + raise RedisAdapterException('Error executing exists operation') from exc def lrange(self, key, start, end): """Mimic original redis function but using user custom prefix.""" try: return self._decorated.lrange(self._prefix_helper.add_prefix(key), start, end) except RedisError as exc: - raise_from(RedisAdapterException('Error executing exists operation'), exc) + raise RedisAdapterException('Error executing exists operation') from exc def mget(self, names): """Mimic original redis function but using user custom prefix.""" @@ -203,7 +199,7 @@ def mget(self, names): for item in 
self._decorated.mget(self._prefix_helper.add_prefix(names)) ] except RedisError as exc: - raise_from(RedisAdapterException('Error executing mget operation'), exc) + raise RedisAdapterException('Error executing mget operation') from exc def smembers(self, name): """Mimic original redis function but using user custom prefix.""" @@ -213,105 +209,105 @@ def smembers(self, name): for item in self._decorated.smembers(self._prefix_helper.add_prefix(name)) ] except RedisError as exc: - raise_from(RedisAdapterException('Error executing smembers operation'), exc) + raise RedisAdapterException('Error executing smembers operation') from exc def sadd(self, name, *values): """Mimic original redis function but using user custom prefix.""" try: return self._decorated.sadd(self._prefix_helper.add_prefix(name), *values) except RedisError as exc: - raise_from(RedisAdapterException('Error executing sadd operation'), exc) + raise RedisAdapterException('Error executing sadd operation') from exc def srem(self, name, *values): """Mimic original redis function but using user custom prefix.""" try: return self._decorated.srem(self._prefix_helper.add_prefix(name), *values) except RedisError as exc: - raise_from(RedisAdapterException('Error executing srem operation'), exc) + raise RedisAdapterException('Error executing srem operation') from exc def sismember(self, name, value): """Mimic original redis function but using user custom prefix.""" try: return self._decorated.sismember(self._prefix_helper.add_prefix(name), value) except RedisError as exc: - raise_from(RedisAdapterException('Error executing sismember operation'), exc) + raise RedisAdapterException('Error executing sismember operation') from exc def eval(self, script, number_of_keys, *keys): """Mimic original redis function but using user custom prefix.""" try: return self._decorated.eval(script, number_of_keys, *self._prefix_helper.add_prefix(list(keys))) except RedisError as exc: - raise_from(RedisAdapterException('Error executing eval operation'), exc) + raise RedisAdapterException('Error executing eval operation') from exc def hset(self, name, key, value): """Mimic original redis function but using user custom prefix.""" try: return self._decorated.hset(self._prefix_helper.add_prefix(name), key, value) except RedisError as exc: - raise_from(RedisAdapterException('Error executing hset operation'), exc) + raise RedisAdapterException('Error executing hset operation') from exc def hget(self, name, key): """Mimic original redis function but using user custom prefix.""" try: return _bytes_to_string(self._decorated.hget(self._prefix_helper.add_prefix(name), key)) except RedisError as exc: - raise_from(RedisAdapterException('Error executing hget operation'), exc) + raise RedisAdapterException('Error executing hget operation') from exc def incr(self, name, amount=1): """Mimic original redis function but using user custom prefix.""" try: return self._decorated.incr(self._prefix_helper.add_prefix(name), amount) except RedisError as exc: - raise_from(RedisAdapterException('Error executing incr operation'), exc) + raise RedisAdapterException('Error executing incr operation') from exc def getset(self, name, value): """Mimic original redis function but using user custom prefix.""" try: return _bytes_to_string(self._decorated.getset(self._prefix_helper.add_prefix(name), value)) except RedisError as exc: - raise_from(RedisAdapterException('Error executing getset operation'), exc) + raise RedisAdapterException('Error executing getset operation') from exc def rpush(self, 
key, *values): """Mimic original redis function but using user custom prefix.""" try: return self._decorated.rpush(self._prefix_helper.add_prefix(key), *values) except RedisError as exc: - raise_from(RedisAdapterException('Error executing rpush operation'), exc) + raise RedisAdapterException('Error executing rpush operation') from exc def expire(self, key, value): """Mimic original redis function but using user custom prefix.""" try: return self._decorated.expire(self._prefix_helper.add_prefix(key), value) except RedisError as exc: - raise_from(RedisAdapterException('Error executing expire operation'), exc) + raise RedisAdapterException('Error executing expire operation') from exc def rpop(self, key): """Mimic original redis function but using user custom prefix.""" try: return _bytes_to_string(self._decorated.rpop(self._prefix_helper.add_prefix(key))) except RedisError as exc: - raise_from(RedisAdapterException('Error executing rpop operation'), exc) + raise RedisAdapterException('Error executing rpop operation') from exc def ttl(self, key): """Mimic original redis function but using user custom prefix.""" try: return self._decorated.ttl(self._prefix_helper.add_prefix(key)) except RedisError as exc: - raise_from(RedisAdapterException('Error executing ttl operation'), exc) + raise RedisAdapterException('Error executing ttl operation') from exc def lpop(self, key): """Mimic original redis function but using user custom prefix.""" try: return self._decorated.lpop(self._prefix_helper.add_prefix(key)) except RedisError as exc: - raise_from(RedisAdapterException('Error executing lpop operation'), exc) + raise RedisAdapterException('Error executing lpop operation') from exc def pipeline(self): """Mimic original redis pipeline.""" try: return RedisPipelineAdapter(self._decorated, self._prefix_helper) except RedisError as exc: - raise_from(RedisAdapterException('Error executing ttl operation'), exc) + raise RedisAdapterException('Error executing ttl operation') from exc class RedisPipelineAdapter(object): @@ -344,7 +340,7 @@ def execute(self): try: return self._pipe.execute() except RedisError as exc: - raise_from(RedisAdapterException('Error executing pipeline operation'), exc) + raise RedisAdapterException('Error executing pipeline operation') from exc def _build_default_client(config): # pylint: disable=too-many-locals diff --git a/splitio/storage/adapters/uwsgi_cache.py b/splitio/storage/adapters/uwsgi_cache.py index f6b908b6..3cf41150 100644 --- a/splitio/storage/adapters/uwsgi_cache.py +++ b/splitio/storage/adapters/uwsgi_cache.py @@ -1,13 +1,12 @@ """UWSGI Cache Storage adapter module.""" -from __future__ import absolute_import, division, print_function, unicode_literals import time try: - #uwsgi is loaded at runtime by uwsgi app. + # uwsgi is loaded at runtime by uwsgi app. 
import uwsgi except ImportError: - def missing_uwsgi_dependencies(*args, **kwargs): #pylint: disable=unused-argument + def missing_uwsgi_dependencies(*args, **kwargs): # pylint: disable=unused-argument """Only complain for missing deps if they're used.""" raise NotImplementedError('Missing uWSGI support dependencies.') uwsgi = missing_uwsgi_dependencies @@ -96,14 +95,14 @@ def cache_get(self, key, cache_namespace='default'): return self._cache[cache_namespace][key] return None - def cache_set(self, key, value, expires=0, cache_namespace='default'): #pylint: disable=unused-argument + def cache_set(self, key, value, expires=0, cache_namespace='default'): # pylint: disable=unused-argument """Set an elemen in the cache.""" self._check_string_data_type(value) if cache_namespace in self._cache: self._cache[cache_namespace][key] = value else: - self._cache[cache_namespace] = {key:value} + self._cache[cache_namespace] = {key: value} def cache_update(self, key, value, expires=0, cache_namespace='default'): """Update an element.""" diff --git a/splitio/storage/inmemmory.py b/splitio/storage/inmemmory.py index e5fedc63..46494a7a 100644 --- a/splitio/storage/inmemmory.py +++ b/splitio/storage/inmemmory.py @@ -1,6 +1,4 @@ """In memory storage classes.""" -from __future__ import absolute_import - import logging import threading import queue diff --git a/splitio/storage/redis.py b/splitio/storage/redis.py index a8fe51b3..c747c1f3 100644 --- a/splitio/storage/redis.py +++ b/splitio/storage/redis.py @@ -1,7 +1,4 @@ """Redis storage module.""" -from __future__ import absolute_import, division, print_function, \ - unicode_literals - import json import logging diff --git a/splitio/sync/split.py b/splitio/sync/split.py index 37221907..a86ca9c9 100644 --- a/splitio/sync/split.py +++ b/splitio/sync/split.py @@ -2,8 +2,6 @@ import logging import re import itertools - -from future.utils import raise_from import yaml from splitio.api import APIException @@ -192,10 +190,7 @@ def _read_splits_from_legacy_file(cls, filename): return to_return except IOError as exc: - raise_from( - ValueError("Error parsing file %s. Make sure it's readable." % filename), - exc - ) + raise ValueError("Error parsing file %s. Make sure it's readable." % filename) from exc @classmethod def _read_splits_from_yaml_file(cls, filename): @@ -234,10 +229,7 @@ def _read_splits_from_yaml_file(cls, filename): return to_return except IOError as exc: - raise_from( - ValueError("Error parsing file %s. Make sure it's readable." % filename), - exc - ) + raise ValueError("Error parsing file %s. Make sure it's readable." % filename) from exc def synchronize_splits(self, till=None): # pylint:disable=unused-argument """Update splits in storage.""" diff --git a/splitio/sync/synchronizer.py b/splitio/sync/synchronizer.py index b27e9146..0bfeb0ca 100644 --- a/splitio/sync/synchronizer.py +++ b/splitio/sync/synchronizer.py @@ -4,7 +4,6 @@ import logging import threading -from future.utils import raise_from from splitio.api import APIException @@ -268,7 +267,7 @@ def sync_all(self): # Only retrying splits, since segments may trigger too many calls. 
if not self._synchronize_segments(): - _LOGGER.warn('Segments failed to synchronize.') + _LOGGER.warning('Segments failed to synchronize.') # All is good return @@ -371,7 +370,7 @@ def sync_all(self): self._split_synchronizers.split_sync.synchronize_splits(None) except APIException as exc: _LOGGER.error('Failed syncing splits') - raise_from(APIException('Failed to sync splits'), exc) + raise APIException('Failed to sync splits') from exc def start_periodic_fetching(self): """Start fetchers for splits and segments.""" diff --git a/splitio/tasks/events_sync.py b/splitio/tasks/events_sync.py index 45d426cd..bddcfd2c 100644 --- a/splitio/tasks/events_sync.py +++ b/splitio/tasks/events_sync.py @@ -1,7 +1,4 @@ """Events syncrhonization task.""" -from __future__ import absolute_import, division, print_function, \ - unicode_literals - import logging from splitio.tasks import BaseSynchronizationTask diff --git a/splitio/tasks/impressions_sync.py b/splitio/tasks/impressions_sync.py index 9ffaa37b..bfcc8993 100644 --- a/splitio/tasks/impressions_sync.py +++ b/splitio/tasks/impressions_sync.py @@ -1,7 +1,4 @@ """Impressions syncrhonization task.""" -from __future__ import absolute_import, division, print_function, \ - unicode_literals - import logging from splitio.tasks import BaseSynchronizationTask diff --git a/tests/client/test_input_validator.py b/tests/client/test_input_validator.py index 8204705d..b52c2e2c 100644 --- a/tests/client/test_input_validator.py +++ b/tests/client/test_input_validator.py @@ -1,9 +1,4 @@ """Unit tests for the input_validator module.""" -# pylint: disable=protected-access,too-many-statements,no-self-use,line-too-long - -from __future__ import absolute_import, division, print_function, \ - unicode_literals - import logging from splitio.client.factory import SplitFactory, get_factory From 2a385d2b4a34122ce0f6901507fd5952659ad822 Mon Sep 17 00:00:00 2001 From: Matias Melograno Date: Tue, 6 Apr 2021 18:07:51 -0300 Subject: [PATCH 05/17] more removals --- README.md | 2 +- doc/source/introduction.rst | 2 +- setup.py | 9 +++--- splitio/push/sse.py | 40 ++++--------------------- splitio/storage/adapters/redis.py | 8 +---- splitio/util/decorators.py | 8 ++--- splitio/util/threadutil.py | 6 ++-- tests/integration/test_streaming_e2e.py | 6 +--- tests/storage/test_redis.py | 1 - 9 files changed, 18 insertions(+), 64 deletions(-) diff --git a/README.md b/README.md index 67a67427..05db5456 100644 --- a/README.md +++ b/README.md @@ -7,7 +7,7 @@ This SDK is designed to work with Split, the platform for controlled rollouts, w [![Twitter Follow](https://img.shields.io/twitter/follow/splitsoftware.svg?style=social&label=Follow&maxAge=1529000)](https://twitter.com/intent/follow?screen_name=splitsoftware) ## Compatibility -This SDK is compatible with **Python 2.7 and higher**. +This SDK is compatible with **Python 3 and higher**. ## Getting started Below is a simple example that describes the instantiation and most basic usage of our SDK: diff --git a/doc/source/introduction.rst b/doc/source/introduction.rst index f898e3e5..bcad2158 100644 --- a/doc/source/introduction.rst +++ b/doc/source/introduction.rst @@ -6,7 +6,7 @@ This project provides Python programs access to the `Split.io Installation and Requirements ----------------------------- -``splitio_client`` supports both Python 2 (2.7 or later) and Python 3 (3.3 or later). Stable versions can be installed from `PyPI `_ using pip: :: +``splitio_client`` supports Python 3 (3.3 or later). 
Stable versions can be installed from `PyPI `_ using pip: :: pip install splitio_client diff --git a/setup.py b/setup.py index 46e3e2e0..ade4c52e 100644 --- a/setup.py +++ b/setup.py @@ -6,16 +6,15 @@ TESTS_REQUIRES = [ 'flake8', - 'pytest<=4.6', # for deprecated python versions: https://docs.pytest.org/en/latest/py27-py34-deprecation.html - 'pytest-mock==2.0.0', + 'pytest>=6.2.3', + 'pytest-mock>=3.5.1', 'coverage', 'pytest-cov', - 'mock;python_version<"3"' ] INSTALL_REQUIRES = [ - 'requests>=2.9.1', - 'pyyaml>=5.1', + 'requests>=2.25.1', + 'pyyaml>=5.4.1', 'docopt>=0.6.2', 'enum34;python_version<"3.4"', ] diff --git a/splitio/push/sse.py b/splitio/push/sse.py index 02e0bed7..1cbf8a5c 100644 --- a/splitio/push/sse.py +++ b/splitio/push/sse.py @@ -1,15 +1,9 @@ """Low-level SSE Client.""" import logging import socket -import sys from collections import namedtuple - -try: # try to import python3 names. fallback to python2 - from http.client import HTTPConnection, HTTPSConnection - from urllib.parse import urlparse -except ImportError: - from urlparse import urlparse - from httplib import HTTPConnection, HTTPSConnection +from http.client import HTTPConnection, HTTPSConnection +from urllib.parse import urlparse _LOGGER = logging.getLogger(__name__) @@ -23,30 +17,6 @@ __ENDING_CHARS = set(['\n', '']) -def __httpresponse_readline_py2(response): - """ - Hacky `readline` implementation to be used with chunked transfers in python2. - - This makes syscalls in a loop, so not particularly efficient. Migrate to py3 now! - - :param response: HTTPConnection's response after a .request() call - :type response: httplib.HTTPResponse - - :returns: a string with the read line - :rtype: str - """ - buf = [] - while True: - read = response.read(1) - buf.append(read) - if read in __ENDING_CHARS: - break - - return ''.join(buf) - - -_http_response_readline = (__httpresponse_readline_py2 if sys.version_info.major <= 2 #pylint:disable=invalid-name - else lambda response: response.readline()) class EventBuilder(object): @@ -105,7 +75,7 @@ def _read_events(self): response = self._conn.getresponse() event_builder = EventBuilder() while True: - line = _http_response_readline(response) + line = response.readline() if line is None or len(line) <= 0: # connection ended break elif line.startswith(b':'): # comment. Skip @@ -118,7 +88,7 @@ def _read_events(self): event_builder = EventBuilder() else: event_builder.process_line(line) - except Exception: #pylint:disable=broad-except + except Exception: # pylint:disable=broad-except _LOGGER.debug('sse connection ended.') _LOGGER.debug('stack trace: ', exc_info=True) finally: @@ -127,7 +97,7 @@ def _read_events(self): return self._shutdown_requested - def start(self, url, extra_headers=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT): #pylint:disable=protected-access + def start(self, url, extra_headers=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT): # pylint:disable=protected-access """ Connect and start listening for events. diff --git a/splitio/storage/adapters/redis.py b/splitio/storage/adapters/redis.py index 9c338d88..5fa372bb 100644 --- a/splitio/storage/adapters/redis.py +++ b/splitio/storage/adapters/redis.py @@ -63,13 +63,7 @@ def add_prefix(self, k): """ Add a prefix to the contents of k. - 'k' may be: - - a single key (of type string or unicode in python2, or type string - in python 3. In which case we simple add a prefix with a dot. - - a list, in which the prefix is applied to element. 
- If no user prefix is stored, the key/list of keys will be returned as is - - :param k: single (string) or list of (list) keys. + :param k: single (string). :returns: Key(s) with prefix if applicable """ if self._prefix: diff --git a/splitio/util/decorators.py b/splitio/util/decorators.py index 960b5921..e0775dea 100644 --- a/splitio/util/decorators.py +++ b/splitio/util/decorators.py @@ -1,11 +1,10 @@ """Misc decorators.""" -import sys +from abc import abstractmethod -from abc import abstractmethod, abstractproperty def abstract_property(func): """ - Python2/3 compatible abstract property decorator. + Abstract property decorator. :param func: method to decorate :type func: callable @@ -13,5 +12,4 @@ def abstract_property(func): :returns: decorated function :rtype: callable """ - return (property(abstractmethod(func)) if sys.version_info > (3, 3) - else abstractproperty(func)) + return property(abstractmethod(func)) diff --git a/splitio/util/threadutil.py b/splitio/util/threadutil.py index f76b4590..db558a58 100644 --- a/splitio/util/threadutil.py +++ b/splitio/util/threadutil.py @@ -1,16 +1,14 @@ """Threading utilities.""" -from inspect import isclass import threading -# python2 workaround -_EventClass = threading.Event if isclass(threading.Event) else threading._Event #pylint:disable=protected-access,invalid-name +_EventClass = threading.Event class EventGroup(object): """EventGroup that can be waited with an OR condition.""" - class Event(_EventClass): #pylint:disable=too-few-public-methods + class Event(_EventClass): # pylint:disable=too-few-public-methods """Threading event meant to be used in an group.""" def __init__(self, shared_condition): diff --git a/tests/integration/test_streaming_e2e.py b/tests/integration/test_streaming_e2e.py index b9dc15c3..48fd59c0 100644 --- a/tests/integration/test_streaming_e2e.py +++ b/tests/integration/test_streaming_e2e.py @@ -7,11 +7,7 @@ from queue import Queue from splitio.client.factory import get_factory from tests.helpers.mockserver import SSEMockServer, SplitMockServer - -try: # try to import python3 names. fallback to python2 - from urllib.parse import parse_qs -except ImportError: - from urlparse import parse_qs +from urllib.parse import parse_qs class StreamingIntegrationTests(object): diff --git a/tests/storage/test_redis.py b/tests/storage/test_redis.py index 4d775f61..35493043 100644 --- a/tests/storage/test_redis.py +++ b/tests/storage/test_redis.py @@ -366,7 +366,6 @@ def test_add_events(self, mocker): } }) for e in events] - # To deal with python2 & 3 differences in hashing/order when dumping json. 
list_of_raw_json_strings_called = adapter.rpush.mock_calls[0][1][1:] list_of_events_called = [json.loads(event) for event in list_of_raw_json_strings_called] list_of_events_sent = [json.loads(event) for event in list_of_raw_events] From 5ce20b35064dea96295fa7f8e22e1d3f31b9fecc Mon Sep 17 00:00:00 2001 From: Matias Melograno Date: Wed, 7 Apr 2021 11:36:33 -0300 Subject: [PATCH 06/17] fixed --- splitio/models/grammar/condition.py | 8 ++++++++ splitio/models/grammar/matchers/keys.py | 10 +++++++++ splitio/models/grammar/matchers/numeric.py | 4 ++++ splitio/models/grammar/matchers/sets.py | 24 ++++++++++++++++++++++ splitio/models/grammar/matchers/string.py | 24 ++++++++++++++++++++++ splitio/models/grammar/partitions.py | 5 +++++ splitio/models/splits.py | 10 +++++++++ 7 files changed, 85 insertions(+) diff --git a/splitio/models/grammar/condition.py b/splitio/models/grammar/condition.py index 6f49a638..d38e6991 100644 --- a/splitio/models/grammar/condition.py +++ b/splitio/models/grammar/condition.py @@ -85,6 +85,14 @@ def get_segment_names(self): if isinstance(matcher, matchers.UserDefinedSegmentMatcher) ] + def __str__(self): + """Return the string representation of the condition.""" + return '{matcher} then split {parts}'.format( + matcher=self._matchers, parts=','.join( + '{size}:{treatment}'.format(size=partition.size, + treatment=partition.treatment) + for partition in self._partitions)) + def to_json(self): """Return the JSON representation of this condition.""" return { diff --git a/splitio/models/grammar/matchers/keys.py b/splitio/models/grammar/matchers/keys.py index 3df39062..7f10fec8 100644 --- a/splitio/models/grammar/matchers/keys.py +++ b/splitio/models/grammar/matchers/keys.py @@ -30,6 +30,10 @@ def _match(self, key, attributes=None, context=None): """ return key is not None + def __str__(self): + """Return string Representation.""" + return 'in segment all' + def _add_matcher_specific_properties_to_json(self): """Add matcher specific properties to base dict before returning it.""" return {} @@ -77,3 +81,9 @@ def _add_matcher_specific_properties_to_json(self): 'segmentName': self._segment_name } } + + def __str__(self): + """Return string Representation.""" + return 'in segment {segment_name}'.format( + segment_name=self._segment_name + ) diff --git a/splitio/models/grammar/matchers/numeric.py b/splitio/models/grammar/matchers/numeric.py index 4ca3c7b0..a722da0d 100644 --- a/splitio/models/grammar/matchers/numeric.py +++ b/splitio/models/grammar/matchers/numeric.py @@ -108,6 +108,10 @@ def _match(self, key, attributes=None, context=None): return False return self._lower <= self.input_parsers[self._data_type](matching_data) <= self._upper + def __str__(self): + """Return string Representation.""" + return 'between {start} and {end}'.format(start=self._lower, end=self._upper) + def _add_matcher_specific_properties_to_json(self): """Return BetweenMatcher specific properties.""" return { diff --git a/splitio/models/grammar/matchers/sets.py b/splitio/models/grammar/matchers/sets.py index 6fe7e3f4..49890a98 100644 --- a/splitio/models/grammar/matchers/sets.py +++ b/splitio/models/grammar/matchers/sets.py @@ -45,6 +45,12 @@ def _add_matcher_specific_properties_to_json(self): } } + def __str__(self): + """Return string Representation.""" + return 'contains all of the following set: [{whitelist}]'.format( + whitelist=','.join('"{}"'.format(item) for item in self._whitelist) + ) + class ContainsAnyOfSetMatcher(Matcher): """Matcher that returns true if the intersection of both sets is 
not empty.""" @@ -88,6 +94,12 @@ def _add_matcher_specific_properties_to_json(self): } } + def __str__(self): + """Return string Representation.""" + return 'contains on of the following se: [{whitelist}]'.format( + whitelist=','.join('"{}"'.format(item) for item in self._whitelist) + ) + class EqualToSetMatcher(Matcher): """Matcher that returns true if the set provided by the user is equal to the matcher's one.""" @@ -131,6 +143,12 @@ def _add_matcher_specific_properties_to_json(self): } } + def __str__(self): + """Return string Representation.""" + return 'equals the following set: [{whitelist}]'.format( + whitelist=','.join('"{}"'.format(item) for item in self._whitelist) + ) + class PartOfSetMatcher(Matcher): """a.""" @@ -174,3 +192,9 @@ def _add_matcher_specific_properties_to_json(self): 'whitelist': list(self._whitelist) } } + + def __str__(self): + """Return string Representation.""" + return 'is a subset of the following set: [{whitelist}]'.format( + whitelist=','.join('"{}"'.format(item) for item in self._whitelist) + ) diff --git a/splitio/models/grammar/matchers/string.py b/splitio/models/grammar/matchers/string.py index e32c4004..788972c6 100644 --- a/splitio/models/grammar/matchers/string.py +++ b/splitio/models/grammar/matchers/string.py @@ -78,6 +78,12 @@ def _add_matcher_specific_properties_to_json(self): } } + def __str__(self): + """Return string Representation.""" + return 'in whitelist [{whitelist}]'.format( + whitelist=','.join('"{}"'.format(item) for item in self._whitelist) + ) + class StartsWithMatcher(Matcher): """Matcher that returns true if the key is a prefix of the stored value.""" @@ -119,6 +125,12 @@ def _add_matcher_specific_properties_to_json(self): } } + def __str__(self): + """Return string Representation.""" + return 'has one of the following prefixes [{whitelist}]'.format( + whitelist=','.join('"{}"'.format(item) for item in self._whitelist) + ) + class EndsWithMatcher(Matcher): """Matcher that returns true if the key ends with the suffix stored in matcher data.""" @@ -160,6 +172,12 @@ def _add_matcher_specific_properties_to_json(self): } } + def __str__(self): + """Return string Representation.""" + return 'has one of the following suffixes [{whitelist}]'.format( + whitelist=','.join('"{}"'.format(item) for item in self._whitelist) + ) + class ContainsStringMatcher(Matcher): """Matcher that returns true if the input key is part of the string in matcher data.""" @@ -201,6 +219,12 @@ def _add_matcher_specific_properties_to_json(self): } } + def __str__(self): + """Return string Representation.""" + return 'contains one of the following string: [{whitelist}]'.format( + whitelist=','.join('"{}"'.format(item) for item in self._whitelist) + ) + class RegexMatcher(Matcher): """Matcher that returns true if the user input matches the regex stored in the matcher.""" diff --git a/splitio/models/grammar/partitions.py b/splitio/models/grammar/partitions.py index 518af6d3..51f84ac6 100644 --- a/splitio/models/grammar/partitions.py +++ b/splitio/models/grammar/partitions.py @@ -36,6 +36,11 @@ def to_json(self): 'size': self._size } + def __str__(self): + """Return string representation of a partition.""" + return '{size}%:{treatment}'.format(size=self._size, + treatment=self._treatment) + def from_raw(raw_partition): """ diff --git a/splitio/models/splits.py b/splitio/models/splits.py index 84402ed0..5e0ab394 100644 --- a/splitio/models/splits.py +++ b/splitio/models/splits.py @@ -205,6 +205,16 @@ def local_kill(self, default_treatment, change_number): 
self._change_number = change_number self._killed = True + def __str__(self): + """Return string representation.""" + return 'name: {name}, seed: {seed}, killed: {killed}, ' \ + 'default treatment: {default_treatment}, ' \ + 'conditions: {conditions}'.format( + name=self._name, seed=self._seed, killed=self._killed, + default_treatment=self._default_treatment, + conditions=','.join(map(str, self._conditions)) + ) + def from_raw(raw_split): """ From 8947da4a25d5a29e4dae9a2f68b1ae14993189a6 Mon Sep 17 00:00:00 2001 From: Matias Melograno Date: Thu, 8 Apr 2021 10:55:07 -0300 Subject: [PATCH 07/17] improvements --- setup.py | 4 ++-- splitio/util/threadutil.py | 13 +++++-------- 2 files changed, 7 insertions(+), 10 deletions(-) diff --git a/setup.py b/setup.py index ade4c52e..e67c7a27 100644 --- a/setup.py +++ b/setup.py @@ -13,8 +13,8 @@ ] INSTALL_REQUIRES = [ - 'requests>=2.25.1', - 'pyyaml>=5.4.1', + 'requests>=2.9.1', + 'pyyaml>=5.1', 'docopt>=0.6.2', 'enum34;python_version<"3.4"', ] diff --git a/splitio/util/threadutil.py b/splitio/util/threadutil.py index db558a58..184f7186 100644 --- a/splitio/util/threadutil.py +++ b/splitio/util/threadutil.py @@ -1,14 +1,11 @@ """Threading utilities.""" -import threading - - -_EventClass = threading.Event +from threading import Event, Condition class EventGroup(object): """EventGroup that can be waited with an OR condition.""" - class Event(_EventClass): # pylint:disable=too-few-public-methods + class Event(Event): # pylint:disable=too-few-public-methods """Threading event meant to be used in an group.""" def __init__(self, shared_condition): @@ -18,18 +15,18 @@ def __init__(self, shared_condition): :param shared_condition: shared condition varaible. :type shared_condition: threading.Condition """ - _EventClass.__init__(self) + Event.__init__(self) self._shared_cond = shared_condition def set(self): """Set the event.""" - _EventClass.set(self) + Event.set(self) with self._shared_cond: self._shared_cond.notify() def __init__(self): """Construct an event group.""" - self._cond = threading.Condition() + self._cond = Condition() def make_event(self): """ From d23e9796be3ddb96cf6fac1b879e1ff54f09ef38 Mon Sep 17 00:00:00 2001 From: Matias Melograno Date: Thu, 8 Apr 2021 12:26:49 -0300 Subject: [PATCH 08/17] encoding python3 --- splitio/client/config.py | 4 +-- splitio/storage/adapters/redis.py | 26 +++++++------------ tests/.DS_Store | Bin 0 -> 6148 bytes tests/client/test_factory.py | 6 ++--- tests/integration/test_client_e2e.py | 7 ++--- tests/storage/adapters/test_redis_adapter.py | 6 ++--- 6 files changed, 21 insertions(+), 28 deletions(-) create mode 100644 tests/.DS_Store diff --git a/splitio/client/config.py b/splitio/client/config.py index b051a2c0..98dbfec8 100644 --- a/splitio/client/config.py +++ b/splitio/client/config.py @@ -41,9 +41,9 @@ 'redisUnixSocketPath': None, 'redisEncoding': 'utf-8', 'redisEncodingErrors': 'strict', - 'redisCharset': None, + 'redisCharset': 'utf-8', 'redisErrors': None, - 'redisDecodeResponses': False, + 'redisDecodeResponses': True, 'redisRetryOnTimeout': False, 'redisSsl': False, 'redisSslKeyfile': None, diff --git a/splitio/storage/adapters/redis.py b/splitio/storage/adapters/redis.py index 5fa372bb..35575eff 100644 --- a/splitio/storage/adapters/redis.py +++ b/splitio/storage/adapters/redis.py @@ -15,12 +15,6 @@ def missing_redis_dependencies(*_, **__): StrictRedis = Sentinel = missing_redis_dependencies -def _bytes_to_string(maybe_bytes, encode='utf-8'): - if type(maybe_bytes).__name__ == 'bytes': - return 
str(maybe_bytes, encode) - return maybe_bytes - - class RedisAdapterException(Exception): """Exception to be thrown when a redis command fails with an exception.""" @@ -135,7 +129,7 @@ def keys(self, pattern): """Mimic original redis function but using user custom prefix.""" try: return [ - _bytes_to_string(key) + key for key in self._prefix_helper.remove_prefix(self._decorated.keys(self._prefix_helper.add_prefix(pattern))) ] except RedisError as exc: @@ -153,7 +147,7 @@ def set(self, name, value, *args, **kwargs): def get(self, name): """Mimic original redis function but using user custom prefix.""" try: - return _bytes_to_string(self._decorated.get(self._prefix_helper.add_prefix(name))) + return self._decorated.get(self._prefix_helper.add_prefix(name)) except RedisError as exc: raise RedisAdapterException('Error executing get operation') from exc @@ -189,7 +183,7 @@ def mget(self, names): """Mimic original redis function but using user custom prefix.""" try: return [ - _bytes_to_string(item) + item for item in self._decorated.mget(self._prefix_helper.add_prefix(names)) ] except RedisError as exc: @@ -199,7 +193,7 @@ def smembers(self, name): """Mimic original redis function but using user custom prefix.""" try: return [ - _bytes_to_string(item) + item for item in self._decorated.smembers(self._prefix_helper.add_prefix(name)) ] except RedisError as exc: @@ -243,7 +237,7 @@ def hset(self, name, key, value): def hget(self, name, key): """Mimic original redis function but using user custom prefix.""" try: - return _bytes_to_string(self._decorated.hget(self._prefix_helper.add_prefix(name), key)) + return self._decorated.hget(self._prefix_helper.add_prefix(name), key) except RedisError as exc: raise RedisAdapterException('Error executing hget operation') from exc @@ -257,7 +251,7 @@ def incr(self, name, amount=1): def getset(self, name, value): """Mimic original redis function but using user custom prefix.""" try: - return _bytes_to_string(self._decorated.getset(self._prefix_helper.add_prefix(name), value)) + return self._decorated.getset(self._prefix_helper.add_prefix(name), value) except RedisError as exc: raise RedisAdapterException('Error executing getset operation') from exc @@ -278,7 +272,7 @@ def expire(self, key, value): def rpop(self, key): """Mimic original redis function but using user custom prefix.""" try: - return _bytes_to_string(self._decorated.rpop(self._prefix_helper.add_prefix(key))) + return self._decorated.rpop(self._prefix_helper.add_prefix(key)) except RedisError as exc: raise RedisAdapterException('Error executing rpop operation') from exc @@ -359,9 +353,9 @@ def _build_default_client(config): # pylint: disable=too-many-locals unix_socket_path = config.get('redisUnixSocketPath', None) encoding = config.get('redisEncoding', 'utf-8') encoding_errors = config.get('redisEncodingErrors', 'strict') - charset = config.get('redisCharset', None) + charset = config.get('redisCharset', 'utf-8') errors = config.get('redisErrors', None) - decode_responses = config.get('redisDecodeResponses', False) + decode_responses = config.get('redisDecodeResponses', True) retry_on_timeout = config.get('redisRetryOnTimeout', False) ssl = config.get('redisSsl', False) ssl_keyfile = config.get('redisSslKeyfile', None) @@ -438,7 +432,7 @@ def _build_sentinel_client(config): # pylint: disable=too-many-locals connection_pool = config.get('redisConnectionPool', None) encoding = config.get('redisEncoding', 'utf-8') encoding_errors = config.get('redisEncodingErrors', 'strict') - decode_responses = 
config.get('redisDecodeResponses', False) + decode_responses = config.get('redisDecodeResponses', True) retry_on_timeout = config.get('redisRetryOnTimeout', False) max_connections = config.get('redisMaxConnections', None) prefix = config.get('redisPrefix') diff --git a/tests/.DS_Store b/tests/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..ab1b61f97e8de91e1f5ea7ca39c7e9ac4e976bce GIT binary patch literal 6148 zcmeHKJ8Hu~5S?*c2-K)dxmU;y79pR&7f2vM7=jGOPOU2E%F*)XLqNzWT(~iBVCL=4 z&Rd~ZXfz_C{pI~dq!W<_ZYbXt)@JACE8Ap7fpFY0%1PejEC)HRs;?)E+oFB~>tn&6 zzV@f-^Ei!h`*k|ARDcRl0V+TRsKAX1Snq{3Z6G5RpaN9jPXYTr6u4ncoCE#Sf#4$m zaEr7X);>!BizR?HaSlWVra=V;RddA9pd(+ht|rcbK^M*8L-Wa+6N>uNaewi0(Hh7| z1*pJMfv4DZ*8g|#f9C%si90Gl1s+NP-46Sq$17!TUA>(3+5&%tTg?yL3~Q$#csmAq iJI2P^@v|32U9mOJYvLT}bmW~5| literal 0 HcmV?d00001 diff --git a/tests/client/test_factory.py b/tests/client/test_factory.py index 7d9136c8..997540e5 100644 --- a/tests/client/test_factory.py +++ b/tests/client/test_factory.py @@ -81,9 +81,7 @@ def test_redis_client_creation(self, mocker): 'redisSocketKeepaliveOptions': False, 'redisConnectionPool': False, 'redisUnixSocketPath': '/some_path', - 'redisEncoding': 'ascii', 'redisEncodingErrors': 'non-strict', - 'redisCharset': 'ascii', 'redisErrors': True, 'redisDecodeResponses': True, 'redisRetryOnTimeout': True, @@ -120,9 +118,9 @@ def test_redis_client_creation(self, mocker): socket_keepalive_options=False, connection_pool=False, unix_socket_path='/some_path', - encoding='ascii', + encoding='utf-8', encoding_errors='non-strict', - charset='ascii', + charset='utf-8', errors=True, decode_responses=True, retry_on_timeout=True, diff --git a/tests/integration/test_client_e2e.py b/tests/integration/test_client_e2e.py index 0a7e8ff6..9723f655 100644 --- a/tests/integration/test_client_e2e.py +++ b/tests/integration/test_client_e2e.py @@ -12,10 +12,11 @@ InMemorySegmentStorage, InMemorySplitStorage, InMemoryTelemetryStorage from splitio.storage.redis import RedisEventsStorage, RedisImpressionsStorage, \ RedisSplitStorage, RedisSegmentStorage, RedisTelemetryStorage -from splitio.storage.adapters.redis import RedisAdapter +from splitio.storage.adapters.redis import build, RedisAdapter from splitio.models import splits, segments from splitio.engine.impressions import Manager as ImpressionsManager, ImpressionsMode from splitio.recorder.recorder import StandardRecorder, PipelinedRecorder +from splitio.client.config import DEFAULT_CONFIG class InMemoryIntegrationTests(object): @@ -489,7 +490,7 @@ class RedisIntegrationTests(object): def setup_method(self): """Prepare storages with test data.""" metadata = SdkMetadata('python-1.2.3', 'some_ip', 'some_name') - redis_client = RedisAdapter(StrictRedis()) + redis_client = build(DEFAULT_CONFIG.copy()) split_storage = RedisSplitStorage(redis_client) segment_storage = RedisSegmentStorage(redis_client) @@ -771,7 +772,7 @@ class RedisWithCacheIntegrationTests(RedisIntegrationTests): def setup_method(self): """Prepare storages with test data.""" metadata = SdkMetadata('python-1.2.3', 'some_ip', 'some_name') - redis_client = RedisAdapter(StrictRedis()) + redis_client = build(DEFAULT_CONFIG.copy()) split_storage = RedisSplitStorage(redis_client, True) segment_storage = RedisSegmentStorage(redis_client) diff --git a/tests/storage/adapters/test_redis_adapter.py b/tests/storage/adapters/test_redis_adapter.py index 4d948d28..a159a918 100644 --- a/tests/storage/adapters/test_redis_adapter.py +++ b/tests/storage/adapters/test_redis_adapter.py @@ -90,7 +90,7 @@ 
def test_adapter_building(self, mocker): 'redisUnixSocketPath': '/tmp/socket', 'redisEncoding': 'utf-8', 'redisEncodingErrors': 'strict', - 'redisCharset': 'ascii', + 'redisCharset': 'utf-8', 'redisErrors': 'abc', 'redisDecodeResponses': True, 'redisRetryOnTimeout': True, @@ -117,7 +117,7 @@ def test_adapter_building(self, mocker): unix_socket_path='/tmp/socket', encoding='utf-8', encoding_errors='strict', - charset='ascii', + charset='utf-8', errors='abc', decode_responses=True, retry_on_timeout=True, @@ -142,7 +142,7 @@ def test_adapter_building(self, mocker): 'redisUnixSocketPath': '/tmp/socket', 'redisEncoding': 'utf-8', 'redisEncodingErrors': 'strict', - 'redisCharset': 'ascii', + 'redisCharset': 'utf-8', 'redisErrors': 'abc', 'redisDecodeResponses': True, 'redisRetryOnTimeout': True, From 2173cc42af3e15a04e472cba14d37c9da24fd56e Mon Sep 17 00:00:00 2001 From: Matias Melograno Date: Thu, 8 Apr 2021 13:52:01 -0300 Subject: [PATCH 09/17] removed DS --- tests/.DS_Store | Bin 6148 -> 0 bytes 1 file changed, 0 insertions(+), 0 deletions(-) delete mode 100644 tests/.DS_Store diff --git a/tests/.DS_Store b/tests/.DS_Store deleted file mode 100644 index ab1b61f97e8de91e1f5ea7ca39c7e9ac4e976bce..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 6148 zcmeHKJ8Hu~5S?*c2-K)dxmU;y79pR&7f2vM7=jGOPOU2E%F*)XLqNzWT(~iBVCL=4 z&Rd~ZXfz_C{pI~dq!W<_ZYbXt)@JACE8Ap7fpFY0%1PejEC)HRs;?)E+oFB~>tn&6 zzV@f-^Ei!h`*k|ARDcRl0V+TRsKAX1Snq{3Z6G5RpaN9jPXYTr6u4ncoCE#Sf#4$m zaEr7X);>!BizR?HaSlWVra=V;RddA9pd(+ht|rcbK^M*8L-Wa+6N>uNaewi0(Hh7| z1*pJMfv4DZ*8g|#f9C%si90Gl1s+NP-46Sq$17!TUA>(3+5&%tTg?yL3~Q$#csmAq iJI2P^@v|32U9mOJYvLT}bmW~5| From 6aa18c2a44ebab82a2c313e1a7c1e8df5de41b08 Mon Sep 17 00:00:00 2001 From: Matias Melograno Date: Thu, 8 Apr 2021 13:53:49 -0300 Subject: [PATCH 10/17] excluding DS_Store --- .gitignore | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.gitignore b/.gitignore index 72f8848f..31959c04 100644 --- a/.gitignore +++ b/.gitignore @@ -71,3 +71,5 @@ target/ # vim backup files *.swp + +.DS_Store \ No newline at end of file From 0ad7b5504ac0b08fee7ca43212be31c79a9a2c0d Mon Sep 17 00:00:00 2001 From: Matias Melograno Date: Mon, 12 Apr 2021 12:09:58 -0300 Subject: [PATCH 11/17] updated --- CHANGES.txt | 3 +++ splitio/version.py | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/CHANGES.txt b/CHANGES.txt index dab9cbe9..c0c4fdbd 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -1,3 +1,6 @@ +9.0.0 () + - BREAKING CHANGE: Deprecated Python2. + 8.4.0 (Jan 6, 2021) - Added RecordStats for supporting pipelined recording in redis when treatment call is made. - Added hooks support for preforked servers. diff --git a/splitio/version.py b/splitio/version.py index b0193b93..e2921057 100644 --- a/splitio/version.py +++ b/splitio/version.py @@ -1 +1 @@ -__version__ = '8.4.0' +__version__ = '9.0.0-rc1' From 919473e67b487c003bf1ee0dfafe7e8bd41f5b8e Mon Sep 17 00:00:00 2001 From: Matias Melograno Date: Wed, 21 Apr 2021 12:07:16 -0300 Subject: [PATCH 12/17] preparing release 9.0.0 --- CHANGES.txt | 4 +++- splitio/version.py | 2 +- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/CHANGES.txt b/CHANGES.txt index c0c4fdbd..fdd043af 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -1,5 +1,7 @@ -9.0.0 () +9.0.0 (Apr 21, 2021) - BREAKING CHANGE: Deprecated Python2. + - Removed six, future and futures libs for compatibility between Python2 and Python3. + - Updated strings encoding to utf-8 by default for Redis. 
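 Note: with 'redisDecodeResponses' and utf-8 encoding now the defaults, reads made
 through the Redis adapter come back as str instead of bytes. A minimal sketch,
 assuming the build() helper and config keys shown elsewhere in this patch series;
 the host, port and sample key below are placeholders only:

     from splitio.storage.adapters.redis import build

     adapter = build({
         'redisHost': 'localhost',   # placeholder connection details
         'redisPort': 6379,
         'redisDb': 0,
         # 'redisDecodeResponses': True and 'redisEncoding': 'utf-8' are now the
         # defaults, so no manual bytes-to-str conversion is needed on reads.
     })
     value = adapter.get('some.key')  # hypothetical key; returns a decoded str or None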
8.4.0 (Jan 6, 2021) - Added RecordStats for supporting pipelined recording in redis when treatment call is made. diff --git a/splitio/version.py b/splitio/version.py index e2921057..33de8d16 100644 --- a/splitio/version.py +++ b/splitio/version.py @@ -1 +1 @@ -__version__ = '9.0.0-rc1' +__version__ = '9.0.0' From 190df930e776e82f400d5f503cb7a48c94780333 Mon Sep 17 00:00:00 2001 From: Mauro Sanz <51236193+sanzmauro@users.noreply.github.com> Date: Mon, 26 Apr 2021 18:46:54 -0300 Subject: [PATCH 13/17] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 67a67427..1bff41b8 100644 --- a/README.md +++ b/README.md @@ -57,7 +57,7 @@ Split has built and maintains SDKs for: * Java [Github](https://github.com/splitio/java-client) [Docs](https://help.split.io/hc/en-us/articles/360020405151-Java-SDK) * Javascript [Github](https://github.com/splitio/javascript-client) [Docs](https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK) * Node [Github](https://github.com/splitio/javascript-client) [Docs](https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK) -* .NET [Github](https://github.com/splitio/.net-core-client) [Docs](https://help.split.io/hc/en-us/articles/360020240172--NET-SDK) +* .NET [Github](https://github.com/splitio/dotnet-client) [Docs](https://help.split.io/hc/en-us/articles/360020240172--NET-SDK) * Ruby [Github](https://github.com/splitio/ruby-client) [Docs](https://help.split.io/hc/en-us/articles/360020673251-Ruby-SDK) * PHP [Github](https://github.com/splitio/php-client) [Docs](https://help.split.io/hc/en-us/articles/360020350372-PHP-SDK) * Python [Github](https://github.com/splitio/python-client) [Docs](https://help.split.io/hc/en-us/articles/360020359652-Python-SDK) From 9283a570d729bbf16b3c4382422c41049fb80172 Mon Sep 17 00:00:00 2001 From: Matias Melograno Date: Tue, 27 Apr 2021 13:56:42 -0300 Subject: [PATCH 14/17] removing old uwsgi cache --- CHANGES.txt | 3 +- splitio/client/config.py | 3 - splitio/client/factory.py | 36 -- splitio/client/util.py | 1 - splitio/engine/cache/lru.py | 4 +- splitio/engine/impressions.py | 1 - splitio/models/datatypes.py | 5 + splitio/models/notification.py | 59 +- splitio/models/token.py | 18 +- splitio/push/manager.py | 2 +- splitio/storage/adapters/cache_trait.py | 8 +- splitio/storage/adapters/util.py | 4 +- splitio/storage/adapters/uwsgi_cache.py | 133 ----- splitio/storage/uwsgi.py | 746 ------------------------ splitio/sync/segment.py | 1 - splitio/tasks/__init__.py | 1 + splitio/tasks/segment_sync.py | 1 - splitio/tasks/telemetry_sync.py | 2 - splitio/tasks/uwsgi_wrappers.py | 192 ------ splitio/version.py | 2 +- tests/client/test_config.py | 1 - tests/client/test_factory.py | 24 +- tests/client/test_input_validator.py | 6 - tests/storage/test_uwsgi.py | 320 ---------- tests/tasks/test_uwsgi_wrappers.py | 137 ----- 25 files changed, 59 insertions(+), 1651 deletions(-) delete mode 100644 splitio/storage/adapters/uwsgi_cache.py delete mode 100644 splitio/storage/uwsgi.py delete mode 100644 splitio/tasks/uwsgi_wrappers.py delete mode 100644 tests/storage/test_uwsgi.py delete mode 100644 tests/tasks/test_uwsgi_wrappers.py diff --git a/CHANGES.txt b/CHANGES.txt index 43f26b69..2ba02431 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -1,7 +1,8 @@ -9.0.0 (Apr 21, 2021) +9.0.0 (Apr 28, 2021) - BREAKING CHANGE: Deprecated Python2. - Removed six, future and futures libs for compatibility between Python2 and Python3. 
- Updated strings encoding to utf-8 by default for Redis. + - Deprecated uWSGI cache. 8.4.1 (Apr 16, 2021) - Bumped mmh3cffi dependency which now requires c99 flag to build. diff --git a/splitio/client/config.py b/splitio/client/config.py index 98dbfec8..0bdb9843 100644 --- a/splitio/client/config.py +++ b/splitio/client/config.py @@ -74,9 +74,6 @@ def _parse_operation_mode(apikey, config): if 'redisHost' in config or 'redisSentinels' in config: return 'redis-consumer' - if 'uwsgiClient' in config: - return 'uwsgi-consumer' - return 'inmemory-standalone' diff --git a/splitio/client/factory.py b/splitio/client/factory.py index 28389094..e2e56990 100644 --- a/splitio/client/factory.py +++ b/splitio/client/factory.py @@ -19,9 +19,6 @@ from splitio.storage.adapters import redis from splitio.storage.redis import RedisSplitStorage, RedisSegmentStorage, RedisImpressionsStorage, \ RedisEventsStorage, RedisTelemetryStorage -from splitio.storage.adapters.uwsgi_cache import get_uwsgi -from splitio.storage.uwsgi import UWSGIEventStorage, UWSGIImpressionStorage, UWSGISegmentStorage, \ - UWSGISplitStorage, UWSGITelemetryStorage # APIs from splitio.api.client import HttpClient @@ -420,36 +417,6 @@ def _build_redis_factory(api_key, cfg): ) -def _build_uwsgi_factory(api_key, cfg): - """Build and return a split factory with redis-based storage.""" - sdk_metadata = util.get_metadata(cfg) - uwsgi_adapter = get_uwsgi() - storages = { - 'splits': UWSGISplitStorage(uwsgi_adapter), - 'segments': UWSGISegmentStorage(uwsgi_adapter), - 'impressions': UWSGIImpressionStorage(uwsgi_adapter), - 'events': UWSGIEventStorage(uwsgi_adapter), - 'telemetry': UWSGITelemetryStorage(uwsgi_adapter) - } - recorder = StandardRecorder( - ImpressionsManager(cfg['impressionsMode'], True, - _wrap_impression_listener(cfg['impressionListener'], sdk_metadata)), - storages['telemetry'], - storages['events'], - storages['impressions'], - ) - _LOGGER.warning( - "Beware: uwsgi-cache based operation mode is soon to be deprecated. Please consider " + - "redis if you need a centralized point of syncrhonization, or in-memory (with preforking " + - "support enabled) if running uwsgi with a master and several http workers)") - return SplitFactory( - api_key, - storages, - cfg['labelsEnabled'], - recorder, - ) - - def _build_localhost_factory(cfg): """Build and return a localhost factory for testing/development purposes.""" storages = { @@ -521,9 +488,6 @@ def get_factory(api_key, **kwargs): if config['operationMode'] == 'redis-consumer': return _build_redis_factory(api_key, config) - if config['operationMode'] == 'uwsgi-consumer': - return _build_uwsgi_factory(api_key, config) - return _build_in_memory_factory( api_key, config, diff --git a/splitio/client/util.py b/splitio/client/util.py index f37ffe6e..040a09ae 100644 --- a/splitio/client/util.py +++ b/splitio/client/util.py @@ -1,6 +1,5 @@ """General purpose SDK utilities.""" -import inspect import socket from collections import namedtuple from splitio.version import __version__ diff --git a/splitio/engine/cache/lru.py b/splitio/engine/cache/lru.py index d1a3395c..2f720a35 100644 --- a/splitio/engine/cache/lru.py +++ b/splitio/engine/cache/lru.py @@ -5,7 +5,7 @@ DEFAULT_MAX_SIZE = 5000 -class SimpleLruCache(object): #pylint: disable=too-many-instance-attributes +class SimpleLruCache(object): # pylint: disable=too-many-instance-attributes """ Key/Value local memory cache. with expiration & LRU eviction. 
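# The SimpleLruCache hunks here only adjust pylint comment spacing; the class keeps
# the LRU-with-expiration behaviour described in the docstring above. For reference,
# a minimal, self-contained sketch of the same eviction policy on top of
# collections.OrderedDict (an illustration of the concept, not the SDK's
# linked-list implementation):

import time
from collections import OrderedDict


class LruTtlCacheSketch(object):
    """Illustrative cache: evicts the least-recently-used entry and expires old ones."""

    def __init__(self, max_size=5000, max_age_seconds=300):  # 300s is a placeholder TTL
        self._items = OrderedDict()   # key -> (value, stored_at); order tracks recency
        self._max_size = max_size
        self._max_age = max_age_seconds

    def put(self, key, value):
        """Store a value, evicting the least-recently-used entry when over capacity."""
        self._items[key] = (value, time.time())
        self._items.move_to_end(key)         # most recently used lives at the tail
        if len(self._items) > self._max_size:
            self._items.popitem(last=False)  # the head of the dict is the LRU entry

    def get(self, key):
        """Return a non-expired value and refresh its recency, or None on a miss."""
        entry = self._items.get(key)
        if entry is None:
            return None
        value, stored_at = entry
        if time.time() - stored_at > self._max_age:
            del self._items[key]             # expired: drop it and report a miss
            return None
        self._items.move_to_end(key)
        return value

# Usage: cache = LruTtlCacheSketch(max_size=2); cache.put('a', 1); cache.get('a') == 1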
@@ -21,7 +21,7 @@ class SimpleLruCache(object): #pylint: disable=too-many-instance-attributes None <---next--- || node || <---next--- || node || ... <---next--- || node || """ - class _Node(object): #pylint: disable=too-few-public-methods + class _Node(object): # pylint: disable=too-few-public-methods """Links to previous an next items in the circular list.""" def __init__(self, key, value, previous_element, next_element): diff --git a/splitio/engine/impressions.py b/splitio/engine/impressions.py index a1184867..c8720b5d 100644 --- a/splitio/engine/impressions.py +++ b/splitio/engine/impressions.py @@ -1,5 +1,4 @@ """Split evaluator module.""" -import logging import threading from collections import defaultdict, namedtuple from enum import Enum diff --git a/splitio/models/datatypes.py b/splitio/models/datatypes.py index 7cbe466a..751c2908 100644 --- a/splitio/models/datatypes.py +++ b/splitio/models/datatypes.py @@ -1,5 +1,6 @@ """Datatypes converters for matchers.""" + def ts_truncate_seconds(timestamp): """ Set seconds to zero in a timestamp. @@ -12,6 +13,7 @@ def ts_truncate_seconds(timestamp): """ return timestamp - (timestamp % 60) + def ts_truncate_time(timestamp): """ Set time to zero in a timestamp. @@ -24,6 +26,7 @@ def ts_truncate_time(timestamp): """ return timestamp - (timestamp % 86400) + def java_ts_to_secs(java_ts): """ Convert java timestamp into unix timestamp. @@ -36,6 +39,7 @@ def java_ts_to_secs(java_ts): """ return java_ts / 1000 + def java_ts_truncate_seconds(java_ts): """ Set seconds to zero in a timestamp. @@ -48,6 +52,7 @@ def java_ts_truncate_seconds(java_ts): """ return ts_truncate_seconds(java_ts_to_secs(java_ts)) + def java_ts_truncate_time(java_ts): """ Set time to zero in a timestamp. diff --git a/splitio/models/notification.py b/splitio/models/notification.py index 68915130..ebe57175 100644 --- a/splitio/models/notification.py +++ b/splitio/models/notification.py @@ -40,7 +40,7 @@ def __init__(self, channel, notification_type, control_type): self._channel = channel self._notification_type = Type(notification_type) self._control_type = Control(control_type) - + @property def channel(self): return self._channel @@ -87,7 +87,7 @@ def change_number(self): @property def notification_type(self): return self._notification_type - + @property def segment_name(self): return self._segment_name @@ -111,7 +111,7 @@ def __init__(self, channel, notification_type, change_number): self._channel = channel self._notification_type = Type(notification_type) self._change_number = change_number - + @property def channel(self): return self._channel @@ -149,23 +149,23 @@ def __init__(self, channel, notification_type, change_number, default_treatment, self._change_number = change_number self._default_treatment = default_treatment self._split_name = split_name - + @property def channel(self): return self._channel - + @property def change_number(self): return self._change_number - + @property def default_treatment(self): return self._default_treatment - + @property def notification_type(self): return self._notification_type - + @property def split_name(self): return self._split_name @@ -178,25 +178,26 @@ def split_name(self): Type.CONTROL: lambda c, d: ControlNotification(c, Type.CONTROL, d['controlType']) } -def wrap_notification(raw_data, channel): - """ - Parse notification from raw notification payload - :param raw_data: data - :type raw_data: str - :param channel: Channel of incoming notification - :type channel: str - """ - try: - if channel is None: - raise ValueError("channel 
cannot be None.") - raw_data = json.loads(raw_data) - notification_type = Type(raw_data['type']) - mapper = _NOTIFICATION_MAPPERS[notification_type] - return mapper(channel, raw_data) - except ValueError: - raise ValueError("Wrong notification type received.") - except KeyError: - raise KeyError("Could not parse notification.") - except TypeError: - raise TypeError("Wrong JSON format.") +def wrap_notification(raw_data, channel): + """ + Parse notification from raw notification payload + + :param raw_data: data + :type raw_data: str + :param channel: Channel of incoming notification + :type channel: str + """ + try: + if channel is None: + raise ValueError("channel cannot be None.") + raw_data = json.loads(raw_data) + notification_type = Type(raw_data['type']) + mapper = _NOTIFICATION_MAPPERS[notification_type] + return mapper(channel, raw_data) + except ValueError: + raise ValueError("Wrong notification type received.") + except KeyError: + raise KeyError("Could not parse notification.") + except TypeError: + raise TypeError("Wrong JSON format.") diff --git a/splitio/models/token.py b/splitio/models/token.py index 3c050d57..33c4f48c 100644 --- a/splitio/models/token.py +++ b/splitio/models/token.py @@ -3,6 +3,7 @@ import base64 import json + class Token(object): """Token object class.""" @@ -30,27 +31,27 @@ def __init__(self, push_enabled, token, channels, exp, iat): self._channels = channels self._exp = exp self._iat = iat - + @property def push_enabled(self): """Return push_enabled""" return self._push_enabled - + @property def token(self): """Return token""" return self._token - + @property def channels(self): """Return channels""" return self._channels - + @property def exp(self): """Return exp""" return self._exp - + @property def iat(self): """Return iat""" @@ -66,15 +67,16 @@ def decode_token(raw_token): push_enabled = raw_token['pushEnabled'] if not push_enabled or len(token.strip()) == 0: return None, None, None - + token_parts = token.split('.') if len(token_parts) < 2: return None, None, None - + to_decode = token_parts[1] - decoded_payload = base64.b64decode(to_decode + '='*(-len(to_decode) % 4)) + decoded_payload = base64.b64decode(to_decode + '='*(-len(to_decode) % 4)) return push_enabled, token, json.loads(decoded_payload) + def from_raw(raw_token): """ Parse a new token from a raw token response. diff --git a/splitio/push/manager.py b/splitio/push/manager.py index 55814c7b..1e529b66 100644 --- a/splitio/push/manager.py +++ b/splitio/push/manager.py @@ -114,7 +114,7 @@ def _event_handler(self, event): try: handle(parsed) - except Exception: #pylint:disable=broad-except + except Exception: # pylint:disable=broad-except _LOGGER.error('something went wrong when processing message of type %s', parsed.event_type) _LOGGER.debug(str(parsed), exc_info=True) diff --git a/splitio/storage/adapters/cache_trait.py b/splitio/storage/adapters/cache_trait.py index d3db3b67..399ee383 100644 --- a/splitio/storage/adapters/cache_trait.py +++ b/splitio/storage/adapters/cache_trait.py @@ -9,7 +9,7 @@ DEFAULT_MAX_SIZE = 100 -class LocalMemoryCache(object): #pylint: disable=too-many-instance-attributes +class LocalMemoryCache(object): # pylint: disable=too-many-instance-attributes """ Key/Value local memory cache. with expiration & LRU eviction. @@ -25,10 +25,10 @@ class LocalMemoryCache(object): #pylint: disable=too-many-instance-attributes None <---next--- || node || <---next--- || node || ... 
<---next--- || node || """ - class _Node(object): #pylint: disable=too-few-public-methods + class _Node(object): # pylint: disable=too-few-public-methods """Links to previous an next items in the circular list.""" - def __init__(self, key, value, last_update, previous_element, next_element): #pylint: disable=too-many-arguments + def __init__(self, key, value, last_update, previous_element, next_element): # pylint: disable=too-many-arguments """Class constructor.""" self.key = key # we also keep the key for O(1) access when removing the LRU. self.value = value @@ -186,7 +186,7 @@ def _decorator(user_function): _cache = LocalMemoryCache(key_func, user_function, max_age_seconds, max_size) # The lambda below IS necessary, otherwise update_wrapper fails since the function # is an instance method and has no reference to the __module__ namespace. - wrapper = lambda *args, **kwargs: _cache.get(*args, **kwargs) #pylint: disable=unnecessary-lambda + wrapper = lambda *args, **kwargs: _cache.get(*args, **kwargs) # pylint: disable=unnecessary-lambda return update_wrapper(wrapper, user_function) return _decorator diff --git a/splitio/storage/adapters/util.py b/splitio/storage/adapters/util.py index f8602602..cf80a2ad 100644 --- a/splitio/storage/adapters/util.py +++ b/splitio/storage/adapters/util.py @@ -1,7 +1,7 @@ """Custom utilities.""" -class DynamicDecorator(object): #pylint: disable=too-few-public-methods +class DynamicDecorator(object): # pylint: disable=too-few-public-methods """ Decorator that will inject a decorator during class construction. @@ -65,7 +65,7 @@ def __call__(self, to_decorate): positional_args_lambdas = self._positional_args_lambdas keyword_args_lambdas = self._keyword_args_lambdas - class _decorated(to_decorate): #pylint: disable=too-few-public-methods + class _decorated(to_decorate): # pylint: disable=too-few-public-methods """ Decorated class wrapper. diff --git a/splitio/storage/adapters/uwsgi_cache.py b/splitio/storage/adapters/uwsgi_cache.py deleted file mode 100644 index 3cf41150..00000000 --- a/splitio/storage/adapters/uwsgi_cache.py +++ /dev/null @@ -1,133 +0,0 @@ -"""UWSGI Cache Storage adapter module.""" - -import time - -try: - # uwsgi is loaded at runtime by uwsgi app. - import uwsgi -except ImportError: - def missing_uwsgi_dependencies(*args, **kwargs): # pylint: disable=unused-argument - """Only complain for missing deps if they're used.""" - raise NotImplementedError('Missing uWSGI support dependencies.') - uwsgi = missing_uwsgi_dependencies - -# Cache used for locking & signaling keys -_SPLITIO_LOCK_CACHE_NAMESPACE = 'splitio_locks' - -# Cache where split definitions are stored -_SPLITIO_SPLITS_CACHE_NAMESPACE = 'splitio_splits' - -# Cache where segments are stored -_SPLITIO_SEGMENTS_CACHE_NAMESPACE = 'splitio_segments' - -# Cache where impressions are stored -_SPLITIO_IMPRESSIONS_CACHE_NAMESPACE = 'splitio_impressions' - -# Cache where metrics are stored -_SPLITIO_METRICS_CACHE_NAMESPACE = 'splitio_metrics' - -# Cache where events are stored (1 key with lots of blocks) -_SPLITIO_EVENTS_CACHE_NAMESPACE = 'splitio_events' - -# Cache where changeNumbers are stored -_SPLITIO_CHANGE_NUMBERS = 'splitio_changeNumbers' - -# Cache with a big block size used for lists -_SPLITIO_MISC_NAMESPACE = 'splitio_misc' - - -class UWSGILock(object): - """Context manager to be used for locking a key in the cache.""" - - def __init__(self, adapter, key, overwrite_lock_seconds=5): - """ - Initialize a lock with the key `key` and waits up to `overwrite_lock_seconds` to release. 
- - :param key: Key to be used. - :type key: str - - :param overwrite_lock_seconds: How many seconds to wait before force-releasing. - :type overwrite_lock_seconds: int - """ - self._key = key - self._overwrite_lock_seconds = overwrite_lock_seconds - self._uwsgi = adapter - - def __enter__(self): - """Loop until the lock is manually released or timeout occurs.""" - initial_time = time.time() - while True: - if not self._uwsgi.cache_exists(self._key, _SPLITIO_LOCK_CACHE_NAMESPACE): - self._uwsgi.cache_set(self._key, str('locked'), 0, _SPLITIO_LOCK_CACHE_NAMESPACE) - return - else: - if time.time() - initial_time > self._overwrite_lock_seconds: - return - time.sleep(0.1) - - def __exit__(self, *args): - """Remove lock.""" - self._uwsgi.cache_del(self._key, _SPLITIO_LOCK_CACHE_NAMESPACE) - - -class UWSGICacheEmulator(object): - """UWSGI mock.""" - - def __init__(self): - """ - UWSGI Cache Emulator for unit tests. Implements uwsgi cache framework interface. - - http://uwsgi-docs.readthedocs.io/en/latest/Caching.html#accessing-the-cache-from-your-applications-using-the-cache-api - """ - self._cache = dict() - - @staticmethod - def _check_string_data_type(value): - if type(value).__name__ == 'str': - return True - raise TypeError( - 'The value to add into uWSGI cache must be string and %s given' % type(value).__name__ - ) - - def cache_get(self, key, cache_namespace='default'): - """Get an element from cache.""" - if self.cache_exists(key, cache_namespace): - return self._cache[cache_namespace][key] - return None - - def cache_set(self, key, value, expires=0, cache_namespace='default'): # pylint: disable=unused-argument - """Set an elemen in the cache.""" - self._check_string_data_type(value) - - if cache_namespace in self._cache: - self._cache[cache_namespace][key] = value - else: - self._cache[cache_namespace] = {key: value} - - def cache_update(self, key, value, expires=0, cache_namespace='default'): - """Update an element.""" - self.cache_set(key, value, expires, cache_namespace) - - def cache_exists(self, key, cache_namespace='default'): - """Return whether the element exists.""" - if cache_namespace in self._cache: - if key in self._cache[cache_namespace]: - return True - return False - - def cache_del(self, key, cache_namespace='default'): - """Delete an item from the cache.""" - if cache_namespace in self._cache: - self._cache[cache_namespace].pop(key, None) - - def cache_clear(self, cache_namespace='default'): - """Delete all elements in cache.""" - self._cache.pop(cache_namespace, None) - - -def get_uwsgi(emulator=False): - """Return a uwsgi imported module or an emulator to use in unit test.""" - if emulator: - return UWSGICacheEmulator() - - return uwsgi diff --git a/splitio/storage/uwsgi.py b/splitio/storage/uwsgi.py deleted file mode 100644 index 47cc44e6..00000000 --- a/splitio/storage/uwsgi.py +++ /dev/null @@ -1,746 +0,0 @@ -"""UWSGI Cache based storages implementation module.""" -import logging -import json - -from splitio.storage import SplitStorage, SegmentStorage, ImpressionStorage, EventStorage, \ - TelemetryStorage -from splitio.models import splits, segments -from splitio.models.impressions import Impression -from splitio.models.events import Event -from splitio.storage.adapters.uwsgi_cache import _SPLITIO_CHANGE_NUMBERS, \ - _SPLITIO_EVENTS_CACHE_NAMESPACE, _SPLITIO_IMPRESSIONS_CACHE_NAMESPACE, \ - _SPLITIO_METRICS_CACHE_NAMESPACE, _SPLITIO_MISC_NAMESPACE, UWSGILock, \ - _SPLITIO_SEGMENTS_CACHE_NAMESPACE, _SPLITIO_SPLITS_CACHE_NAMESPACE, \ - _SPLITIO_LOCK_CACHE_NAMESPACE 
- - -_LOGGER = logging.getLogger(__name__) - - -class UWSGISplitStorage(SplitStorage): - """UWSGI-Cache based implementation of a split storage.""" - - _KEY_TEMPLATE = 'split.{suffix}' - _KEY_TILL = 'splits.till' - _KEY_FEATURE_LIST = 'splits.list' - _KEY_FEATURE_LIST_LOCK = 'splits.list.lock' - _KEY_TRAFFIC_TYPES = 'splits.traffic_types' - _KEY_TRAFFIC_TYPES_LOCK = 'splits.traffic_types.lock' - _OVERWRITE_LOCK_SECONDS = 5 - - def __init__(self, uwsgi_entrypoint): - """ - Class constructor. - - :param uwsgi_entrypoint: UWSGI module. Can be the actual module or a mock. - :type uwsgi_entrypoint: module - """ - self._uwsgi = uwsgi_entrypoint - - def get(self, split_name): - """ - Retrieve a split. - - :param split_name: Name of the feature to fetch. - :type split_name: str - - :rtype: str - """ - raw = self._uwsgi.cache_get( - self._KEY_TEMPLATE.format(suffix=split_name), - _SPLITIO_SPLITS_CACHE_NAMESPACE - ) - to_return = splits.from_raw(json.loads(raw)) if raw is not None else None - if not to_return: - _LOGGER.warning("Trying to retrieve nonexistant split %s. Ignoring.", split_name) - return to_return - - def fetch_many(self, split_names): - """ - Retrieve splits. - - :param split_names: Names of the features to fetch. - :type split_name: list(str) - - :return: A dict with split objects parsed from queue. - :rtype: dict(split_name, splitio.models.splits.Split) - """ - return {split_name: self.get(split_name) for split_name in split_names} - - def put(self, split): - """ - Store a split. - - :param split: Split object to store - :type split: splitio.models.splits.Split - """ - self._uwsgi.cache_update( - self._KEY_TEMPLATE.format(suffix=split.name), - json.dumps(split.to_json()), - 0, - _SPLITIO_SPLITS_CACHE_NAMESPACE - ) - self._add_split_to_list(split.name) - self._increase_traffic_type_count(split.traffic_type_name) - - def remove(self, split_name): - """ - Remove a split from storage. - - :param split_name: Name of the feature to remove. - :type split_name: str - - :return: True if the split was found and removed. False otherwise. - :rtype: bool - """ - # We need to fetch the split to get the traffic type name prior to deleting. - fetched = self.get(split_name) - if fetched is None: - _LOGGER.warning( - "Tried to remove feature \"%s\" not present in cache. Ignoring.", split_name - ) - return - - result = self._uwsgi.cache_del( - self._KEY_TEMPLATE.format(suffix=split_name), - _SPLITIO_SPLITS_CACHE_NAMESPACE - ) - if result is not False: - _LOGGER.warning("Trying to delete nonexistant split %s. Ignoring.", split_name) - - self._remove_split_from_list(split_name) - self._decrease_traffic_type_count(fetched.traffic_type_name) - - return result - - def get_change_number(self): - """ - Retrieve latest split change number. - - :rtype: int - """ - try: - return json.loads(self._uwsgi.cache_get(self._KEY_TILL, _SPLITIO_CHANGE_NUMBERS)) - except TypeError: - return None - - def set_change_number(self, new_change_number): - """ - Set the latest change number. - - :param new_change_number: New change number. - :type new_change_number: int - """ - self._uwsgi.cache_update(self._KEY_TILL, str(new_change_number), 0, _SPLITIO_CHANGE_NUMBERS) - - def get_split_names(self): - """ - Return a list of all the split names. - - :return: List of split names in cache. 
- :rtype: list(str) - """ - if self._uwsgi.cache_exists(self._KEY_FEATURE_LIST, _SPLITIO_MISC_NAMESPACE): - try: - return json.loads( - self._uwsgi.cache_get(self._KEY_FEATURE_LIST, _SPLITIO_MISC_NAMESPACE) - ) - except TypeError: # Thrown by json.loads when passing none - pass # Fall back to default return statement (empty list) - return [] - - def get_all_splits(self): - """ - Return a list of all splits in cache. - - :return: List of splits. - :rtype: list(splitio.models.splits.Split) - """ - return [self.get(split_name) for split_name in self.get_split_names()] - - def is_valid_traffic_type(self, traffic_type_name): - """ - Return whether the traffic type exists in at least one split in cache. - - :param traffic_type_name: Traffic type to validate. - :type traffic_type_name: str - - :return: True if the traffic type is valid. False otherwise. - :rtype: bool - """ - try: - tts = json.loads( - self._uwsgi.cache_get(self._KEY_TRAFFIC_TYPES, _SPLITIO_MISC_NAMESPACE) - ) - return traffic_type_name in tts - except TypeError: - return False - - def _add_split_to_list(self, split_name): - """ - Add a specific split to the list we keep track of. - - :param split_name: Name of the split to add. - :type split_name: str - """ - with UWSGILock(self._uwsgi, self._KEY_FEATURE_LIST_LOCK): - try: - current = set(json.loads( - self._uwsgi.cache_get(self._KEY_FEATURE_LIST, _SPLITIO_MISC_NAMESPACE) - )) - except TypeError: - current = set() - current.add(split_name) - self._uwsgi.cache_update( - self._KEY_FEATURE_LIST, - json.dumps(list(current)), - 0, - _SPLITIO_MISC_NAMESPACE - ) - - def _remove_split_from_list(self, split_name): - """ - Remove a specific split from the list we keep track of. - - :param split_name: Name of the split to remove. - :type split_name: str - """ - with UWSGILock(self._uwsgi, self._KEY_FEATURE_LIST_LOCK): - try: - current = set(json.loads( - self._uwsgi.cache_get(self._KEY_FEATURE_LIST, _SPLITIO_MISC_NAMESPACE) - )) - current.remove(split_name) - self._uwsgi.cache_update( - self._KEY_FEATURE_LIST, - json.dumps(list(current)), - 0, - _SPLITIO_MISC_NAMESPACE - ) - except TypeError: - # Split list not found, no need to delete anything - pass - except KeyError: - # Split not found in list. nothing to do. - pass - - def _increase_traffic_type_count(self, traffic_type_name): - """ - Increase by 1 the count for a specific traffic type. - - :param traffic_type_name: Traffic type name to increase count. - :type traffic_type_name: str - """ - with UWSGILock(self._uwsgi, self._KEY_TRAFFIC_TYPES_LOCK): - try: - tts = json.loads( - self._uwsgi.cache_get(self._KEY_TRAFFIC_TYPES, _SPLITIO_MISC_NAMESPACE) - ) - tts[traffic_type_name] = tts.get(traffic_type_name, 0) + 1 - - except TypeError: - tts = {traffic_type_name: 1} - - self._uwsgi.cache_update( - self._KEY_TRAFFIC_TYPES, json.dumps(tts), 0, _SPLITIO_MISC_NAMESPACE - ) - - def _decrease_traffic_type_count(self, traffic_type_name): - """ - Decreaase by 1 the count for a specific traffic type. - - :param traffic_type_name: Traffic type name to decrease count. - :type traffic_type_name: str - """ - with UWSGILock(self._uwsgi, self._KEY_TRAFFIC_TYPES_LOCK): - try: - tts = json.loads( - self._uwsgi.cache_get(self._KEY_TRAFFIC_TYPES, _SPLITIO_MISC_NAMESPACE) - ) - tts[traffic_type_name] = tts.get(traffic_type_name, 0) - 1 - if tts[traffic_type_name] <= 0: - del tts[traffic_type_name] - except TypeError: - # Traffic type list not present. nothing to do here. 
- return - - self._uwsgi.cache_update( - self._KEY_TRAFFIC_TYPES, json.dumps(tts), 0, _SPLITIO_MISC_NAMESPACE - ) - - def kill_locally(self, split_name, default_treatment, change_number): - """ - Local kill for split - - :param split_name: name of the split to perform kill - :type split_name: str - :param default_treatment: name of the default treatment to return - :type default_treatment: str - :param change_number: change_number - :type change_number: int - """ - if self.get_change_number() > change_number: - return - split = self.get(split_name) - if not split: - return - split.local_kill(default_treatment, change_number) - self.put(split) - - -class UWSGISegmentStorage(SegmentStorage): - """UWSGI-Cache based implementation of a split storage.""" - - _KEY_TEMPLATE = 'segments.{suffix}' - _SEGMENT_DATA_KEY_TEMPLATE = 'segmentData.{segment_name}' - _SEGMENT_CHANGE_NUMBER_KEY_TEMPLATE = 'segment.{segment_name}.till' - - def __init__(self, uwsgi_entrypoint): - """ - Class constructor. - - :param uwsgi_entrypoint: UWSGI module. Can be the actual module or a mock. - :type uwsgi_entrypoint: module - """ - self._uwsgi = uwsgi_entrypoint - - def get(self, segment_name): - """ - Retrieve a segment. - - :param segment_name: Name of the segment to fetch. - :type segment_name: str - - :return: Parsed segment if present. None otherwise. - :rtype: splitio.models.segments.Segment - """ - key = self._SEGMENT_DATA_KEY_TEMPLATE.format(segment_name=segment_name) - cn_key = self._SEGMENT_CHANGE_NUMBER_KEY_TEMPLATE.format(segment_name=segment_name) - try: - segment_data = json.loads(self._uwsgi.cache_get(key, _SPLITIO_SEGMENTS_CACHE_NAMESPACE)) - change_number = json.loads(self._uwsgi.cache_get(cn_key, _SPLITIO_CHANGE_NUMBERS)) - return segments.from_raw({ - 'name': segment_name, - 'added': segment_data, - 'removed': [], - 'till': change_number - }) - except TypeError: - _LOGGER.warning( - "Trying to retrieve nonexistant segment %s. Ignoring.", - segment_name - ) - return None - - def update(self, segment_name, to_add, to_remove, change_number=None): - """ - Update a segment. - - :param segment_name: Name of the segment to update. - :type segment_name: str - :param to_add: List of members to add to the segment. - :type to_add: list - :param to_remove: List of members to remove from the segment. - :type to_remove: list - """ - key = self._SEGMENT_DATA_KEY_TEMPLATE.format(segment_name=segment_name) - try: - segment_data = json.loads(self._uwsgi.cache_get(key, _SPLITIO_SEGMENTS_CACHE_NAMESPACE)) - except TypeError: - segment_data = [] - updated = set(segment_data).union(set(to_add)).difference(to_remove) - self._uwsgi.cache_update( - key, - json.dumps(list(updated)), - 0, - _SPLITIO_SEGMENTS_CACHE_NAMESPACE - ) - - if change_number is not None: - self.set_change_number(segment_name, change_number) - - def put(self, segment): - """ - Put a new segment in storage. - - :param segment: Segment to store. - :type segment: splitio.models.segments.Segent - """ - key = self._SEGMENT_DATA_KEY_TEMPLATE.format(segment_name=segment.name) - self._uwsgi.cache_update( - key, - json.dumps(list(segment.keys)), - 0, - _SPLITIO_SEGMENTS_CACHE_NAMESPACE - ) - self.set_change_number(segment.name, segment.change_number) - - def get_change_number(self, segment_name): - """ - Retrieve latest change number for a segment. - - :param segment_name: Name of the segment. 
- :type segment_name: str - - :rtype: int - """ - cnkey = self._SEGMENT_CHANGE_NUMBER_KEY_TEMPLATE.format(segment_name=segment_name) - try: - return json.loads(self._uwsgi.cache_get(cnkey, _SPLITIO_CHANGE_NUMBERS)) - - except TypeError: - return None - - def set_change_number(self, segment_name, new_change_number): - """ - Set the latest change number. - - :param segment_name: Name of the segment. - :type segment_name: str - :param new_change_number: New change number. - :type new_change_number: int - """ - cn_key = self._SEGMENT_CHANGE_NUMBER_KEY_TEMPLATE.format(segment_name=segment_name) - self._uwsgi.cache_update(cn_key, json.dumps(new_change_number), 0, _SPLITIO_CHANGE_NUMBERS) - - def segment_contains(self, segment_name, key): - """ - Check whether a specific key belongs to a segment in storage. - - :param segment_name: Name of the segment to search in. - :type segment_name: str - :param key: Key to search for. - :type key: str - - :return: True if the segment contains the key. False otherwise. - :rtype: bool - """ - segment = self.get(segment_name) - return segment.contains(key) - - -class UWSGIImpressionStorage(ImpressionStorage): - """Impressions storage interface.""" - - _IMPRESSIONS_KEY = 'SPLITIO.impressions.' - _LOCK_IMPRESSION_KEY = 'SPLITIO.impressions_lock' - _IMPRESSIONS_FLUSH = 'SPLITIO.impressions_flush' - _OVERWRITE_LOCK_SECONDS = 5 - - def __init__(self, adapter): - """ - Class Constructor. - - :param adapter: UWSGI Adapter/Emulator/Module. - :type: object - """ - self._uwsgi = adapter - - def put(self, impressions): - """ - Put one or more impressions in storage. - - :param impressions: List of one or more impressions to store. - :type impressions: list - """ - to_store = [i._asdict() for i in impressions] - with UWSGILock(self._uwsgi, self._LOCK_IMPRESSION_KEY): - try: - current = json.loads(self._uwsgi.cache_get( - self._IMPRESSIONS_KEY, _SPLITIO_IMPRESSIONS_CACHE_NAMESPACE - )) - except TypeError: - current = [] - - self._uwsgi.cache_update( - self._IMPRESSIONS_KEY, - json.dumps(current + to_store), - 0, - _SPLITIO_IMPRESSIONS_CACHE_NAMESPACE - ) - - def pop_many(self, count): - """ - Pop the oldest N impressions from storage. - - :param count: Number of impressions to pop. - :type count: int - """ - with UWSGILock(self._uwsgi, self._LOCK_IMPRESSION_KEY): - try: - current = json.loads(self._uwsgi.cache_get( - self._IMPRESSIONS_KEY, _SPLITIO_IMPRESSIONS_CACHE_NAMESPACE - )) - except TypeError: - return [] - - self._uwsgi.cache_update( - self._IMPRESSIONS_KEY, - json.dumps(current[count:]), - 0, - _SPLITIO_IMPRESSIONS_CACHE_NAMESPACE - ) - - return [ - Impression( - impression['matching_key'], - impression['feature_name'], - impression['treatment'], - impression['label'], - impression['change_number'], - impression['bucketing_key'], - impression['time'] - ) for impression in current[:count] - ] - - def request_flush(self): - """Set a marker in the events cache to indicate that a flush has been requested.""" - self._uwsgi.cache_set(self._IMPRESSIONS_FLUSH, 'ok', 0, _SPLITIO_LOCK_CACHE_NAMESPACE) - - def should_flush(self): - """ - Return True if a flush has been requested. - - :return: Whether a flush has been requested. 
- :rtype: bool - """ - value = self._uwsgi.cache_get(self._IMPRESSIONS_FLUSH, _SPLITIO_LOCK_CACHE_NAMESPACE) - return True if value is not None else False - - def acknowledge_flush(self): - """Acknowledge that a flush has been requested.""" - self._uwsgi.cache_del(self._IMPRESSIONS_FLUSH, _SPLITIO_LOCK_CACHE_NAMESPACE) - - def clear(self): - """ - Clear data. - """ - raise NotImplementedError('Not supported for uwsgi.') - - -class UWSGIEventStorage(EventStorage): - """Events storage interface.""" - - _EVENTS_KEY = 'events' - _LOCK_EVENTS_KEY = 'events_lock' - _EVENTS_FLUSH = 'events_flush' - _OVERWRITE_LOCK_SECONDS = 5 - - def __init__(self, adapter): - """ - Class Constructor. - - :param adapter: UWSGI Adapter/Emulator/Module. - :type: object - """ - self._uwsgi = adapter - - def put(self, events): - """ - Put one or more events in storage. - - :param events: List of one or more events to store. - :type events: list - """ - with UWSGILock(self._uwsgi, self._LOCK_EVENTS_KEY): - try: - current = json.loads(self._uwsgi.cache_get( - self._EVENTS_KEY, _SPLITIO_EVENTS_CACHE_NAMESPACE - )) - except TypeError: - current = [] - self._uwsgi.cache_update( - self._EVENTS_KEY, - json.dumps(current + [e.event._asdict() for e in events]), - 0, - _SPLITIO_EVENTS_CACHE_NAMESPACE - ) - - def pop_many(self, count): - """ - Pop the oldest N events from storage. - - :param count: Number of events to pop. - :type count: int - """ - with UWSGILock(self._uwsgi, self._LOCK_EVENTS_KEY): - try: - current = json.loads(self._uwsgi.cache_get( - self._EVENTS_KEY, _SPLITIO_EVENTS_CACHE_NAMESPACE - )) - except TypeError: - return [] - - self._uwsgi.cache_update( - self._EVENTS_KEY, - json.dumps(current[count:]), - 0, - _SPLITIO_EVENTS_CACHE_NAMESPACE - ) - - return [ - Event( - event['key'], - event['traffic_type_name'], - event['event_type_id'], - event['value'], - event['timestamp'], - event['properties'] - ) - for event in current[:count] - ] - - def request_flush(self): - """Set a marker in the events cache to indicate that a flush has been requested.""" - self._uwsgi.cache_set(self._EVENTS_FLUSH, 'requested', 0, _SPLITIO_LOCK_CACHE_NAMESPACE) - - def should_flush(self): - """ - Return True if a flush has been requested. - - :return: Whether a flush has been requested. - :rtype: bool - """ - value = self._uwsgi.cache_get(self._EVENTS_FLUSH, _SPLITIO_LOCK_CACHE_NAMESPACE) - return True if value is not None else False - - def acknowledge_flush(self): - """Acknowledge that a flush has been requested.""" - self._uwsgi.cache_del(self._EVENTS_FLUSH, _SPLITIO_LOCK_CACHE_NAMESPACE) - - def clear(self): - """ - Clear data. - """ - raise NotImplementedError('Not supported for uwsgi.') - - -class UWSGITelemetryStorage(TelemetryStorage): - """Telemetry storage interface.""" - - _LATENCIES_KEY = 'SPLITIO.latencies' - _GAUGES_KEY = 'SPLITIO.gauges' - _COUNTERS_KEY = 'SPLITIO.counters' - - _LATENCIES_LOCK_KEY = 'SPLITIO.latencies.lock' - _GAUGES_LOCK_KEY = 'SPLITIO.gauges.lock' - _COUNTERS_LOCK_KEY = 'SPLITIO.counters.lock' - - def __init__(self, uwsgi_entrypoint): - """ - Class constructor. - - :param uwsgi_entrypoint: uwsgi module/emulator - :type uwsgi_entrypoint: object - """ - self._uwsgi = uwsgi_entrypoint - - def inc_latency(self, name, bucket): - """ - Add a latency. - - :param name: Name of the latency metric. - :type name: str - :param value: Value of the latency metric. - :tyoe value: int - """ - if not 0 <= bucket <= 21: - _LOGGER.error('Incorect bucket "%d" for latency "%s". 
Ignoring.', bucket, name) - return - - with UWSGILock(self._uwsgi, self._LATENCIES_LOCK_KEY): - latencies_raw = self._uwsgi.cache_get( - self._LATENCIES_KEY, _SPLITIO_METRICS_CACHE_NAMESPACE) - latencies = json.loads(latencies_raw) if latencies_raw else {} - to_update = latencies.get(name, [0] * 22) - to_update[bucket] += 1 - latencies[name] = to_update - self._uwsgi.cache_set( - self._LATENCIES_KEY, - json.dumps(latencies), - 0, - _SPLITIO_METRICS_CACHE_NAMESPACE - ) - - def inc_counter(self, name): - """ - Increment a counter. - - :param name: Name of the counter metric. - :type name: str - """ - with UWSGILock(self._uwsgi, self._COUNTERS_LOCK_KEY): - counters_raw = self._uwsgi.cache_get( - self._COUNTERS_KEY, _SPLITIO_METRICS_CACHE_NAMESPACE) - counters = json.loads(counters_raw) if counters_raw else {} - value = counters.get(name, 0) - value += 1 - counters[name] = value - self._uwsgi.cache_set( - self._COUNTERS_KEY, - json.dumps(counters), - 0, - _SPLITIO_METRICS_CACHE_NAMESPACE - ) - - def put_gauge(self, name, value): - """ - Add a gauge metric. - - :param name: Name of the gauge metric. - :type name: str - :param value: Value of the gauge metric. - :type value: int - """ - with UWSGILock(self._uwsgi, self._GAUGES_LOCK_KEY): - gauges_raw = self._uwsgi.cache_get(self._GAUGES_KEY, _SPLITIO_METRICS_CACHE_NAMESPACE) - gauges = json.loads(gauges_raw) if gauges_raw else {} - gauges[name] = value - self._uwsgi.cache_set( - self._GAUGES_KEY, - json.dumps(gauges), - 0, - _SPLITIO_METRICS_CACHE_NAMESPACE - ) - - def pop_counters(self): - """ - Get all the counters. - - :rtype: list - """ - with UWSGILock(self._uwsgi, self._COUNTERS_LOCK_KEY): - counters_raw = self._uwsgi.cache_get( - self._COUNTERS_KEY, _SPLITIO_METRICS_CACHE_NAMESPACE) - self._uwsgi.cache_del(self._COUNTERS_KEY, _SPLITIO_METRICS_CACHE_NAMESPACE) - return json.loads(counters_raw) if counters_raw else {} - - def pop_gauges(self): - """ - Get all the gauges. - - :rtype: list - - """ - with UWSGILock(self._uwsgi, self._GAUGES_LOCK_KEY): - gauges_raw = self._uwsgi.cache_get(self._GAUGES_KEY, _SPLITIO_METRICS_CACHE_NAMESPACE) - self._uwsgi.cache_del(self._GAUGES_KEY, _SPLITIO_METRICS_CACHE_NAMESPACE) - return json.loads(gauges_raw) if gauges_raw else {} - - def pop_latencies(self): - """ - Get all latencies. - - :rtype: list - """ - with UWSGILock(self._uwsgi, self._LATENCIES_LOCK_KEY): - latencies_raw = self._uwsgi.cache_get( - self._LATENCIES_KEY, _SPLITIO_METRICS_CACHE_NAMESPACE) - self._uwsgi.cache_del(self._LATENCIES_KEY, _SPLITIO_METRICS_CACHE_NAMESPACE) - return json.loads(latencies_raw) if latencies_raw else {} - - def clear(self): - """ - Clear data. 
- """ - raise NotImplementedError('Not supported for uwsgi.') diff --git a/splitio/sync/segment.py b/splitio/sync/segment.py index 6e599b55..e2b08b25 100644 --- a/splitio/sync/segment.py +++ b/splitio/sync/segment.py @@ -1,7 +1,6 @@ import logging from splitio.api import APIException -from splitio.models import splits from splitio.tasks.util import workerpool from splitio.models import segments diff --git a/splitio/tasks/__init__.py b/splitio/tasks/__init__.py index 7d478a22..10c405e5 100644 --- a/splitio/tasks/__init__.py +++ b/splitio/tasks/__init__.py @@ -2,6 +2,7 @@ import abc + class BaseSynchronizationTask(object): """Syncrhonization task interface.""" diff --git a/splitio/tasks/segment_sync.py b/splitio/tasks/segment_sync.py index 5f0574e0..5297ce9f 100644 --- a/splitio/tasks/segment_sync.py +++ b/splitio/tasks/segment_sync.py @@ -1,7 +1,6 @@ """Segment syncrhonization module.""" import logging -from splitio.api import APIException from splitio.tasks import BaseSynchronizationTask from splitio.tasks.util import asynctask diff --git a/splitio/tasks/telemetry_sync.py b/splitio/tasks/telemetry_sync.py index e0339895..b17bc2ad 100644 --- a/splitio/tasks/telemetry_sync.py +++ b/splitio/tasks/telemetry_sync.py @@ -1,7 +1,5 @@ """Split Synchronization task.""" -import logging -from splitio.api import APIException from splitio.tasks import BaseSynchronizationTask from splitio.tasks.util.asynctask import AsyncTask diff --git a/splitio/tasks/uwsgi_wrappers.py b/splitio/tasks/uwsgi_wrappers.py deleted file mode 100644 index 9c304677..00000000 --- a/splitio/tasks/uwsgi_wrappers.py +++ /dev/null @@ -1,192 +0,0 @@ -"""Wrappers for tasks when using UWSGI Cache as a synchronization platform.""" - -import logging -import time - -from splitio.client.config import sanitize as sanitize_config -from splitio.client.util import get_metadata -from splitio.storage.adapters.uwsgi_cache import get_uwsgi -from splitio.storage.uwsgi import UWSGIEventStorage, UWSGIImpressionStorage, \ - UWSGISegmentStorage, UWSGISplitStorage, UWSGITelemetryStorage -from splitio.api.client import HttpClient -from splitio.api.splits import SplitsAPI -from splitio.api.segments import SegmentsAPI -from splitio.api.impressions import ImpressionsAPI -from splitio.api.telemetry import TelemetryAPI -from splitio.api.events import EventsAPI -from splitio.tasks.util import workerpool -from splitio.sync.split import SplitSynchronizer -from splitio.sync.segment import SegmentSynchronizer -from splitio.sync.impression import ImpressionSynchronizer -from splitio.sync.event import EventSynchronizer -from splitio.sync.telemetry import TelemetrySynchronizer - -_LOGGER = logging.getLogger(__name__) - - -def _get_config(user_config): - """ - Get sdk configuration using defaults + user overrides. - - :param user_config: User configuration. - :type user_config: dict - - :return: Calculated configuration. - :rtype: dict - """ - return sanitize_config(user_config['apikey'], user_config) - - -def uwsgi_update_splits(user_config): - """ - Update splits task. - - :param user_config: User-provided configuration. 
- :type user_config: dict - """ - config = _get_config(user_config) - metadata = get_metadata(config) - seconds = config['featuresRefreshRate'] - split_sync = SplitSynchronizer( - SplitsAPI( - HttpClient(1500, config.get('sdk_url'), config.get('events_url')), config['apikey'], - metadata - ), - UWSGISplitStorage(get_uwsgi()), - ) - - while True: - try: - split_sync.synchronize_splits() # pylint: disable=protected-access - time.sleep(seconds) - except Exception: # pylint: disable=broad-except - _LOGGER.error('Error updating splits') - _LOGGER.debug('Error: ', exc_info=True) - - -def uwsgi_update_segments(user_config): - """ - Update segments task. - - :param user_config: User-provided configuration. - :type user_config: dict - """ - config = _get_config(user_config) - seconds = config['segmentsRefreshRate'] - metadata = get_metadata(config) - segment_sync = SegmentSynchronizer( - SegmentsAPI( - HttpClient(1500, config.get('sdk_url'), config.get('events_url')), config['apikey'], - metadata - ), - UWSGISplitStorage(get_uwsgi()), - UWSGISegmentStorage(get_uwsgi()), - ) - - pool = workerpool.WorkerPool(20, segment_sync.synchronize_segment) # pylint: disable=protected-access - pool.start() - split_storage = UWSGISplitStorage(get_uwsgi()) - while True: - try: - for segment_name in split_storage.get_segment_names(): - pool.submit_work(segment_name) - time.sleep(seconds) - except Exception: # pylint: disable=broad-except - _LOGGER.error('Error updating segments') - _LOGGER.debug('Error: ', exc_info=True) - - -def uwsgi_report_impressions(user_config): - """ - Flush impressions task. - - :param user_config: User-provided configuration. - :type user_config: dict - """ - config = _get_config(user_config) - metadata = get_metadata(config) - seconds = config['impressionsRefreshRate'] - storage = UWSGIImpressionStorage(get_uwsgi()) - impressions_sync = ImpressionSynchronizer( - ImpressionsAPI( - HttpClient(1500, config.get('sdk_url'), config.get('events_url')), - config['apikey'], - metadata, - config['impressionsMode'] - ), - storage, - config['impressionsBulkSize'] - ) - - while True: - try: - impressions_sync.synchronize_impressions() # pylint: disable=protected-access - for _ in range(0, seconds): - if storage.should_flush(): - storage.acknowledge_flush() - break - time.sleep(1) - except Exception: # pylint: disable=broad-except - _LOGGER.error('Error posting impressions') - _LOGGER.debug('Error: ', exc_info=True) - - -def uwsgi_report_events(user_config): - """ - Flush events task. - - :param user_config: User-provided configuration. - :type user_config: dict - """ - config = _get_config(user_config) - metadata = get_metadata(config) - seconds = config.get('eventsRefreshRate', 30) - storage = UWSGIEventStorage(get_uwsgi()) - events_sync = EventSynchronizer( - EventsAPI( - HttpClient(1500, config.get('sdk_url'), config.get('events_url')), - config['apikey'], - metadata - ), - storage, - config['eventsBulkSize'] - ) - while True: - try: - events_sync.synchronize_events() # pylint: disable=protected-access - for _ in range(0, seconds): - if storage.should_flush(): - storage.acknowledge_flush() - break - time.sleep(1) - except Exception: # pylint: disable=broad-except - _LOGGER.error('Error posting metrics') - _LOGGER.debug('Error: ', exc_info=True) - - -def uwsgi_report_telemetry(user_config): - """ - Flush events task. - - :param user_config: User-provided configuration. 
- :type user_config: dict - """ - config = _get_config(user_config) - metadata = get_metadata(config) - seconds = config.get('metricsRefreshRate', 30) - storage = UWSGITelemetryStorage(get_uwsgi()) - telemetry_sync = TelemetrySynchronizer( - TelemetryAPI( - HttpClient(1500, config.get('sdk_url'), config.get('events_url')), - config['apikey'], - metadata - ), - storage, - ) - while True: - try: - telemetry_sync.synchronize_telemetry() # pylint: disable=protected-access - time.sleep(seconds) - except Exception: # pylint: disable=broad-except - _LOGGER.error('Error posting metrics') - _LOGGER.debug('Error: ', exc_info=True) diff --git a/splitio/version.py b/splitio/version.py index 33de8d16..432526bd 100644 --- a/splitio/version.py +++ b/splitio/version.py @@ -1 +1 @@ -__version__ = '9.0.0' +__version__ = '9.0.0-uwsgi' diff --git a/tests/client/test_config.py b/tests/client/test_config.py index 0bac9f24..a52600bd 100644 --- a/tests/client/test_config.py +++ b/tests/client/test_config.py @@ -13,7 +13,6 @@ def test_parse_operation_mode(self): assert config._parse_operation_mode('some', {}) == 'inmemory-standalone' assert config._parse_operation_mode('localhost', {}) == 'localhost-standalone' assert config._parse_operation_mode('some', {'redisHost': 'x'}) == 'redis-consumer' - assert config._parse_operation_mode('some', {'uwsgiClient': True}) == 'uwsgi-consumer' def test_sanitize_imp_mode(self): """Test sanitization of impressions mode.""" diff --git a/tests/client/test_factory.py b/tests/client/test_factory.py index 997540e5..8eac8363 100644 --- a/tests/client/test_factory.py +++ b/tests/client/test_factory.py @@ -8,7 +8,7 @@ from splitio.client.factory import get_factory, SplitFactory, _INSTANTIATED_FACTORIES, Status,\ _LOGGER as _logger from splitio.client.config import DEFAULT_CONFIG -from splitio.storage import redis, inmemmory, uwsgi +from splitio.storage import redis, inmemmory from splitio.tasks import events_sync, impressions_sync, split_sync, segment_sync, telemetry_sync from splitio.tasks.util import asynctask from splitio.api.splits import SplitsAPI @@ -142,25 +142,6 @@ def test_redis_client_creation(self, mocker): assert factory.ready factory.destroy() - def test_uwsgi_client_creation(self): - """Test that a client with redis storage is created correctly.""" - factory = get_factory('some_api_key', config={'uwsgiClient': True}) - assert isinstance(factory._get_storage('splits'), uwsgi.UWSGISplitStorage) - assert isinstance(factory._get_storage('segments'), uwsgi.UWSGISegmentStorage) - assert isinstance(factory._get_storage('impressions'), uwsgi.UWSGIImpressionStorage) - assert isinstance(factory._get_storage('events'), uwsgi.UWSGIEventStorage) - assert isinstance(factory._get_storage('telemetry'), uwsgi.UWSGITelemetryStorage) - assert factory._sync_manager is None - assert factory._labels_enabled is True - assert isinstance(factory._recorder, StandardRecorder) - assert isinstance(factory._recorder._impressions_manager, ImpressionsManager) - assert isinstance(factory._recorder._telemetry_storage, inmemmory.TelemetryStorage) - assert isinstance(factory._recorder._event_sotrage, inmemmory.EventStorage) - assert isinstance(factory._recorder._impression_storage, inmemmory.ImpressionStorage) - factory.block_until_ready() - assert factory.ready - factory.destroy() - def test_uwsgi_forked_client_creation(self): """Test client with preforked initialization.""" factory = get_factory('some_api_key', config={'preforkedInitialization': True}) @@ -427,14 +408,11 @@ def 
_make_factory_with_apikey(apikey, *_, **__): build_in_memory.side_effect = _make_factory_with_apikey build_redis = mocker.Mock() build_redis.side_effect = _make_factory_with_apikey - build_uwsgi = mocker.Mock() - build_uwsgi.side_effect = _make_factory_with_apikey build_localhost = mocker.Mock() build_localhost.side_effect = _make_factory_with_apikey mocker.patch('splitio.client.factory._LOGGER', new=factory_module_logger) mocker.patch('splitio.client.factory._build_in_memory_factory', new=build_in_memory) mocker.patch('splitio.client.factory._build_redis_factory', new=build_redis) - mocker.patch('splitio.client.factory._build_uwsgi_factory', new=build_uwsgi) mocker.patch('splitio.client.factory._build_localhost_factory', new=build_localhost) _INSTANTIATED_FACTORIES.clear() # Clear all factory counters for testing purposes diff --git a/tests/client/test_input_validator.py b/tests/client/test_input_validator.py index b52c2e2c..5ad58a25 100644 --- a/tests/client/test_input_validator.py +++ b/tests/client/test_input_validator.py @@ -1111,12 +1111,6 @@ def test_input_validation_factory(self, mocker): mocker.call("%s: you passed an invalid %s, %s must be a non-empty string.", 'factory_instantiation', 'apikey', 'apikey') ] - logger.reset_mock() - f = get_factory(True, config={'uwsgiClient': True}) - assert f is not None - assert logger.error.mock_calls == [] - f.destroy() - logger.reset_mock() f = get_factory(True, config={'redisHost': 'some-host'}) assert f is not None diff --git a/tests/storage/test_uwsgi.py b/tests/storage/test_uwsgi.py deleted file mode 100644 index e7f06bad..00000000 --- a/tests/storage/test_uwsgi.py +++ /dev/null @@ -1,320 +0,0 @@ -"""UWSGI Storage unit tests.""" -# pylint: disable=no-self-usage -import json - -from splitio.storage.uwsgi import UWSGIEventStorage, UWSGIImpressionStorage, \ - UWSGISegmentStorage, UWSGISplitStorage, UWSGITelemetryStorage - -from splitio.models.splits import Split -from splitio.models.segments import Segment -from splitio.models.impressions import Impression -from splitio.models.events import Event, EventWrapper - -from splitio.storage.adapters.uwsgi_cache import get_uwsgi - - -class UWSGISplitStorageTests(object): - """UWSGI Split Storage test cases.""" - - @staticmethod - def _get_from_raw_mock(mocker): - def _do(raw): - mock_split = mocker.Mock() - mock_split = mocker.Mock(spec=Split) - mock_split.to_json.return_value = raw - split_name = mocker.PropertyMock() - split_name.return_value = raw['name'] - type(mock_split).name = split_name - traffic_type_name = mocker.PropertyMock() - traffic_type_name.return_value = raw['trafficTypeName'] - type(mock_split).traffic_type_name = traffic_type_name - return mock_split - - from_raw_mock = mocker.Mock() - from_raw_mock.side_effect = lambda x: _do(x) - return from_raw_mock - - def test_store_retrieve_split(self, mocker): - """Test storing and retrieving splits.""" - uwsgi = get_uwsgi(True) - storage = UWSGISplitStorage(uwsgi) - from_raw_mock = self._get_from_raw_mock(mocker) - mocker.patch('splitio.models.splits.from_raw', new=from_raw_mock) - - raw_split = {'name': 'some_split', 'trafficTypeName': 'user'} - split = from_raw_mock(raw_split) - - from_raw_mock.reset_mock() # clear mock calls so they don't interfere with the testing itself. 
- storage.put(split) - - retrieved = storage.get('some_split') - - assert retrieved.name == split.name and retrieved.traffic_type_name == split.traffic_type_name - assert from_raw_mock.mock_calls == [mocker.call(raw_split)] - assert split.to_json.mock_calls == [mocker.call()] - - assert storage.get('nonexistant_split') is None - - storage.remove('some_split') - assert storage.get('some_split') is None - - def test_get_splits(self, mocker): - """Test retrieving a list of passed splits.""" - uwsgi = get_uwsgi(True) - storage = UWSGISplitStorage(uwsgi) - from_raw_mock = self._get_from_raw_mock(mocker) - mocker.patch('splitio.models.splits.from_raw', new=from_raw_mock) - - split_1 = from_raw_mock({'name': 'some_split_1', 'trafficTypeName': 'user'}) - split_2 = from_raw_mock({'name': 'some_split_2', 'trafficTypeName': 'user'}) - storage.put(split_1) - storage.put(split_2) - - splits = storage.fetch_many(['some_split_1', 'some_split_2', 'some_split_3']) - assert len(splits) == 3 - assert splits['some_split_1'].name == 'some_split_1' - assert splits['some_split_2'].name == 'some_split_2' - assert 'some_split_3' in splits - - def test_set_get_changenumber(self, mocker): - """Test setting and retrieving changenumber.""" - uwsgi = get_uwsgi(True) - storage = UWSGISplitStorage(uwsgi) - - assert storage.get_change_number() is None - storage.set_change_number(123) - assert storage.get_change_number() == 123 - - def test_get_split_names(self, mocker): - """Test getting all split names.""" - uwsgi = get_uwsgi(True) - storage = UWSGISplitStorage(uwsgi) - from_raw_mock = self._get_from_raw_mock(mocker) - mocker.patch('splitio.models.splits.from_raw', new=from_raw_mock) - - split_1 = from_raw_mock({'name': 'some_split_1', 'trafficTypeName': 'user'}) - split_2 = from_raw_mock({'name': 'some_split_2', 'trafficTypeName': 'user'}) - storage.put(split_1) - storage.put(split_2) - - assert set(storage.get_split_names()) == set(['some_split_1', 'some_split_2']) - storage.remove('some_split_1') - assert storage.get_split_names() == ['some_split_2'] - - def test_get_all_splits(self, mocker): - """Test fetching all splits.""" - uwsgi = get_uwsgi(True) - storage = UWSGISplitStorage(uwsgi) - from_raw_mock = self._get_from_raw_mock(mocker) - mocker.patch('splitio.models.splits.from_raw', new=from_raw_mock) - - split_1 = from_raw_mock({'name': 'some_split_1', 'trafficTypeName': 'user'}) - split_2 = from_raw_mock({'name': 'some_split_2', 'trafficTypeName': 'user'}) - storage.put(split_1) - storage.put(split_2) - - splits = storage.get_all_splits() - s1 = next(split for split in splits if split.name == 'some_split_1') - s2 = next(split for split in splits if split.name == 'some_split_2') - - assert s1.traffic_type_name == 'user' - assert s2.traffic_type_name == 'user' - - def test_is_valid_traffic_type(self, mocker): - """Test that traffic type validation works properly.""" - uwsgi = get_uwsgi(True) - storage = UWSGISplitStorage(uwsgi) - from_raw_mock = self._get_from_raw_mock(mocker) - mocker.patch('splitio.models.splits.from_raw', new=from_raw_mock) - - split_1 = from_raw_mock({'name': 'some_split_1', 'trafficTypeName': 'user'}) - storage.put(split_1) - assert storage.is_valid_traffic_type('user') is True - assert storage.is_valid_traffic_type('account') is False - - split_2 = from_raw_mock({'name': 'some_split_2', 'trafficTypeName': 'account'}) - storage.put(split_2) - assert storage.is_valid_traffic_type('user') is True - assert storage.is_valid_traffic_type('account') is True - - split_3 = from_raw_mock({'name': 
'some_split_3', 'trafficTypeName': 'user'}) - storage.put(split_3) - assert storage.is_valid_traffic_type('user') is True - assert storage.is_valid_traffic_type('account') is True - - storage.remove('some_split_1') - assert storage.is_valid_traffic_type('user') is True - assert storage.is_valid_traffic_type('account') is True - - storage.remove('some_split_2') - assert storage.is_valid_traffic_type('user') is True - assert storage.is_valid_traffic_type('account') is False - - storage.remove('some_split_3') - assert storage.is_valid_traffic_type('user') is False - assert storage.is_valid_traffic_type('account') is False - - def test_kill_locally(self): - """Test kill local.""" - uwsgi = get_uwsgi(True) - storage = UWSGISplitStorage(uwsgi) - - split = Split('some_split', 123456789, False, 'some', 'traffic_type', - 'ACTIVE', 1) - storage.put(split) - storage.set_change_number(1) - - storage.kill_locally('test', 'default_treatment', 2) - assert storage.get('test') is None - - storage.kill_locally('some_split', 'default_treatment', 0) - assert storage.get('some_split').change_number == 1 - assert storage.get('some_split').killed is False - assert storage.get('some_split').default_treatment == 'some' - - storage.kill_locally('some_split', 'default_treatment', 3) - assert storage.get('some_split').change_number == 3 - - -class UWSGISegmentStorageTests(object): - """UWSGI Segment storage test cases.""" - - def test_store_retrieve_segment(self, mocker): - """Test storing and fetching segments.""" - uwsgi = get_uwsgi(True) - storage = UWSGISegmentStorage(uwsgi) - segment = mocker.Mock(spec=Segment) - segment_keys = mocker.PropertyMock() - segment_keys.return_value = ['abc'] - type(segment).keys = segment_keys - segment.to_json = {} - segment_name = mocker.PropertyMock() - segment_name.return_value = 'some_segment' - segment_change_number = mocker.PropertyMock() - segment_change_number.return_value = 123 - type(segment).name = segment_name - type(segment).change_number = segment_change_number - from_raw_mock = mocker.Mock() - from_raw_mock.return_value = 'ok' - mocker.patch('splitio.models.segments.from_raw', new=from_raw_mock) - - storage.put(segment) - assert storage.get('some_segment') == 'ok' - assert from_raw_mock.mock_calls == [mocker.call({'till': 123, 'removed': [], 'added': [u'abc'], 'name': 'some_segment'})] - assert storage.get('nonexistant-segment') is None - - def test_get_set_change_number(self, mocker): - """Test setting and getting change number.""" - uwsgi = get_uwsgi(True) - storage = UWSGISegmentStorage(uwsgi) - assert storage.get_change_number('some_segment') is None - storage.set_change_number('some_segment', 123) - assert storage.get_change_number('some_segment') == 123 - - def test_segment_contains(self, mocker): - """Test that segment contains works properly.""" - uwsgi = get_uwsgi(True) - storage = UWSGISegmentStorage(uwsgi) - - from_raw_mock = mocker.Mock() - from_raw_mock.return_value = Segment('some_segment', ['abc'], 123) - mocker.patch('splitio.models.segments.from_raw', new=from_raw_mock) - segment = mocker.Mock(spec=Segment) - segment_keys = mocker.PropertyMock() - segment_keys.return_value = ['abc'] - type(segment).keys = segment_keys - segment.to_json = {} - segment_name = mocker.PropertyMock() - segment_name.return_value = 'some_segment' - segment_change_number = mocker.PropertyMock() - segment_change_number.return_value = 123 - type(segment).name = segment_name - type(segment).change_number = segment_change_number - storage.put(segment) - - assert 
storage.segment_contains('some_segment', 'abc') - assert not storage.segment_contains('some_segment', 'qwe') - - -class UWSGIImpressionsStorageTests(object): - """UWSGI Impressions storage test cases.""" - - def test_put_pop_impressions(self, mocker): - """Test storing and fetching impressions.""" - uwsgi = get_uwsgi(True) - storage = UWSGIImpressionStorage(uwsgi) - impressions = [ - Impression('key1', 'feature1', 'on', 'some_label', 123456, 'buck1', 321654), - Impression('key2', 'feature2', 'on', 'some_label', 123456, 'buck1', 321654), - Impression('key3', 'feature2', 'on', 'some_label', 123456, 'buck1', 321654), - Impression('key4', 'feature1', 'on', 'some_label', 123456, 'buck1', 321654) - ] - storage.put(impressions) - res = storage.pop_many(10) - assert res == impressions - - def test_flush(self): - """Test requesting, querying and acknowledging a flush.""" - uwsgi = get_uwsgi(True) - storage = UWSGIImpressionStorage(uwsgi) - assert storage.should_flush() is False - storage.request_flush() - assert storage.should_flush() is True - storage.acknowledge_flush() - assert storage.should_flush() is False - - -class UWSGIEventsStorageTests(object): - """UWSGI Events storage test cases.""" - - def test_put_pop_events(self, mocker): - """Test storing and fetching events.""" - uwsgi = get_uwsgi(True) - storage = UWSGIEventStorage(uwsgi) - events = [ - EventWrapper(event=Event('key1', 'user', 'purchase', 10, 123456, None), size=32768), - EventWrapper(event=Event('key2', 'user', 'purchase', 10, 123456, None), size=32768), - EventWrapper(event=Event('key3', 'user', 'purchase', 10, 123456, None), size=32768), - EventWrapper(event=Event('key4', 'user', 'purchase', 10, 123456, None), size=32768), - ] - - storage.put(events) - res = storage.pop_many(10) - assert res == [ - Event('key1', 'user', 'purchase', 10, 123456, None), - Event('key2', 'user', 'purchase', 10, 123456, None), - Event('key3', 'user', 'purchase', 10, 123456, None), - Event('key4', 'user', 'purchase', 10, 123456, None) - ] - - -class UWSGITelemetryStorageTests(object): - """UWSGI-based telemetry storage test cases.""" - - def test_latencies(self): - """Test storing and popping latencies.""" - storage = UWSGITelemetryStorage(get_uwsgi(True)) - storage.inc_latency('some_latency', 2) - storage.inc_latency('some_latency', 2) - storage.inc_latency('some_latency', 2) - assert storage.pop_latencies() == { - 'some_latency': [0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] - } - assert storage.pop_latencies() == {} - - def test_counters(self): - """Test storing and popping counters.""" - storage = UWSGITelemetryStorage(get_uwsgi(True)) - storage.inc_counter('some_counter') - storage.inc_counter('some_counter') - storage.inc_counter('some_counter') - assert storage.pop_counters() == {'some_counter': 3} - assert storage.pop_counters() == {} - - def test_gauges(self): - """Test storing and popping gauges.""" - storage = UWSGITelemetryStorage(get_uwsgi(True)) - storage.put_gauge('some_gauge1', 123) - storage.put_gauge('some_gauge2', 456) - assert storage.pop_gauges() == {'some_gauge1': 123, 'some_gauge2': 456} - assert storage.pop_gauges() == {} diff --git a/tests/tasks/test_uwsgi_wrappers.py b/tests/tasks/test_uwsgi_wrappers.py deleted file mode 100644 index 71b5614a..00000000 --- a/tests/tasks/test_uwsgi_wrappers.py +++ /dev/null @@ -1,137 +0,0 @@ -"""UWSGI Task wrappers test module.""" -# pylint: disable=no-self-use,protected-access -from splitio.storage import SplitStorage -from splitio.tasks.util.workerpool import WorkerPool 
-from splitio.storage.uwsgi import UWSGISplitStorage -from splitio.tasks.uwsgi_wrappers import uwsgi_update_splits, uwsgi_update_segments, \ - uwsgi_report_events, uwsgi_report_impressions, uwsgi_report_telemetry -from splitio.sync.split import SplitSynchronizer -from splitio.sync.segment import SegmentSynchronizer -from splitio.sync.impression import ImpressionSynchronizer -from splitio.sync.event import EventSynchronizer -from splitio.sync.telemetry import TelemetrySynchronizer - - -class NonCatchableException(BaseException): - """Exception to be used to stop sync task's infinite loop.""" - - pass - - -class TaskWrappersTests(object): - """Task wrappers task test cases.""" - - def test_update_splits(self, mocker): - """Test split sync task wrapper.""" - data = {'executions': 0} - - def _update_splits_side_effect(*_, **__): - data['executions'] += 1 - if data['executions'] > 1: - raise NonCatchableException('asd') - - stmock = mocker.Mock(spec=SplitSynchronizer) - stmock.synchronize_splits.side_effect = _update_splits_side_effect - stmock_class = mocker.Mock(spec=SplitSynchronizer) - stmock_class.return_value = stmock - mocker.patch('splitio.tasks.uwsgi_wrappers.SplitSynchronizer', new=stmock_class) - - try: - uwsgi_update_splits({'apikey': 'asd', 'featuresRefreshRate': 1}) - except NonCatchableException: - # Make sure that the task was called before being forced to stop. - assert data['executions'] > 1 - assert len(stmock.synchronize_splits.mock_calls) > 1 - - def test_update_segments(self, mocker): - """Test split sync task wrapper.""" - data = {'executions': 0} - - def _submit_work(*_, **__): - data['executions'] += 1 - # we mock 2 segments, so we expect this to be called at least twice before ending. - if data['executions'] > 2: - raise NonCatchableException('asd') - - wpmock = mocker.Mock(spec=WorkerPool) - wpmock.submit_work.side_effect = _submit_work - wpmock_class = mocker.Mock(spec=WorkerPool) - wpmock_class.return_value = wpmock - mocker.patch('splitio.tasks.uwsgi_wrappers.workerpool.WorkerPool', new=wpmock_class) - - mocked_update_segment = mocker.patch.object(SplitStorage, 'get_segment_names') - mocked_update_segment.return_value = ['segment1', 'segment2'] - mocked_split_storage_instance = UWSGISplitStorage(True) - split_storage_mock = mocker.Mock(spec=UWSGISplitStorage) - split_storage_mock.return_value = mocked_split_storage_instance - - mocker.patch('splitio.tasks.uwsgi_wrappers.UWSGISplitStorage', new=split_storage_mock) - - try: - uwsgi_update_segments({'apikey': 'asd', 'segmentsRefreshRate': 1}) - except NonCatchableException: - # Make sure that the task was called before being forced to stop. - assert data['executions'] > 2 - assert len(wpmock.submit_work.mock_calls) > 2 - - def test_post_impressions(self, mocker): - """Test split sync task wrapper.""" - data = {'executions': 0} - - def _report_impressions_side_effect(*_, **__): - data['executions'] += 1 - if data['executions'] > 1: - raise NonCatchableException('asd') - - stmock = mocker.Mock(spec=ImpressionSynchronizer) - stmock.synchronize_impressions.side_effect = _report_impressions_side_effect - stmock_class = mocker.Mock(spec=ImpressionSynchronizer) - stmock_class.return_value = stmock - mocker.patch('splitio.tasks.uwsgi_wrappers.ImpressionSynchronizer', new=stmock_class) - try: - uwsgi_report_impressions({'apikey': 'asd', 'impressionsRefreshRate': 1}) - except NonCatchableException: - # Make sure that the task was called before being forced to stop. 
- assert data['executions'] > 1 - # TODO: Test impressions flushing. - - def test_post_events(self, mocker): - """Test split sync task wrapper.""" - data = {'executions': 0} - - def _send_events_side_effect(*_, **__): - data['executions'] += 1 - if data['executions'] > 1: - raise NonCatchableException('asd') - - stmock = mocker.Mock(spec=EventSynchronizer) - stmock.synchronize_events.side_effect = _send_events_side_effect - stmock_class = mocker.Mock(spec=EventSynchronizer) - stmock_class.return_value = stmock - mocker.patch('splitio.tasks.uwsgi_wrappers.EventSynchronizer', new=stmock_class) - try: - uwsgi_report_events({'apikey': 'asd', 'eventsRefreshRate': 1}) - except NonCatchableException: - # Make sure that the task was called before being forced to stop. - assert data['executions'] > 1 - # TODO: Test impressions flushing. - - def test_post_telemetry(self, mocker): - """Test split sync task wrapper.""" - data = {'executions': 0} - - def _flush_telemetry_side_effect(*_, **__): - data['executions'] += 1 - if data['executions'] > 1: - raise NonCatchableException('asd') - - stmock = mocker.Mock(spec=TelemetrySynchronizer) - stmock.synchronize_telemetry.side_effect = _flush_telemetry_side_effect - stmock_class = mocker.Mock(spec=TelemetrySynchronizer) - stmock_class.return_value = stmock - mocker.patch('splitio.tasks.uwsgi_wrappers.TelemetrySynchronizer', new=stmock_class) - try: - uwsgi_report_telemetry({'apikey': 'asd', 'metricsRefreshRate': 1}) - except NonCatchableException: - # Make sure that the task was called before being forced to stop. - assert data['executions'] > 1 From 7850aa64a2f0b9748a55aeb7bdc408806da4b499 Mon Sep 17 00:00:00 2001 From: Matias Melograno Date: Fri, 30 Apr 2021 10:57:42 -0300 Subject: [PATCH 15/17] preparing release --- CHANGES.txt | 7 ++++--- splitio/client/config.py | 2 -- splitio/version.py | 2 +- 3 files changed, 5 insertions(+), 6 deletions(-) diff --git a/CHANGES.txt b/CHANGES.txt index 2ba02431..dbdf20ba 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -1,8 +1,9 @@ -9.0.0 (Apr 28, 2021) - - BREAKING CHANGE: Deprecated Python2. +9.0.0 (Apr 30, 2021) + - BREAKING CHANGE: Removed splitSdkMachineIp and splitSdkMachineName configs. + - BREAKING CHANGE: Deprecated uWSGI local cache. + - BREAKING CHANGE: Deprecated Python2 support. - Removed six, future and futures libs for compatibility between Python2 and Python3. - Updated strings encoding to utf-8 by default for Redis. - - Deprecated uWSGI cache. 8.4.1 (Apr 16, 2021) - Bumped mmh3cffi dependency which now requires c99 flag to build. 
diff --git a/splitio/client/config.py b/splitio/client/config.py index 0bdb9843..84141f9c 100644 --- a/splitio/client/config.py +++ b/splitio/client/config.py @@ -11,8 +11,6 @@ DEFAULT_CONFIG = { 'operationMode': 'in-memory', 'connectionTimeout': 1500, - 'splitSdkMachineName': None, - 'splitSdkMachineIp': None, 'streamingEnabled': True, 'featuresRefreshRate': 30, 'segmentsRefreshRate': 30, diff --git a/splitio/version.py b/splitio/version.py index 432526bd..c37cb6f7 100644 --- a/splitio/version.py +++ b/splitio/version.py @@ -1 +1 @@ -__version__ = '9.0.0-uwsgi' +__version__ = '9.0.0-all' From b2478409e7dd9e6eed184e842f1f61fd51f4005c Mon Sep 17 00:00:00 2001 From: Matias Melograno Date: Fri, 30 Apr 2021 14:54:10 -0300 Subject: [PATCH 16/17] added metadata into streaming client --- CHANGES.txt | 2 + splitio/api/__init__.py | 13 ++++++- splitio/client/config.py | 1 - splitio/client/factory.py | 5 ++- splitio/push/manager.py | 12 ++++-- splitio/push/splitsse.py | 16 ++++++-- splitio/storage/adapters/redis.py | 2 - splitio/sync/manager.py | 13 ++++++- splitio/version.py | 2 +- tests/api/test_util.py | 39 ++++++++++++++++++++ tests/client/test_factory.py | 7 ++-- tests/push/test_manager.py | 24 +++++++----- tests/push/test_splitsse.py | 21 +++++++++-- tests/storage/adapters/test_redis_adapter.py | 3 -- tests/sync/test_manager.py | 7 +++- 15 files changed, 128 insertions(+), 39 deletions(-) create mode 100644 tests/api/test_util.py diff --git a/CHANGES.txt b/CHANGES.txt index dbdf20ba..4cf6037b 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -4,6 +4,8 @@ - BREAKING CHANGE: Deprecated Python2 support. - Removed six, future and futures libs for compatibility between Python2 and Python3. - Updated strings encoding to utf-8 by default for Redis. + - BREAKING CHANGE: Deprecated `redisCharset` config. + - Added SDK Metadata headers to streaming client. 8.4.1 (Apr 16, 2021) - Bumped mmh3cffi dependency which now requires c99 flag to build. diff --git a/splitio/api/__init__.py b/splitio/api/__init__.py index 96a3e224..aff06a51 100644 --- a/splitio/api/__init__.py +++ b/splitio/api/__init__.py @@ -15,20 +15,29 @@ def status_code(self): return self._status_code -def headers_from_metadata(sdk_metadata): +def headers_from_metadata(sdk_metadata, client_key=None): """ Generate a dict with headers required by data-recording API endpoints. :param sdk_metadata: SDK Metadata object, generated at sdk initialization time. :type sdk_metadata: splitio.client.util.SdkMetadata + :param client_key: client key. + :type client_key: str + :return: A dictionary with headers. 
:rtype: dict """ - return { + + metadata = { 'SplitSDKVersion': sdk_metadata.sdk_version, 'SplitSDKMachineIP': sdk_metadata.instance_ip, 'SplitSDKMachineName': sdk_metadata.instance_name } if sdk_metadata.instance_ip != 'NA' and sdk_metadata.instance_ip != 'unknown' else { 'SplitSDKVersion': sdk_metadata.sdk_version, } + + if client_key is not None: + metadata['SplitSDKClientKey'] = client_key + + return metadata diff --git a/splitio/client/config.py b/splitio/client/config.py index 84141f9c..32992174 100644 --- a/splitio/client/config.py +++ b/splitio/client/config.py @@ -39,7 +39,6 @@ 'redisUnixSocketPath': None, 'redisEncoding': 'utf-8', 'redisEncodingErrors': 'strict', - 'redisCharset': 'utf-8', 'redisErrors': None, 'redisDecodeResponses': True, 'redisRetryOnTimeout': False, diff --git a/splitio/client/factory.py b/splitio/client/factory.py index e2e56990..3c2c7cc4 100644 --- a/splitio/client/factory.py +++ b/splitio/client/factory.py @@ -362,7 +362,7 @@ def _build_in_memory_factory(api_key, cfg, sdk_url=None, events_url=None, # pyl sdk_ready_flag = threading.Event() if not preforked_initialization else None manager = Manager(sdk_ready_flag, synchronizer, apis['auth'], cfg['streamingEnabled'], - streaming_api_base_url) + sdk_metadata, streaming_api_base_url, api_key[-4:]) storages['events'].set_queue_full_hook(tasks.events_task.flush) storages['impressions'].set_queue_full_hook(tasks.impressions_task.flush) @@ -439,9 +439,10 @@ def _build_localhost_factory(cfg): ), None, None, None, None, None, ) + sdk_metadata = util.get_metadata(cfg) ready_event = threading.Event() synchronizer = LocalhostSynchronizer(synchronizers, tasks) - manager = Manager(ready_event, synchronizer, None, False) + manager = Manager(ready_event, synchronizer, None, False, sdk_metadata) manager.start() recorder = StandardRecorder( ImpressionsManager(cfg['impressionsMode'], True, None), diff --git a/splitio/push/manager.py b/splitio/push/manager.py index 1e529b66..fb75464b 100644 --- a/splitio/push/manager.py +++ b/splitio/push/manager.py @@ -20,7 +20,7 @@ class PushManager(object): # pylint:disable=too-many-instance-attributes """Push notifications susbsytem manager.""" - def __init__(self, auth_api, synchronizer, feedback_loop, sse_url=None): + def __init__(self, auth_api, synchronizer, feedback_loop, sdk_metadata, sse_url=None, client_key=None): """ Class constructor. @@ -33,8 +33,14 @@ def __init__(self, auth_api, synchronizer, feedback_loop, sse_url=None): :param feedback_loop: queue where push status updates are published. :type feedback_loop: queue.Queue + :param sdk_metadata: SDK version & machine name & IP. + :type sdk_metadata: splitio.client.util.SdkMetadata + :param sse_url: streaming base url. :type sse_url: str + + :param client_key: client key. 
+ :type client_key: str """ self._auth_api = auth_api self._feedback_loop = feedback_loop @@ -52,8 +58,8 @@ def __init__(self, auth_api, synchronizer, feedback_loop, sse_url=None): } kwargs = {} if sse_url is None else {'base_url': sse_url} - self._sse_client = SplitSSEClient(self._event_handler, self._handle_connection_ready, - self._handle_connection_end, **kwargs) + self._sse_client = SplitSSEClient(self._event_handler, sdk_metadata, self._handle_connection_ready, + self._handle_connection_end, client_key, **kwargs) self._running = False self._next_refresh = Timer(0, lambda: 0) diff --git a/splitio/push/splitsse.py b/splitio/push/splitsse.py index 52459378..e80e1549 100644 --- a/splitio/push/splitsse.py +++ b/splitio/push/splitsse.py @@ -4,6 +4,7 @@ from enum import Enum from splitio.push.sse import SSEClient, SSE_EVENT_ERROR from splitio.util.threadutil import EventGroup +from splitio.api import headers_from_metadata _LOGGER = logging.getLogger(__name__) @@ -20,14 +21,18 @@ class _Status(Enum): ERRORED = 2 CONNECTED = 3 - def __init__(self, event_callback, first_event_callback=None, - connection_closed_callback=None, base_url='https://streaming.split.io'): + def __init__(self, event_callback, sdk_metadata, first_event_callback=None, + connection_closed_callback=None, client_key=None, + base_url='https://streaming.split.io'): """ Construct a split sse client. :param callback: fuction to call when an event is received. :type callback: callable + :param sdk_metadata: SDK version & machine name & IP. + :type sdk_metadata: splitio.client.util.SdkMetadata + :param first_event_callback: function to call when the first event is received. :type first_event_callback: callable @@ -36,6 +41,9 @@ def __init__(self, event_callback, first_event_callback=None, :param base_url: scheme + :// + host :type base_url: str + + :param client_key: client key. 
+ :type client_key: str """ self._client = SSEClient(self._raw_event_handler) self._callback = event_callback @@ -45,6 +53,7 @@ def __init__(self, event_callback, first_event_callback=None, self._status = SplitSSEClient._Status.IDLE self._sse_first_event = None self._sse_connection_closed = None + self._metadata = headers_from_metadata(sdk_metadata, client_key) def _raw_event_handler(self, event): """ @@ -117,7 +126,8 @@ def start(self, token): def connect(url): """Connect to sse in a blocking manner.""" try: - self._client.start(url, timeout=self.KEEPALIVE_TIMEOUT) + self._client.start(url, timeout=self.KEEPALIVE_TIMEOUT, + extra_headers=self._metadata) finally: self._status = SplitSSEClient._Status.IDLE self._sse_connection_closed.set() diff --git a/splitio/storage/adapters/redis.py b/splitio/storage/adapters/redis.py index 35575eff..c0cf9e75 100644 --- a/splitio/storage/adapters/redis.py +++ b/splitio/storage/adapters/redis.py @@ -353,7 +353,6 @@ def _build_default_client(config): # pylint: disable=too-many-locals unix_socket_path = config.get('redisUnixSocketPath', None) encoding = config.get('redisEncoding', 'utf-8') encoding_errors = config.get('redisEncodingErrors', 'strict') - charset = config.get('redisCharset', 'utf-8') errors = config.get('redisErrors', None) decode_responses = config.get('redisDecodeResponses', True) retry_on_timeout = config.get('redisRetryOnTimeout', False) @@ -378,7 +377,6 @@ def _build_default_client(config): # pylint: disable=too-many-locals unix_socket_path=unix_socket_path, encoding=encoding, encoding_errors=encoding_errors, - charset=charset, errors=errors, decode_responses=decode_responses, retry_on_timeout=retry_on_timeout, diff --git a/splitio/sync/manager.py b/splitio/sync/manager.py index 035ba838..700f2dfe 100644 --- a/splitio/sync/manager.py +++ b/splitio/sync/manager.py @@ -16,7 +16,7 @@ class Manager(object): # pylint:disable=too-many-instance-attributes _CENTINEL_EVENT = object() - def __init__(self, ready_flag, synchronizer, auth_api, streaming_enabled, sse_url=None): # pylint:disable=too-many-arguments + def __init__(self, ready_flag, synchronizer, auth_api, streaming_enabled, sdk_metadata, sse_url=None, client_key=None): # pylint:disable=too-many-arguments """ Construct Manager. @@ -29,8 +29,17 @@ def __init__(self, ready_flag, synchronizer, auth_api, streaming_enabled, sse_ur :param auth_api: Authentication api client :type auth_api: splitio.api.auth.AuthAPI + :param sdk_metadata: SDK version & machine name & IP. + :type sdk_metadata: splitio.client.util.SdkMetadata + :param streaming_enabled: whether to use streaming or not :type streaming_enabled: bool + + :param sse_url: streaming base url. + :type sse_url: str + + :param client_key: client key. 
+ :type client_key: str """ self._streaming_enabled = streaming_enabled self._ready_flag = ready_flag @@ -39,7 +48,7 @@ def __init__(self, ready_flag, synchronizer, auth_api, streaming_enabled, sse_ur self._push_status_handler_active = True self._backoff = Backoff() self._queue = Queue() - self._push = PushManager(auth_api, synchronizer, self._queue, sse_url) + self._push = PushManager(auth_api, synchronizer, self._queue, sdk_metadata, sse_url, client_key) self._push_status_handler = Thread(target=self._streaming_feedback_handler, name='PushStatusHandler') self._push_status_handler.setDaemon(True) diff --git a/splitio/version.py b/splitio/version.py index c37cb6f7..e2921057 100644 --- a/splitio/version.py +++ b/splitio/version.py @@ -1 +1 @@ -__version__ = '9.0.0-all' +__version__ = '9.0.0-rc1' diff --git a/tests/api/test_util.py b/tests/api/test_util.py new file mode 100644 index 00000000..a3c7a20b --- /dev/null +++ b/tests/api/test_util.py @@ -0,0 +1,39 @@ +"""Split API tests module.""" + +import pytest + +from splitio.api import headers_from_metadata + +from splitio.client.util import SdkMetadata + + +class UtilTests(object): + """Util test cases.""" + + def test_headers_from_metadata(self, mocker): + """Test headers from metadata call.""" + metadata = headers_from_metadata(SdkMetadata('1.0', 'some', '1.2.3.4')) + assert metadata['SplitSDKVersion'] == '1.0' + assert metadata['SplitSDKMachineIP'] == '1.2.3.4' + assert metadata['SplitSDKMachineName'] == 'some' + assert 'SplitSDKClientKey' not in metadata + + metadata = headers_from_metadata(SdkMetadata('1.0', 'some', '1.2.3.4'), 'abcd') + assert metadata['SplitSDKVersion'] == '1.0' + assert metadata['SplitSDKMachineIP'] == '1.2.3.4' + assert metadata['SplitSDKMachineName'] == 'some' + assert metadata['SplitSDKClientKey'] == 'abcd' + + metadata = headers_from_metadata(SdkMetadata('1.0', 'some', 'NA')) + assert metadata['SplitSDKVersion'] == '1.0' + assert 'SplitSDKMachineIP' not in metadata + assert 'SplitSDKMachineName' not in metadata + assert 'SplitSDKClientKey' not in metadata + + metadata = headers_from_metadata(SdkMetadata('1.0', 'some', 'unknown')) + assert metadata['SplitSDKVersion'] == '1.0' + assert 'SplitSDKMachineIP' not in metadata + assert 'SplitSDKMachineName' not in metadata + assert 'SplitSDKClientKey' not in metadata + + diff --git a/tests/client/test_factory.py b/tests/client/test_factory.py index 8eac8363..854c184e 100644 --- a/tests/client/test_factory.py +++ b/tests/client/test_factory.py @@ -32,7 +32,7 @@ def test_inmemory_client_creation_streaming_false(self, mocker): """Test that a client with in-memory storage is created correctly.""" # Setup synchronizer - def _split_synchronizer(self, ready_flag, synchronizer, auth_api, streaming_enabled, sse_url=None): + def _split_synchronizer(self, ready_flag, some, auth_api, streaming_enabled, sdk_matadata, sse_url=None, client_key=None): synchronizer = mocker.Mock(spec=Synchronizer) synchronizer.sync_all.return_values = None self._ready_flag = ready_flag @@ -120,7 +120,6 @@ def test_redis_client_creation(self, mocker): unix_socket_path='/some_path', encoding='utf-8', encoding_errors='non-strict', - charset='utf-8', errors=True, decode_responses=True, retry_on_timeout=True, @@ -233,7 +232,7 @@ def _imppression_count_task_init_mock(self, synchronize_counters): imp_count_async_task_mock) # Setup synchronizer - def _split_synchronizer(self, ready_flag, some, auth_api, streaming_enabled, sse_url=None): + def _split_synchronizer(self, ready_flag, some, auth_api, 
streaming_enabled, sdk_matadata, sse_url=None, client_key=None): synchronizer = Synchronizer(syncs, tasks) self._ready_flag = ready_flag self._synchronizer = synchronizer @@ -327,7 +326,7 @@ def _imppression_count_task_init_mock(self, synchronize_counters): imp_count_async_task_mock) # Setup synchronizer - def _split_synchronizer(self, ready_flag, some, auth_api, streaming_enabled, sse_url=None): + def _split_synchronizer(self, ready_flag, some, auth_api, streaming_enabled, sdk_matadata, sse_url=None, client_key=None): synchronizer = Synchronizer(syncs, tasks) self._ready_flag = ready_flag self._synchronizer = synchronizer diff --git a/tests/push/test_manager.py b/tests/push/test_manager.py index 077f0d76..d4b48bc1 100644 --- a/tests/push/test_manager.py +++ b/tests/push/test_manager.py @@ -2,9 +2,12 @@ #pylint:disable=no-self-use,protected-access from threading import Thread from queue import Queue + from splitio.api.auth import APIException -from splitio.push.sse import SSEEvent + from splitio.models.token import Token + +from splitio.push.sse import SSEEvent from splitio.push.parser import parse_incoming_event, EventType, ControlType, ControlMessage, \ OccupancyMessage, SplitChangeUpdate, SplitKillUpdate, SegmentChangeUpdate from splitio.push.processor import MessageProcessor @@ -12,6 +15,7 @@ from splitio.push.manager import PushManager, _TOKEN_REFRESH_GRACE_PERIOD from splitio.push.splitsse import SplitSSEClient from splitio.push.status_tracker import Status + from tests.helpers import Any @@ -30,7 +34,7 @@ def test_connection_success(self, mocker): mocker.patch('splitio.push.manager.Timer', new=timer_mock) mocker.patch('splitio.push.manager.SplitSSEClient', new=sse_constructor_mock) feedback_loop = Queue() - manager = PushManager(api_mock, mocker.Mock(), feedback_loop) + manager = PushManager(api_mock, mocker.Mock(), feedback_loop, mocker.Mock()) def new_start(*args, **kwargs): # pylint: disable=unused-argument """splitsse.start mock.""" @@ -63,7 +67,7 @@ def test_connection_failure(self, mocker): mocker.patch('splitio.push.manager.Timer', new=timer_mock) mocker.patch('splitio.push.manager.SplitSSEClient', new=sse_constructor_mock) feedback_loop = Queue() - manager = PushManager(api_mock, mocker.Mock(), feedback_loop) + manager = PushManager(api_mock, mocker.Mock(), feedback_loop, mocker.Mock()) def new_start(*args, **kwargs): # pylint: disable=unused-argument """splitsse.start mock.""" @@ -90,7 +94,7 @@ def test_push_disabled(self, mocker): mocker.patch('splitio.push.manager.Timer', new=timer_mock) mocker.patch('splitio.push.manager.SplitSSEClient', new=sse_constructor_mock) feedback_loop = Queue() - manager = PushManager(api_mock, mocker.Mock(), feedback_loop) + manager = PushManager(api_mock, mocker.Mock(), feedback_loop, mocker.Mock()) manager.start() assert feedback_loop.get() == Status.PUSH_NONRETRYABLE_ERROR assert timer_mock.mock_calls == [mocker.call(0, Any())] @@ -109,7 +113,7 @@ def test_auth_apiexception(self, mocker): mocker.patch('splitio.push.manager.SplitSSEClient', new=sse_constructor_mock) feedback_loop = Queue() - manager = PushManager(api_mock, mocker.Mock(), feedback_loop) + manager = PushManager(api_mock, mocker.Mock(), feedback_loop, mocker.Mock()) manager.start() assert feedback_loop.get() == Status.PUSH_RETRYABLE_ERROR assert timer_mock.mock_calls == [mocker.call(0, Any())] @@ -126,7 +130,7 @@ def test_split_change(self, mocker): processor_mock = mocker.Mock(spec=MessageProcessor) mocker.patch('splitio.push.manager.MessageProcessor', new=processor_mock) - 
manager = PushManager(mocker.Mock(), mocker.Mock(), mocker.Mock()) + manager = PushManager(mocker.Mock(), mocker.Mock(), mocker.Mock(), mocker.Mock()) manager._event_handler(sse_event) assert parse_event_mock.mock_calls == [mocker.call(sse_event)] assert processor_mock.mock_calls == [ @@ -145,7 +149,7 @@ def test_split_kill(self, mocker): processor_mock = mocker.Mock(spec=MessageProcessor) mocker.patch('splitio.push.manager.MessageProcessor', new=processor_mock) - manager = PushManager(mocker.Mock(), mocker.Mock(), mocker.Mock()) + manager = PushManager(mocker.Mock(), mocker.Mock(), mocker.Mock(), mocker.Mock()) manager._event_handler(sse_event) assert parse_event_mock.mock_calls == [mocker.call(sse_event)] assert processor_mock.mock_calls == [ @@ -164,7 +168,7 @@ def test_segment_change(self, mocker): processor_mock = mocker.Mock(spec=MessageProcessor) mocker.patch('splitio.push.manager.MessageProcessor', new=processor_mock) - manager = PushManager(mocker.Mock(), mocker.Mock(), mocker.Mock()) + manager = PushManager(mocker.Mock(), mocker.Mock(), mocker.Mock(), mocker.Mock()) manager._event_handler(sse_event) assert parse_event_mock.mock_calls == [mocker.call(sse_event)] assert processor_mock.mock_calls == [ @@ -183,7 +187,7 @@ def test_control_message(self, mocker): status_tracker_mock = mocker.Mock(spec=PushStatusTracker) mocker.patch('splitio.push.manager.PushStatusTracker', new=status_tracker_mock) - manager = PushManager(mocker.Mock(), mocker.Mock(), mocker.Mock()) + manager = PushManager(mocker.Mock(), mocker.Mock(), mocker.Mock(), mocker.Mock()) manager._event_handler(sse_event) assert parse_event_mock.mock_calls == [mocker.call(sse_event)] assert status_tracker_mock.mock_calls == [ @@ -202,7 +206,7 @@ def test_occupancy_message(self, mocker): status_tracker_mock = mocker.Mock(spec=PushStatusTracker) mocker.patch('splitio.push.manager.PushStatusTracker', new=status_tracker_mock) - manager = PushManager(mocker.Mock(), mocker.Mock(), mocker.Mock()) + manager = PushManager(mocker.Mock(), mocker.Mock(), mocker.Mock(), mocker.Mock()) manager._event_handler(sse_event) assert parse_event_mock.mock_calls == [mocker.call(sse_event)] assert status_tracker_mock.mock_calls == [ diff --git a/tests/push/test_splitsse.py b/tests/push/test_splitsse.py index 7d646a65..ebb8fa94 100644 --- a/tests/push/test_splitsse.py +++ b/tests/push/test_splitsse.py @@ -3,11 +3,16 @@ import time from queue import Queue import pytest + from splitio.models.token import Token + from splitio.push.splitsse import SplitSSEClient from splitio.push.sse import SSEEvent + from tests.helpers.mockserver import SSEMockServer +from splitio.client.util import SdkMetadata + class SSEClientTests(object): """SSEClient test cases.""" @@ -36,8 +41,8 @@ def on_disconnect(): server = SSEMockServer(request_queue) server.start() - client = SplitSSEClient(handler, on_connect, on_disconnect, - base_url='http://localhost:' + str(server.port())) + client = SplitSSEClient(handler, SdkMetadata('1.0', 'some', '1.2.3.4'), on_connect, on_disconnect, + 'abcd', base_url='http://localhost:' + str(server.port())) token = Token(True, 'some', {'chan1': ['subscribe'], 'chan2': ['subscribe', 'channel-metadata:publishers']}, 1, 2) @@ -55,6 +60,10 @@ def on_disconnect(): request = request_queue.get(1) assert request.path == '/event-stream?v=1.1&accessToken=some&channels=chan1,[?occupancy=metrics.publishers]chan2' assert request.headers['accept'] == 'text/event-stream' + assert request.headers['SplitSDKVersion'] == '1.0' + assert 
request.headers['SplitSDKMachineIP'] == '1.2.3.4' + assert request.headers['SplitSDKMachineName'] == 'some' + assert request.headers['SplitSDKClientKey'] == 'abcd' assert events == [ SSEEvent('1', 'message', '1', 'a'), @@ -91,8 +100,8 @@ def on_disconnect(): """On disconnect handler.""" status['on_disconnect'] = True - client = SplitSSEClient(handler, on_connect, on_disconnect, - base_url='http://localhost:' + str(server.port())) + client = SplitSSEClient(handler, SdkMetadata('1.0', 'some', '1.2.3.4'), on_connect, on_disconnect, + "abcd", base_url='http://localhost:' + str(server.port())) token = Token(True, 'some', {'chan1': ['subscribe'], 'chan2': ['subscribe', 'channel-metadata:publishers']}, 1, 2) @@ -103,6 +112,10 @@ def on_disconnect(): request = request_queue.get(1) assert request.path == '/event-stream?v=1.1&accessToken=some&channels=chan1,[?occupancy=metrics.publishers]chan2' assert request.headers['accept'] == 'text/event-stream' + assert request.headers['SplitSDKVersion'] == '1.0' + assert request.headers['SplitSDKMachineIP'] == '1.2.3.4' + assert request.headers['SplitSDKMachineName'] == 'some' + assert request.headers['SplitSDKClientKey'] == 'abcd' server.publish(SSEMockServer.VIOLENT_REQUEST_END) server.stop() diff --git a/tests/storage/adapters/test_redis_adapter.py b/tests/storage/adapters/test_redis_adapter.py index a159a918..d2bf686f 100644 --- a/tests/storage/adapters/test_redis_adapter.py +++ b/tests/storage/adapters/test_redis_adapter.py @@ -90,7 +90,6 @@ def test_adapter_building(self, mocker): 'redisUnixSocketPath': '/tmp/socket', 'redisEncoding': 'utf-8', 'redisEncodingErrors': 'strict', - 'redisCharset': 'utf-8', 'redisErrors': 'abc', 'redisDecodeResponses': True, 'redisRetryOnTimeout': True, @@ -117,7 +116,6 @@ def test_adapter_building(self, mocker): unix_socket_path='/tmp/socket', encoding='utf-8', encoding_errors='strict', - charset='utf-8', errors='abc', decode_responses=True, retry_on_timeout=True, @@ -142,7 +140,6 @@ def test_adapter_building(self, mocker): 'redisUnixSocketPath': '/tmp/socket', 'redisEncoding': 'utf-8', 'redisEncodingErrors': 'strict', - 'redisCharset': 'utf-8', 'redisErrors': 'abc', 'redisDecodeResponses': True, 'redisRetryOnTimeout': True, diff --git a/tests/sync/test_manager.py b/tests/sync/test_manager.py index 3962a783..fbe98ba4 100644 --- a/tests/sync/test_manager.py +++ b/tests/sync/test_manager.py @@ -18,8 +18,11 @@ from splitio.sync.manager import Manager from splitio.storage import SplitStorage + from splitio.api import APIException +from splitio.client.util import SdkMetadata + class ManagerTests(object): """Synchronizer Manager tests.""" @@ -43,14 +46,14 @@ def run(x): mocker.Mock(), mocker.Mock(), mocker.Mock()) synchronizer = Synchronizer(synchronizers, split_tasks) - manager = Manager(threading.Event(), synchronizer, mocker.Mock(), False) + manager = Manager(threading.Event(), synchronizer, mocker.Mock(), False, SdkMetadata('1.0', 'some', '1.2.3.4')) manager.start() # should not throw! 
def test_start_streaming_false(self, mocker): splits_ready_event = threading.Event() synchronizer = mocker.Mock(spec=Synchronizer) - manager = Manager(splits_ready_event, synchronizer, mocker.Mock(), False) + manager = Manager(splits_ready_event, synchronizer, mocker.Mock(), False, SdkMetadata('1.0', 'some', '1.2.3.4')) manager.start() splits_ready_event.wait(2) From 6bdb1df84d0577ad94e1ee94ed195a3aca21d0a6 Mon Sep 17 00:00:00 2001 From: Matias Melograno Date: Mon, 3 May 2021 10:18:38 -0300 Subject: [PATCH 17/17] prep release --- CHANGES.txt | 4 ++-- splitio/version.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/CHANGES.txt b/CHANGES.txt index 4cf6037b..2a5076d5 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -1,10 +1,10 @@ -9.0.0 (Apr 30, 2021) +9.0.0 (May 3, 2021) - BREAKING CHANGE: Removed splitSdkMachineIp and splitSdkMachineName configs. + - BREAKING CHANGE: Deprecated `redisCharset` config. - BREAKING CHANGE: Deprecated uWSGI local cache. - BREAKING CHANGE: Deprecated Python2 support. - Removed six, future and futures libs for compatibility between Python2 and Python3. - Updated strings encoding to utf-8 by default for Redis. - - BREAKING CHANGE: Deprecated `redisCharset` config. - Added SDK Metadata headers to streaming client. 8.4.1 (Apr 16, 2021) diff --git a/splitio/version.py b/splitio/version.py index e2921057..33de8d16 100644 --- a/splitio/version.py +++ b/splitio/version.py @@ -1 +1 @@ -__version__ = '9.0.0-rc1' +__version__ = '9.0.0'
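A short usage sketch of the SDK metadata headers introduced in [PATCH 16/17]. The values below are placeholders mirroring tests/api/test_util.py; in the SDK itself the factory builds SdkMetadata via util.get_metadata(cfg) and passes the last four characters of the apikey as client_key.

    from splitio.api import headers_from_metadata
    from splitio.client.util import SdkMetadata

    meta = SdkMetadata('1.0', 'some', '1.2.3.4')

    # Machine IP and name are included only when the instance IP is known
    # (not 'NA' or 'unknown'); the SDK version is always present.
    headers = headers_from_metadata(meta)
    # {'SplitSDKVersion': '1.0', 'SplitSDKMachineIP': '1.2.3.4',
    #  'SplitSDKMachineName': 'some'}

    # Passing a client key adds SplitSDKClientKey. SplitSSEClient builds this
    # dict once and forwards it as extra_headers on the SSE connection.
    headers = headers_from_metadata(meta, client_key='abcd')
    # same as above, plus 'SplitSDKClientKey': 'abcd'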