diff --git a/CHANGELOG.md b/CHANGELOG.md
index 0aa714b7..bdd5dd8c 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,7 @@
+# 6.0.3 - 2025-07-07
+
+- feat: add a feature flag evaluation cache (in-memory or Redis) to support returning flag evaluations when the service is down
+
 # 6.0.2 - 2025-07-02
 
 - fix: send_feature_flags changed to default to false in `Client::capture_exception`
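The fallback cache is opt-in via the new `flag_fallback_cache_url` constructor argument introduced in `posthog/client.py` below. A minimal usage sketch, assuming the 6.0.3 package (the API key is a placeholder):

```python
from posthog import Client

# In-memory fallback: results from successful flag evaluations are kept
# for ttl seconds (here 300) and served if a later evaluation fails.
client = Client(
    "your-api-key",
    flag_fallback_cache_url="memory://local/?ttl=300&size=10000",
)

# Redis-backed fallback for multi-process deployments (requires the
# `redis` package; the client degrades to no caching if it is missing):
# client = Client(
#     "your-api-key",
#     flag_fallback_cache_url="redis://localhost:6379/0/?ttl=300",
# )
```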
diff --git a/README.md b/README.md
index 55e831a9..ffdb6af3 100644
--- a/README.md
+++ b/README.md
@@ -32,7 +32,7 @@ We recommend using [uv](https://docs.astral.sh/uv/). It's super fast.
 ```bash
 uv python install 3.9.19
 uv python pin 3.9.19
-uv venv env
+uv venv
 source env/bin/activate
 uv sync --extra dev --extra test
 pre-commit install
diff --git a/mypy-baseline.txt b/mypy-baseline.txt
index 207d6809..c3e714c6 100644
--- a/mypy-baseline.txt
+++ b/mypy-baseline.txt
@@ -42,3 +42,5 @@ posthog/ai/utils.py:0: error: Function "builtins.any" is not valid as a type [v
 posthog/ai/utils.py:0: note: Perhaps you meant "typing.Any" instead of "any"?
 posthog/ai/utils.py:0: error: Function "builtins.any" is not valid as a type [valid-type]
 posthog/ai/utils.py:0: note: Perhaps you meant "typing.Any" instead of "any"?
+posthog/client.py:0: error: Name "urlparse" already defined (possibly by an import) [no-redef]
+posthog/client.py:0: error: Name "parse_qs" already defined (possibly by an import) [no-redef]
diff --git a/posthog/client.py b/posthog/client.py
index 3542cc06..76134faf 100644
--- a/posthog/client.py
+++ b/posthog/client.py
@@ -50,6 +50,8 @@
     to_values,
 )
 from posthog.utils import (
+    FlagCache,
+    RedisFlagCache,
     SizeLimitedDict,
     clean,
     guess_timezone,
@@ -95,7 +97,30 @@ def add_context_tags(properties):
 
 
 class Client(object):
-    """Create a new PostHog client."""
+    """Create a new PostHog client.
+
+    Examples:
+        Basic usage:
+        >>> client = Client("your-api-key")
+
+        With memory-based feature flag fallback cache:
+        >>> client = Client(
+        ...     "your-api-key",
+        ...     flag_fallback_cache_url="memory://local/?ttl=300&size=10000"
+        ... )
+
+        With Redis fallback cache for high-scale applications:
+        >>> client = Client(
+        ...     "your-api-key",
+        ...     flag_fallback_cache_url="redis://localhost:6379/0/?ttl=300"
+        ... )
+
+        With Redis authentication:
+        >>> client = Client(
+        ...     "your-api-key",
+        ...     flag_fallback_cache_url="redis://username:password@localhost:6379/0/?ttl=300"
+        ... )
+    """
 
     log = logging.getLogger("posthog")
 
@@ -126,6 +151,7 @@ def __init__(
         project_root=None,
         privacy_mode=False,
         before_send=None,
+        flag_fallback_cache_url=None,
     ):
         self.queue = queue.Queue(max_queue_size)
 
@@ -151,6 +177,8 @@
         )
         self.poller = None
         self.distinct_ids_feature_flags_reported = SizeLimitedDict(MAX_DICT_SIZE, set)
+        self.flag_cache = self._initialize_flag_cache(flag_fallback_cache_url)
+        self.flag_definition_version = 0
         self.disabled = disabled
         self.disable_geoip = disable_geoip
         self.historical_migration = historical_migration
@@ -707,6 +735,9 @@ def shutdown(self):
 
     def _load_feature_flags(self):
         try:
+            # Store old flags to detect changes
+            old_flags_by_key: dict[str, dict] = self.feature_flags_by_key or {}
+
             response = get(
                 self.personal_api_key,
                 f"/api/feature_flag/local_evaluation/?token={self.api_key}&send_cohorts",
@@ -718,6 +749,14 @@
             self.group_type_mapping = response["group_type_mapping"] or {}
             self.cohorts = response["cohorts"] or {}
 
+            # Check if flag definitions changed and update version
+            if self.flag_cache and old_flags_by_key != (
+                self.feature_flags_by_key or {}
+            ):
+                old_version = self.flag_definition_version
+                self.flag_definition_version += 1
+                self.flag_cache.invalidate_version(old_version)
+
         except APIError as e:
             if e.status == 401:
                 self.log.error(
@@ -739,6 +778,10 @@
             self.group_type_mapping = {}
             self.cohorts = {}
 
+            # Clear flag cache when quota limited
+            if self.flag_cache:
+                self.flag_cache.clear()
+
             if self.debug:
                 raise APIError(
                     status=402,
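The `flag_definition_version` counter bumped above is what ties cached results to a specific set of flag definitions. A minimal sketch of that invalidation flow, using the `FlagCache` API added in `posthog/utils.py` further down (a bare `True` stands in for the `FeatureFlagResult` the client actually stores):

```python
from posthog.utils import FlagCache

cache = FlagCache(max_size=10000, default_ttl=300)
version = 0

# Successful evaluations are cached under the current version.
cache.set_cached_flag(
    "user-1", "my-flag", flag_result=True, flag_definition_version=version
)

# The poller notices changed definitions: bump the version and drop
# everything cached under the old one.
old_version, version = version, version + 1
cache.invalidate_version(old_version)

assert cache.get_cached_flag("user-1", "my-flag", current_flag_version=version) is None
```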
@@ -889,6 +932,12 @@
             flag_result = FeatureFlagResult.from_value_and_payload(
                 key, lookup_match_value, payload
             )
+
+            # Cache successful local evaluation
+            if self.flag_cache and flag_result:
+                self.flag_cache.set_cached_flag(
+                    distinct_id, key, flag_result, self.flag_definition_version
+                )
         elif not only_evaluate_locally:
             try:
                 flag_details, request_id = self._get_feature_flag_details_from_decide(
@@ -902,12 +951,30 @@
                 flag_result = FeatureFlagResult.from_flag_details(
                     flag_details, override_match_value
                 )
+
+                # Cache successful remote evaluation
+                if self.flag_cache and flag_result:
+                    self.flag_cache.set_cached_flag(
+                        distinct_id, key, flag_result, self.flag_definition_version
+                    )
+
                 self.log.debug(
                     f"Successfully computed flag remotely: {key} -> {flag_result}"
                 )
             except Exception as e:
                 self.log.exception(f"[FEATURE FLAGS] Unable to get flag remotely: {e}")
+
+                # Fall back to cached value if remote evaluation fails
+                if self.flag_cache:
+                    stale_result = self.flag_cache.get_stale_cached_flag(
+                        distinct_id, key
+                    )
+                    if stale_result:
+                        self.log.info(
+                            f"[FEATURE FLAGS] Using stale cached value for flag {key}"
+                        )
+                        flag_result = stale_result
+
         if send_feature_flag_events:
             self._capture_feature_flag_called(
                 distinct_id,
@@ -1278,6 +1345,99 @@ def _get_all_flags_and_payloads_locally(
             "featureFlagPayloads": payloads,
         }, fallback_to_decide
 
+    def _initialize_flag_cache(self, cache_url):
+        """Initialize the feature flag cache for graceful degradation during service outages.
+
+        When enabled, the cache stores flag evaluation results and serves them as a fallback
+        when the PostHog API is unavailable. This ensures your application continues to
+        receive flag values even during outages.
+
+        Args:
+            cache_url: Cache configuration URL. Examples:
+                - None: Disable caching
+                - "memory://local/?ttl=300&size=10000": Memory cache with TTL and size
+                - "redis://localhost:6379/0/?ttl=300": Redis cache with TTL
+                - "redis://username:password@host:port/?ttl=300": Redis with auth
+
+        Example usage:
+            # Memory cache
+            client = Client(
+                "your-api-key",
+                flag_fallback_cache_url="memory://local/?ttl=300&size=10000"
+            )
+
+            # Redis cache
+            client = Client(
+                "your-api-key",
+                flag_fallback_cache_url="redis://localhost:6379/0/?ttl=300"
+            )
+
+            # Normal evaluation - cache is populated
+            flag_value = client.get_feature_flag("my-flag", "user123")
+
+            # During API outage - returns cached value instead of None
+            flag_value = client.get_feature_flag("my-flag", "user123")  # Uses cache
+        """
+        if not cache_url:
+            return None
+
+        try:
+            from urllib.parse import urlparse, parse_qs
+        except ImportError:
+            from urlparse import urlparse, parse_qs
+
+        try:
+            parsed = urlparse(cache_url)
+            scheme = parsed.scheme.lower()
+            query_params = parse_qs(parsed.query)
+            ttl = int(query_params.get("ttl", [300])[0])
+
+            if scheme == "memory":
+                size = int(query_params.get("size", [10000])[0])
+                return FlagCache(size, ttl)
+
+            elif scheme == "redis":
+                try:
+                    # Not worth importing redis if we're not using it
+                    import redis
+
+                    redis_url = f"{parsed.scheme}://"
+                    if parsed.username or parsed.password:
+                        redis_url += f"{parsed.username or ''}:{parsed.password or ''}@"
+                    redis_url += (
+                        f"{parsed.hostname or 'localhost'}:{parsed.port or 6379}"
+                    )
+                    if parsed.path:
+                        redis_url += parsed.path
+
+                    client = redis.from_url(redis_url)
+
+                    # Test connection before using it
+                    client.ping()
+
+                    return RedisFlagCache(client, default_ttl=ttl)
+
+                except ImportError:
+                    self.log.warning(
+                        "[FEATURE FLAGS] Redis not available, flag caching disabled"
+                    )
+                    return None
+                except Exception as e:
+                    self.log.warning(
+                        f"[FEATURE FLAGS] Redis connection failed: {e}, flag caching disabled"
+                    )
+                    return None
+            else:
+                raise ValueError(
+                    f"Unknown cache URL scheme: {scheme}. Supported schemes: memory, redis"
+                )
+
+        except Exception as e:
+            self.log.warning(
+                f"[FEATURE FLAGS] Failed to parse cache URL '{cache_url}': {e}"
+            )
+            return None
+
     def feature_flag_definitions(self):
         return self.feature_flags
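`_initialize_flag_cache` leans entirely on the standard library for URL handling; a standalone sketch of what it extracts from a cache URL:

```python
from urllib.parse import parse_qs, urlparse

parsed = urlparse("redis://user:secret@localhost:6379/0/?ttl=300")
params = parse_qs(parsed.query)

print(parsed.scheme)    # redis
print(parsed.hostname)  # localhost
print(parsed.port)      # 6379
print(parsed.path)      # /0/
print(int(params.get("ttl", [300])[0]))  # 300
```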
diff --git a/posthog/test/test_utils.py b/posthog/test/test_utils.py
index 747f1f99..36e0ed83 100644
--- a/posthog/test/test_utils.py
+++ b/posthog/test/test_utils.py
@@ -1,3 +1,4 @@
+import time
 import unittest
 from dataclasses import dataclass
 from datetime import date, datetime, timedelta
@@ -12,6 +13,7 @@
 from pydantic.v1 import BaseModel as BaseModelV1
 
 from posthog import utils
+from posthog.types import FeatureFlagResult
 
 TEST_API_KEY = "kOOlRy2QlMY9jHZQv0bKz0FZyazBUoY8Arj0lFVNjs4"
 FAKE_TEST_API_KEY = "random_key"
@@ -173,3 +175,127 @@ class TestDataClass:
                 "inner_optional": None,
             },
         }
+
+
+class TestFlagCache(unittest.TestCase):
+    def setUp(self):
+        self.cache = utils.FlagCache(max_size=3, default_ttl=1)
+        self.flag_result = FeatureFlagResult.from_value_and_payload(
+            "test-flag", True, None
+        )
+
+    def test_cache_basic_operations(self):
+        distinct_id = "user123"
+        flag_key = "test-flag"
+        flag_version = 1
+
+        # Test cache miss
+        result = self.cache.get_cached_flag(distinct_id, flag_key, flag_version)
+        assert result is None
+
+        # Test cache set and hit
+        self.cache.set_cached_flag(
+            distinct_id, flag_key, self.flag_result, flag_version
+        )
+        result = self.cache.get_cached_flag(distinct_id, flag_key, flag_version)
+        assert result is not None
+        assert result.get_value()
+
+    def test_cache_ttl_expiration(self):
+        distinct_id = "user123"
+        flag_key = "test-flag"
+        flag_version = 1
+
+        # Set flag in cache
+        self.cache.set_cached_flag(
+            distinct_id, flag_key, self.flag_result, flag_version
+        )
+
+        # Should be available immediately
+        result = self.cache.get_cached_flag(distinct_id, flag_key, flag_version)
+        assert result is not None
+
+        # Wait for TTL to expire (1 second + buffer)
+        time.sleep(1.1)
+
+        # Should be expired
+        result = self.cache.get_cached_flag(distinct_id, flag_key, flag_version)
+        assert result is None
+
+    def test_cache_version_invalidation(self):
+        distinct_id = "user123"
+        flag_key = "test-flag"
+        old_version = 1
+        new_version = 2
+
+        # Set flag with old version
+        self.cache.set_cached_flag(distinct_id, flag_key, self.flag_result, old_version)
+
+        # Should hit with old version
+        result = self.cache.get_cached_flag(distinct_id, flag_key, old_version)
+        assert result is not None
+
+        # Should miss with new version
+        result = self.cache.get_cached_flag(distinct_id, flag_key, new_version)
+        assert result is None
+
+        # Invalidate old version
+        self.cache.invalidate_version(old_version)
+
+        # Should miss even with old version after invalidation
+        result = self.cache.get_cached_flag(distinct_id, flag_key, old_version)
+        assert result is None
+
+    def test_stale_cache_functionality(self):
+        distinct_id = "user123"
+        flag_key = "test-flag"
+        flag_version = 1
+
+        # Set flag in cache
+        self.cache.set_cached_flag(
+            distinct_id, flag_key, self.flag_result, flag_version
+        )
+
+        # Wait for TTL to expire
+        time.sleep(1.1)
+
+        # Should not get fresh cache
+        result = self.cache.get_cached_flag(distinct_id, flag_key, flag_version)
+        assert result is None
+
+        # Should get stale cache (within 1 hour default)
+        stale_result = self.cache.get_stale_cached_flag(distinct_id, flag_key)
+        assert stale_result is not None
+        assert stale_result.get_value()
+
+    def test_lru_eviction(self):
+        # Cache has max_size=3, so adding a 4th user should evict the LRU one
+        flag_version = 1
+
+        # Add 3 users
+        for i in range(3):
+            user_id = f"user{i}"
+            self.cache.set_cached_flag(
+                user_id, "test-flag", self.flag_result, flag_version
+            )
+
+        # Access user0 to make it recently used
+        self.cache.get_cached_flag("user0", "test-flag", flag_version)
+
+        # Add a 4th user, which should evict user1 (least recently used)
+        self.cache.set_cached_flag("user3", "test-flag", self.flag_result, flag_version)
+
+        # user1 should have been evicted
+        result = self.cache.get_cached_flag("user1", "test-flag", flag_version)
+        assert result is None
+
+        # user0 should still be there (was recently accessed)
+        result = self.cache.get_cached_flag("user0", "test-flag", flag_version)
+        assert result is not None
+
+        # user2 should still be there (was recently added)
+        result = self.cache.get_cached_flag("user2", "test-flag", flag_version)
+        assert result is not None
+
+        # user3 should be there (just added)
+        result = self.cache.get_cached_flag("user3", "test-flag", flag_version)
+        assert result is not None
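The two-tier freshness model these tests exercise lives on `FlagCacheEntry`, added in `posthog/utils.py` below. A small sketch of the distinction between "fresh" and "stale but usable", with a plain dict standing in for a real flag result:

```python
import time

from posthog.utils import FlagCacheEntry

entry = FlagCacheEntry(flag_result={"key": "test-flag"}, flag_definition_version=1)
now = time.time()

# Fresh: within ttl and matching the current definition version.
assert entry.is_valid(now, ttl=300, current_flag_version=1)

# A version bump alone makes it invalid for normal reads...
assert not entry.is_valid(now, ttl=300, current_flag_version=2)

# ...but it can still be served as a stale fallback for up to an hour.
assert entry.is_stale_but_usable(now)
```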
diff --git a/posthog/utils.py b/posthog/utils.py
index 261b8b7e..37f4a136 100644
--- a/posthog/utils.py
+++ b/posthog/utils.py
@@ -1,6 +1,8 @@
+import json
 import logging
 import numbers
 import re
+import time
 from collections import defaultdict
 from dataclasses import asdict, is_dataclass
 from datetime import date, datetime, timezone
@@ -157,6 +159,266 @@ def __setitem__(self, key, value):
         super().__setitem__(key, value)
 
 
+class FlagCacheEntry:
+    def __init__(self, flag_result, flag_definition_version, timestamp=None):
+        self.flag_result = flag_result
+        self.flag_definition_version = flag_definition_version
+        self.timestamp = timestamp or time.time()
+
+    def is_valid(self, current_time, ttl, current_flag_version):
+        time_valid = (current_time - self.timestamp) < ttl
+        version_valid = self.flag_definition_version == current_flag_version
+        return time_valid and version_valid
+
+    def is_stale_but_usable(self, current_time, max_stale_age=3600):
+        return (current_time - self.timestamp) < max_stale_age
+
+
+class FlagCache:
+    def __init__(self, max_size=10000, default_ttl=300):
+        self.cache = {}  # distinct_id -> {flag_key: FlagCacheEntry}
+        self.access_times = {}  # distinct_id -> last_access_time
+        self.max_size = max_size
+        self.default_ttl = default_ttl
+
+    def get_cached_flag(self, distinct_id, flag_key, current_flag_version):
+        current_time = time.time()
+
+        if distinct_id not in self.cache:
+            return None
+
+        user_flags = self.cache[distinct_id]
+        if flag_key not in user_flags:
+            return None
+
+        entry = user_flags[flag_key]
+        if entry.is_valid(current_time, self.default_ttl, current_flag_version):
+            self.access_times[distinct_id] = current_time
+            return entry.flag_result
+
+        return None
+
+    def get_stale_cached_flag(self, distinct_id, flag_key, max_stale_age=3600):
+        current_time = time.time()
+
+        if distinct_id not in self.cache:
+            return None
+
+        user_flags = self.cache[distinct_id]
+        if flag_key not in user_flags:
+            return None
+
+        entry = user_flags[flag_key]
+        if entry.is_stale_but_usable(current_time, max_stale_age):
+            return entry.flag_result
+
+        return None
+
+    def set_cached_flag(
+        self, distinct_id, flag_key, flag_result, flag_definition_version
+    ):
+        current_time = time.time()
+
+        # Evict LRU users if we're at capacity
+        if distinct_id not in self.cache and len(self.cache) >= self.max_size:
+            self._evict_lru()
+
+        # Initialize user cache if needed
+        if distinct_id not in self.cache:
+            self.cache[distinct_id] = {}
+
+        # Store the flag result
+        self.cache[distinct_id][flag_key] = FlagCacheEntry(
+            flag_result, flag_definition_version, current_time
+        )
+        self.access_times[distinct_id] = current_time
+
+    def invalidate_version(self, old_version):
+        users_to_remove = []
+
+        for distinct_id, user_flags in self.cache.items():
+            flags_to_remove = []
+            for flag_key, entry in user_flags.items():
+                if entry.flag_definition_version == old_version:
+                    flags_to_remove.append(flag_key)
+
+            # Remove invalidated flags
+            for flag_key in flags_to_remove:
+                del user_flags[flag_key]
+
+            # Remove user entirely if no flags remain
+            if not user_flags:
+                users_to_remove.append(distinct_id)
+
+        # Clean up empty users
+        for distinct_id in users_to_remove:
+            del self.cache[distinct_id]
+            if distinct_id in self.access_times:
+                del self.access_times[distinct_id]
+
+    def _evict_lru(self):
+        if not self.access_times:
+            return
+
+        # Remove 20% of least recently used entries
+        sorted_users = sorted(self.access_times.items(), key=lambda x: x[1])
+        to_remove = max(1, len(sorted_users) // 5)
+
+        for distinct_id, _ in sorted_users[:to_remove]:
+            if distinct_id in self.cache:
+                del self.cache[distinct_id]
+            if distinct_id in self.access_times:
+                del self.access_times[distinct_id]
+
+    def clear(self):
+        self.cache.clear()
+        self.access_times.clear()
+
+
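A round trip through the in-memory cache above; as in the tests, the version argument gates reads, and a bare `True` stands in for a `FeatureFlagResult`:

```python
from posthog.utils import FlagCache

cache = FlagCache(max_size=10000, default_ttl=300)
cache.set_cached_flag("user-1", "my-flag", True, flag_definition_version=1)

# Hit: fresh entry, matching version.
assert cache.get_cached_flag("user-1", "my-flag", current_flag_version=1) is True

# Miss: same entry, but the definitions have moved on.
assert cache.get_cached_flag("user-1", "my-flag", current_flag_version=2) is None

# The stale path ignores ttl and version, for use as an outage fallback.
assert cache.get_stale_cached_flag("user-1", "my-flag") is True
```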
+class RedisFlagCache:
+    def __init__(
+        self, redis_client, default_ttl=300, stale_ttl=3600, key_prefix="posthog:flags:"
+    ):
+        self.redis = redis_client
+        self.default_ttl = default_ttl
+        self.stale_ttl = stale_ttl
+        self.key_prefix = key_prefix
+        self.version_key = f"{key_prefix}version"
+
+    def _get_cache_key(self, distinct_id, flag_key):
+        return f"{self.key_prefix}{distinct_id}:{flag_key}"
+
+    def _serialize_entry(self, flag_result, flag_definition_version, timestamp=None):
+        if timestamp is None:
+            timestamp = time.time()
+
+        # Use clean to make flag_result JSON-serializable for cross-platform compatibility
+        serialized_result = clean(flag_result)
+
+        entry = {
+            "flag_result": serialized_result,
+            "flag_version": flag_definition_version,
+            "timestamp": timestamp,
+        }
+        return json.dumps(entry)
+
+    def _deserialize_entry(self, data):
+        try:
+            entry = json.loads(data)
+            flag_result = entry["flag_result"]
+            return FlagCacheEntry(
+                flag_result=flag_result,
+                flag_definition_version=entry["flag_version"],
+                timestamp=entry["timestamp"],
+            )
+        except (json.JSONDecodeError, KeyError, ValueError):
+            # If deserialization fails, treat as cache miss
+            return None
+
+    def get_cached_flag(self, distinct_id, flag_key, current_flag_version):
+        try:
+            cache_key = self._get_cache_key(distinct_id, flag_key)
+            data = self.redis.get(cache_key)
+
+            if data:
+                entry = self._deserialize_entry(data)
+                if entry and entry.is_valid(
+                    time.time(), self.default_ttl, current_flag_version
+                ):
+                    return entry.flag_result
+
+            return None
+        except Exception:
+            # Redis error - return None to fall back to normal evaluation
+            return None
+
+    def get_stale_cached_flag(self, distinct_id, flag_key, max_stale_age=None):
+        try:
+            if max_stale_age is None:
+                max_stale_age = self.stale_ttl
+
+            cache_key = self._get_cache_key(distinct_id, flag_key)
+            data = self.redis.get(cache_key)
+
+            if data:
+                entry = self._deserialize_entry(data)
+                if entry and entry.is_stale_but_usable(time.time(), max_stale_age):
+                    return entry.flag_result
+
+            return None
+        except Exception:
+            # Redis error - return None
+            return None
+
+    def set_cached_flag(
+        self, distinct_id, flag_key, flag_result, flag_definition_version
+    ):
+        try:
+            cache_key = self._get_cache_key(distinct_id, flag_key)
+            serialized_entry = self._serialize_entry(
+                flag_result, flag_definition_version
+            )
+
+            # Set with TTL for automatic cleanup (use stale_ttl for total lifetime)
+            self.redis.setex(cache_key, self.stale_ttl, serialized_entry)
+
+            # Update the current version
+            self.redis.set(self.version_key, flag_definition_version)
+
+        except Exception:
+            # Redis error - silently fail, don't break flag evaluation
+            pass
+
+    def invalidate_version(self, old_version):
+        try:
+            # For Redis, we use a simple approach: scan for keys with the old
+            # version and delete them. This could be expensive with many keys,
+            # but it's necessary for correctness.
+            cursor = 0
+            pattern = f"{self.key_prefix}*"
+
+            while True:
+                cursor, keys = self.redis.scan(cursor, match=pattern, count=100)
+
+                for key in keys:
+                    if key.decode() == self.version_key:
+                        continue
+
+                    try:
+                        data = self.redis.get(key)
+                        if data:
+                            entry_dict = json.loads(data)
+                            if entry_dict.get("flag_version") == old_version:
+                                self.redis.delete(key)
+                    except (json.JSONDecodeError, KeyError):
+                        # If we can't parse the entry, delete it to be safe
+                        self.redis.delete(key)
+
+                if cursor == 0:
+                    break
+
+        except Exception:
+            # Redis error - silently fail
+            pass
+
+    def clear(self):
+        try:
+            # Delete all keys matching our pattern
+            cursor = 0
+            pattern = f"{self.key_prefix}*"
+
+            while True:
+                cursor, keys = self.redis.scan(cursor, match=pattern, count=100)
+                if keys:
+                    self.redis.delete(*keys)
+                if cursor == 0:
+                    break
+        except Exception:
+            # Redis error - silently fail
+            pass
+
+
 def convert_to_datetime_aware(date_obj):
     if date_obj.tzinfo is None:
         date_obj = date_obj.replace(tzinfo=timezone.utc)
diff --git a/posthog/version.py b/posthog/version.py
index 54bb7a33..e9b3c98a 100644
--- a/posthog/version.py
+++ b/posthog/version.py
@@ -1,4 +1,4 @@
-VERSION = "6.0.2"
+VERSION = "6.0.3"
 
 if __name__ == "__main__":
     print(VERSION, end="")  # noqa: T201
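For the `RedisFlagCache` above, each (distinct_id, flag_key) pair becomes one JSON string under a prefixed key. A sketch of the layout that `_get_cache_key` and `_serialize_entry` produce, assuming the default prefix (`True` again stands in for a cleaned flag result):

```python
import json
import time

key_prefix = "posthog:flags:"

# Key layout: posthog:flags:<distinct_id>:<flag_key>
cache_key = f"{key_prefix}user-1:my-flag"

# Value layout: written via SETEX with stale_ttl (3600s by default), so
# entries survive for the full stale-fallback window; freshness against
# default_ttl is enforced on read, not by the Redis expiry.
value = json.dumps(
    {
        "flag_result": True,
        "flag_version": 1,
        "timestamp": time.time(),
    }
)
```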
diff --git a/uv.lock b/uv.lock
index 6aba1c63..64df719f 100644
--- a/uv.lock
+++ b/uv.lock
@@ -482,7 +482,6 @@ dependencies = [
 ]
 sdist = { url = "https://files.pythonhosted.org/packages/fe/c8/a2a376a8711c1e11708b9c9972e0c3223f5fc682552c82d8db844393d6ce/cryptography-45.0.4.tar.gz", hash = "sha256:7405ade85c83c37682c8fe65554759800a4a8c54b2d96e0f8ad114d31b808d57", size = 744890 }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/cc/1c/92637793de053832523b410dbe016d3f5c11b41d0cf6eef8787aabb51d41/cryptography-45.0.4-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:425a9a6ac2823ee6e46a76a21a4e8342d8fa5c01e08b823c1f19a8b74f096069", size = 7055712 },
     { url = "https://files.pythonhosted.org/packages/ba/14/93b69f2af9ba832ad6618a03f8a034a5851dc9a3314336a3d71c252467e1/cryptography-45.0.4-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:680806cf63baa0039b920f4976f5f31b10e772de42f16310a6839d9f21a26b0d", size = 4205335 },
     { url = "https://files.pythonhosted.org/packages/67/30/fae1000228634bf0b647fca80403db5ca9e3933b91dd060570689f0bd0f7/cryptography-45.0.4-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4ca0f52170e821bc8da6fc0cc565b7bb8ff8d90d36b5e9fdd68e8a86bdf72036", size = 4431487 },
     { url = "https://files.pythonhosted.org/packages/6d/5a/7dffcf8cdf0cb3c2430de7404b327e3db64735747d641fc492539978caeb/cryptography-45.0.4-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f3fe7a5ae34d5a414957cc7f457e2b92076e72938423ac64d215722f6cf49a9e", size = 4208922 },
@@ -492,9 +491,6 @@
     { url = "https://files.pythonhosted.org/packages/db/b7/a84bdcd19d9c02ec5807f2ec2d1456fd8451592c5ee353816c09250e3561/cryptography-45.0.4-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:2882338b2a6e0bd337052e8b9007ced85c637da19ef9ecaf437744495c8c2999", size = 4463623 },
     { url = "https://files.pythonhosted.org/packages/d8/84/69707d502d4d905021cac3fb59a316344e9f078b1da7fb43ecde5e10840a/cryptography-45.0.4-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:23b9c3ea30c3ed4db59e7b9619272e94891f8a3a5591d0b656a7582631ccf750", size = 4332447 },
     { url = "https://files.pythonhosted.org/packages/f3/ee/d4f2ab688e057e90ded24384e34838086a9b09963389a5ba6854b5876598/cryptography-45.0.4-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b0a97c927497e3bc36b33987abb99bf17a9a175a19af38a892dc4bbb844d7ee2", size = 4572830 },
-    { url = "https://files.pythonhosted.org/packages/70/d4/994773a261d7ff98034f72c0e8251fe2755eac45e2265db4c866c1c6829c/cryptography-45.0.4-cp311-abi3-win32.whl", hash = "sha256:e00a6c10a5c53979d6242f123c0a97cff9f3abed7f064fc412c36dc521b5f257", size = 2932769 },
-    { url = "https://files.pythonhosted.org/packages/5a/42/c80bd0b67e9b769b364963b5252b17778a397cefdd36fa9aa4a5f34c599a/cryptography-45.0.4-cp311-abi3-win_amd64.whl", hash = "sha256:817ee05c6c9f7a69a16200f0c90ab26d23a87701e2a284bd15156783e46dbcc8", size = 3410441 },
-    { url = "https://files.pythonhosted.org/packages/ce/0b/2488c89f3a30bc821c9d96eeacfcab6ff3accc08a9601ba03339c0fd05e5/cryptography-45.0.4-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:964bcc28d867e0f5491a564b7debb3ffdd8717928d315d12e0d7defa9e43b723", size = 7031836 },
     { url = "https://files.pythonhosted.org/packages/fe/51/8c584ed426093aac257462ae62d26ad61ef1cbf5b58d8b67e6e13c39960e/cryptography-45.0.4-cp37-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:6a5bf57554e80f75a7db3d4b1dacaa2764611ae166ab42ea9a72bcdb5d577637", size = 4195746 },
     { url = "https://files.pythonhosted.org/packages/5c/7d/4b0ca4d7af95a704eef2f8f80a8199ed236aaf185d55385ae1d1610c03c2/cryptography-45.0.4-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:46cf7088bf91bdc9b26f9c55636492c1cce3e7aaf8041bbf0243f5e5325cfb2d", size = 4424456 },
     { url = "https://files.pythonhosted.org/packages/1d/45/5fabacbc6e76ff056f84d9f60eeac18819badf0cefc1b6612ee03d4ab678/cryptography-45.0.4-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7bedbe4cc930fa4b100fc845ea1ea5788fcd7ae9562e669989c11618ae8d76ee", size = 4198495 },
@@ -504,20 +500,14 @@
     { url = "https://files.pythonhosted.org/packages/3a/c0/85fa358ddb063ec588aed4a6ea1df57dc3e3bc1712d87c8fa162d02a65fc/cryptography-45.0.4-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:06509dc70dd71fa56eaa138336244e2fbaf2ac164fc9b5e66828fccfd2b680d6", size = 4451442 },
     { url = "https://files.pythonhosted.org/packages/33/67/362d6ec1492596e73da24e669a7fbbaeb1c428d6bf49a29f7a12acffd5dc/cryptography-45.0.4-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:5f31e6b0a5a253f6aa49be67279be4a7e5a4ef259a9f33c69f7d1b1191939872", size = 4325038 },
     { url = "https://files.pythonhosted.org/packages/53/75/82a14bf047a96a1b13ebb47fb9811c4f73096cfa2e2b17c86879687f9027/cryptography-45.0.4-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:944e9ccf67a9594137f942d5b52c8d238b1b4e46c7a0c2891b7ae6e01e7c80a4", size = 4560964 },
-    { url = "https://files.pythonhosted.org/packages/cd/37/1a3cba4c5a468ebf9b95523a5ef5651244693dc712001e276682c278fc00/cryptography-45.0.4-cp37-abi3-win32.whl", hash = "sha256:c22fe01e53dc65edd1945a2e6f0015e887f84ced233acecb64b4daadb32f5c97", size = 2924557 },
-    { url = "https://files.pythonhosted.org/packages/2a/4b/3256759723b7e66380397d958ca07c59cfc3fb5c794fb5516758afd05d41/cryptography-45.0.4-cp37-abi3-win_amd64.whl", hash = "sha256:627ba1bc94f6adf0b0a2e35d87020285ead22d9f648c7e75bb64f367375f3b22", size = 3395508 },
-    { url = "https://files.pythonhosted.org/packages/16/33/b38e9d372afde56906a23839302c19abdac1c505bfb4776c1e4b07c3e145/cryptography-45.0.4-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a77c6fb8d76e9c9f99f2f3437c1a4ac287b34eaf40997cfab1e9bd2be175ac39", size = 3580103 },
     { url = "https://files.pythonhosted.org/packages/c4/b9/357f18064ec09d4807800d05a48f92f3b369056a12f995ff79549fbb31f1/cryptography-45.0.4-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7aad98a25ed8ac917fdd8a9c1e706e5a0956e06c498be1f713b61734333a4507", size = 4143732 },
     { url = "https://files.pythonhosted.org/packages/c4/9c/7f7263b03d5db329093617648b9bd55c953de0b245e64e866e560f9aac07/cryptography-45.0.4-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3530382a43a0e524bc931f187fc69ef4c42828cf7d7f592f7f249f602b5a4ab0", size = 4385424 },
     { url = "https://files.pythonhosted.org/packages/a6/5a/6aa9d8d5073d5acc0e04e95b2860ef2684b2bd2899d8795fc443013e263b/cryptography-45.0.4-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:6b613164cb8425e2f8db5849ffb84892e523bf6d26deb8f9bb76ae86181fa12b", size = 4142438 },
     { url = "https://files.pythonhosted.org/packages/42/1c/71c638420f2cdd96d9c2b287fec515faf48679b33a2b583d0f1eda3a3375/cryptography-45.0.4-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:96d4819e25bf3b685199b304a0029ce4a3caf98947ce8a066c9137cc78ad2c58", size = 4384622 },
-    { url = "https://files.pythonhosted.org/packages/ef/ab/e3a055c34e97deadbf0d846e189237d3385dca99e1a7e27384c3b2292041/cryptography-45.0.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b97737a3ffbea79eebb062eb0d67d72307195035332501722a9ca86bab9e3ab2", size = 3328911 },
-    { url = "https://files.pythonhosted.org/packages/ea/ba/cf442ae99ef363855ed84b39e0fb3c106ac66b7a7703f3c9c9cfe05412cb/cryptography-45.0.4-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4828190fb6c4bcb6ebc6331f01fe66ae838bb3bd58e753b59d4b22eb444b996c", size = 3590512 },
     { url = "https://files.pythonhosted.org/packages/28/9a/a7d5bb87d149eb99a5abdc69a41e4e47b8001d767e5f403f78bfaafc7aa7/cryptography-45.0.4-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:03dbff8411206713185b8cebe31bc5c0eb544799a50c09035733716b386e61a4", size = 4146899 },
     { url = "https://files.pythonhosted.org/packages/17/11/9361c2c71c42cc5c465cf294c8030e72fb0c87752bacbd7a3675245e3db3/cryptography-45.0.4-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:51dfbd4d26172d31150d84c19bbe06c68ea4b7f11bbc7b3a5e146b367c311349", size = 4388900 },
     { url = "https://files.pythonhosted.org/packages/c0/76/f95b83359012ee0e670da3e41c164a0c256aeedd81886f878911581d852f/cryptography-45.0.4-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:0339a692de47084969500ee455e42c58e449461e0ec845a34a6a9b9bf7df7fb8", size = 4146422 },
     { url = "https://files.pythonhosted.org/packages/09/ad/5429fcc4def93e577a5407988f89cf15305e64920203d4ac14601a9dc876/cryptography-45.0.4-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:0cf13c77d710131d33e63626bd55ae7c0efb701ebdc2b3a7952b9b23a0412862", size = 4388475 },
-    { url = "https://files.pythonhosted.org/packages/99/49/0ab9774f64555a1b50102757811508f5ace451cf5dc0a2d074a4b9deca6a/cryptography-45.0.4-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:bbc505d1dc469ac12a0a064214879eac6294038d6b24ae9f71faae1448a9608d", size = 3337594 },
 ]
 
 [[package]]
@@ -609,7 +599,7 @@ name = "exceptiongroup"
 version = "1.3.0"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
-    { name = "typing-extensions", marker = "python_full_version < '3.13'" },
+    { name = "typing-extensions", marker = "python_full_version < '3.12.4'" },
 ]
 sdist = { url = "https://files.pythonhosted.org/packages/0b/9f/a65090624ecf468cdca03533906e7c69ed7588582240cfe7cc9e770b50eb/exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88", size = 29749 }
 wheels = [
@@ -923,7 +913,7 @@ name = "importlib-metadata"
 version = "8.7.0"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
-    { name = "zipp", marker = "python_full_version < '3.13'" },
+    { name = "zipp", marker = "python_full_version < '3.12.4'" },
 ]
 sdist = { url = "https://files.pythonhosted.org/packages/76/66/650a33bd90f786193e4de4b3ad86ea60b53c89b669a5c7be931fac31cdb0/importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000", size = 56641 }
 wheels = [
@@ -1973,6 +1963,7 @@ dependencies = [
     { name = "python-dateutil" },
     { name = "requests" },
     { name = "six" },
+    { name = "typing-extensions" },
 ]
 
 [package.optional-dependencies]
@@ -2049,7 +2040,7 @@ requires-dist = [
     { name = "pytest-asyncio", marker = "extra == 'test'" },
     { name = "pytest-timeout", marker = "extra == 'test'" },
     { name = "python-dateutil", specifier = ">=2.2" },
-    { name = "requests", specifier = "<3.0,>=2.7" },
+    { name = "requests", specifier = ">=2.7,<3.0" },
     { name = "ruff", marker = "extra == 'dev'" },
     { name = "setuptools", marker = "extra == 'dev'" },
     { name = "six", specifier = ">=1.5" },
@@ -2061,6 +2052,7 @@
     { name = "types-requests", marker = "extra == 'dev'" },
     { name = "types-setuptools", marker = "extra == 'dev'" },
     { name = "types-six", marker = "extra == 'dev'" },
+    { name = "typing-extensions", specifier = ">=4.2.0" },
     { name = "wheel", marker = "extra == 'dev'" },
 ]