refactor: remove RemovalCause._IGNORE
uncle-lv committed Nov 15, 2023
1 parent 6bd8578 commit 73b54ee
Showing 4 changed files with 10 additions and 12 deletions.

src/cacheout/__init__.py (1 addition, 1 deletion)

@@ -2,7 +2,7 @@
 
 __version__ = "0.15.0"
 
-from .cache import Cache, RemovalCause
+from .cache import UNSET, Cache, RemovalCause
 from .fifo import FIFOCache
 from .lfu import LFUCache
 from .lifo import LIFOCache

src/cacheout/cache.py (3 additions, 5 deletions)

@@ -61,14 +61,12 @@ class RemovalCause(Enum):
     FULL: indicates that the cache entry was removed because cache has been full (reached the
         maximum size limit).
     POPITEM: indicates that the cache entry was deleted by popitem().
-    _IGNORE: It's an internal member indicates you don't want to call on_delete callback.
     """
 
     DELETE = auto()
     EXPIRED = auto()
     FULL = auto()
     POPITEM = auto()
-    _IGNORE = auto()
 
 
 class Cache:

@@ -400,7 +398,7 @@ def _set(self, key: t.Hashable, value: t.Any, ttl: t.Optional[T_TTL] = None) ->
 
         # Delete key before setting it so that it moves to the end of the OrderedDict key list.
         # Needed for cache strategies that rely on the ordering of when keys were last inserted.
-        self._delete(key, RemovalCause._IGNORE)
+        self._delete(key)
         self._cache[key] = value
 
         if ttl and ttl > 0:

@@ -438,13 +436,13 @@ def delete(self, key: t.Hashable) -> int:
         with self._lock:
             return self._delete(key, RemovalCause.DELETE)
 
-    def _delete(self, key: t.Hashable, cause: RemovalCause) -> int:
+    def _delete(self, key: t.Hashable, cause: t.Optional[RemovalCause] = None) -> int:
         count = 0
 
         try:
             value = self._cache[key]
             del self._cache[key]
-            if cause != RemovalCause._IGNORE and self.on_delete:
+            if cause and self.on_delete:
                 self.on_delete(key, value, cause)
             count = 1
             if cause == RemovalCause.FULL:

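For context on what this change means for callers, here is a minimal sketch (not part of the commit): _delete() now only invokes on_delete when it is given an explicit RemovalCause, so the internal delete-and-reinsert that _set() performs to refresh key order never reaches the callback. The sketch assumes on_delete can be assigned as an attribute, mirroring how the tests below assign on_set.

    from cacheout import Cache

    cache = Cache()

    def on_delete(key, value, cause):
        # Only reached when _delete() receives an explicit RemovalCause.
        print(f"removed {key}={value!r} cause={cause}")

    cache.on_delete = on_delete

    cache.set("a", 1)
    cache.set("a", 2)  # internal re-insert calls _delete(key) with no cause, so no callback
    cache.delete("a")  # explicit delete fires the callback with RemovalCause.DELETE
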
src/cacheout/lfu.py (1 addition, 1 deletion)

@@ -59,7 +59,7 @@ def add(self, key: t.Hashable, value: t.Any, ttl: t.Optional[T_TTL] = None) -> N
 
     add.__doc__ = Cache.add.__doc__
 
-    def _delete(self, key: t.Hashable, cause: RemovalCause) -> int:
+    def _delete(self, key: t.Hashable, cause: t.Optional[RemovalCause] = None) -> int:
         count = super()._delete(key, cause)
 
         try:

tests/test_cache.py (5 additions, 5 deletions)

@@ -5,7 +5,7 @@
 
 import pytest
 
-from cacheout import Cache, RemovalCause
+from cacheout import UNSET, Cache, RemovalCause
 
 
 parametrize = pytest.mark.parametrize

@@ -763,19 +763,19 @@ def on_get(key, value, existed):
 
 def test_cache_on_set(cache: Cache):
     """Test that on_set(cache) callback."""
-    log = ""
+    log = {}
 
     def on_set(key, new_value, old_value):
         nonlocal log
-        log = f"{key}={new_value}, old_value={old_value}"
+        log = {"key": key, "new_value": new_value, "old_value": old_value}
 
     cache.on_set = on_set
 
     cache.set("a", 1)
-    assert re.match(r"^a=1, old_value=<object object at 0x.*>$", log)
+    assert log == {"key": "a", "new_value": 1, "old_value": UNSET}
 
     cache.set("a", 2)
-    assert log == "a=2, old_value=1"
+    assert log == {"key": "a", "new_value": 2, "old_value": 1}
 
 
 def test_cache_stats__disabled_by_default(cache: Cache):

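As a usage note, the UNSET sentinel that the updated test imports lets an on_set callback distinguish a first-time insert from an overwrite. A small sketch, assuming UNSET is a plain sentinel object that can be compared by identity (which the equality check in the test implies):

    from cacheout import UNSET, Cache

    cache = Cache()

    def on_set(key, new_value, old_value):
        if old_value is UNSET:
            # The key did not exist before this set() call.
            print(f"inserted {key}={new_value!r}")
        else:
            print(f"updated {key}: {old_value!r} -> {new_value!r}")

    cache.on_set = on_set
    cache.set("a", 1)  # inserted a=1
    cache.set("a", 2)  # updated a: 1 -> 2
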
