7 changes: 0 additions & 7 deletions .pre-commit-config.yaml
@@ -56,13 +56,6 @@ repos:
# hooks:
# - id: mypy

- repo: https://github.com/asottile/yesqa
rev: v1.5.0
hooks:
- id: yesqa
additional_dependencies:
- flake8-bandit

- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.2.1
hooks:
4 changes: 2 additions & 2 deletions cachier/core.py
@@ -245,7 +245,7 @@ def func_wrapper(*args, **kwds):
_print = print
if ignore_cache or not _default_params["caching_enabled"]:
return func(**kwargs)
key, entry = core.get_entry(tuple(), kwargs)
key, entry = core.get_entry((), kwargs)
if overwrite_cache:
return _calc_entry(core, key, func, args, kwds)
if entry is None:
@@ -314,7 +314,7 @@ def _precache_value(*args, value_to_cache, **kwds):
kwargs = _convert_args_kwargs(
func, _is_method=core.func_is_method, args=args, kwds=kwds
)
return core.precache_value(tuple(), kwargs, value_to_cache)
return core.precache_value((), kwargs, value_to_cache)

func_wrapper.clear_cache = _clear_cache
func_wrapper.clear_being_calculated = _clear_being_calculated
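Side note on the `tuple()` to `()` change in core.py: both spellings build the same empty tuple, but the literal avoids a global name lookup and a call, and reads as the idiomatic form. A minimal sketch (hypothetical variable names):

# Both expressions evaluate to an identical empty tuple; the literal is the idiomatic spelling.
empty_from_literal = ()
empty_from_call = tuple()
assert empty_from_literal == empty_from_call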
5 changes: 3 additions & 2 deletions cachier/cores/mongo.py
@@ -41,8 +41,9 @@ def __init__(
):
if "pymongo" not in sys.modules:
warnings.warn(
"Cachier warning: pymongo was not found. "
"MongoDB cores will not function."
"`pymongo` was not found. MongoDB cores will not function.",
ImportWarning,
stacklevel=2,
) # pragma: no cover

super().__init__(hash_func, wait_for_calc_timeout)
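The updated warning in mongo.py adds an explicit category and a stacklevel. A minimal sketch of the effect, with a hypothetical `_init_mongo_core` standing in for the core's `__init__` (note that ImportWarning is silenced by Python's default filters, hence the simplefilter call below):

import warnings

warnings.simplefilter("always", ImportWarning)  # ImportWarning is ignored by default filters

def _init_mongo_core():
    # stacklevel=2 attributes the warning to the caller of this function,
    # so users see their own call site rather than a line inside cachier.
    warnings.warn(
        "`pymongo` was not found. MongoDB cores will not function.",
        ImportWarning,
        stacklevel=2,
    )

_init_mongo_core()  # with stacklevel=2 the reported location is this line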
14 changes: 7 additions & 7 deletions cachier/cores/pickle.py
@@ -115,9 +115,9 @@ def _get_cache(self):
self._reload_cache()
return self.cache

def _get_cache_by_key(self, key=None, hash=None):
def _get_cache_by_key(self, key=None, hash_str=None):
fpath = self.cache_fpath
fpath += f"_{key}" if hash is None else f"_{hash}"
fpath += f"_{hash_str or key}"
try:
with portalocker.Lock(fpath, mode="rb") as cache_file:
return pickle.load(cache_file) # noqa: S301
@@ -134,17 +134,17 @@ def _clear_being_calculated_all_cache_files(self):
path, name = os.path.split(self.cache_fpath)
for subpath in os.listdir(path):
if subpath.startswith(name):
entry = self._get_cache_by_key(hash=subpath.split("_")[-1])
entry = self._get_cache_by_key(hash_str=subpath.split("_")[-1])
if entry is not None:
entry["being_calculated"] = False
self._save_cache(entry, hash=subpath.split("_")[-1])
self._save_cache(entry, hash_str=subpath.split("_")[-1])

def _save_cache(self, cache, key=None, hash=None):
def _save_cache(self, cache, key=None, hash_str=None):
fpath = self.cache_fpath
if key is not None:
fpath += f"_{key}"
elif hash is not None:
fpath += f"_{hash}"
elif hash_str is not None:
fpath += f"_{hash_str}"
with self.lock:
self.cache = cache
with portalocker.Lock(fpath, mode="wb") as cache_file:
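The `hash` to `hash_str` rename in the pickle core avoids shadowing the `hash` builtin, which is what flake8-builtins' A002 check (enabled via the `A` selection in pyproject.toml below) flags. A minimal sketch of the pitfall, using hypothetical function names:

def digest(value, hash=None):
    if hash is None:
        hash = hash(value)  # TypeError: the parameter shadows the builtin, and it is None here
    return hash

def digest_fixed(value, hash_str=None):
    if hash_str is None:
        hash_str = str(hash(value))  # the builtin hash() is reachable again
    return hash_str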
10 changes: 8 additions & 2 deletions pyproject.toml
@@ -41,10 +41,16 @@ lint.select = [
"W", # see: https://pypi.org/project/pycodestyle
"F", # see: https://pypi.org/project/pyflakes
"I", #see: https://pypi.org/project/isort/
# "D", # see: https://pypi.org/project/pydocstyle
# "N", # see: https://pypi.org/project/pep8-naming
#"D", # see: https://pypi.org/project/pydocstyle
#"N", # see: https://pypi.org/project/pep8-naming
"S", # see: https://pypi.org/project/flake8-bandit
"SIM",
"RUF100" # alternative to yesqa
]
lint.extend-select = [
"A", # see: https://pypi.org/project/flake8-builtins
"B", # see: https://pypi.org/project/flake8-bugbear
"C4", # see: https://pypi.org/project/flake8-comprehensions
]
lint.ignore = [
"E203",
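For context, RUF100 fills the role yesqa played in pre-commit: it flags `# noqa` comments that no longer suppress anything, and `ruff check --fix` can remove them. A small sketch of what it catches (hypothetical line):

value = 1  # noqa: E501
# RUF100 would report the suppression above as unused, since the line is well under the length limit.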
4 changes: 2 additions & 2 deletions tests/speed_eval.py
@@ -55,12 +55,12 @@ def test_separate_files_vs_single_file():
_test_separate_files_speed.clear_cache()
_test_single_file_speed.clear_cache()
start_time = time()
for i in range(3):
for _ in range(3):
for j in range(10):
_test_separate_files_speed(j, 2)
print(f"separate files time: {time() - start_time}")
start_time = time()
for i in range(3):
for _ in range(3):
for j in range(10):
_test_single_file_speed(j, 2)
print(f"single file time: {time() - start_time}")
2 changes: 1 addition & 1 deletion tests/test_general.py
@@ -296,7 +296,7 @@ def test_list_inputs():
count = 0

@cachier.cachier()
def dummy_func(a: list, b: list = [2]):
def dummy_func(a: list, b: list = [2]): # noqa: B006
nonlocal count
count += 1
return a + b
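The `# noqa: B006` presumably keeps the mutable default on purpose, since this test exercises list inputs; B006 normally flags it because a mutable default is created once and shared across calls. A quick illustration with hypothetical names:

def append_to(item, bucket=[]):  # what B006 flags
    bucket.append(item)
    return bucket

print(append_to(1))  # [1]
print(append_to(2))  # [1, 2]: the "empty" default remembered the previous call

def append_to_fixed(item, bucket=None):  # the usual fix, not wanted in the test above
    bucket = [] if bucket is None else bucket
    bucket.append(item)
    return bucket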
4 changes: 2 additions & 2 deletions tests/test_memory_core.py
@@ -314,8 +314,8 @@ def _params_with_dataframe(*args, **kwargs):

_params_with_dataframe.clear_cache()

df_a = pd.DataFrame.from_dict(dict(a=[0], b=[2], c=[3]))
df_b = pd.DataFrame.from_dict(dict(a=[0], b=[2], c=[3]))
df_a = pd.DataFrame.from_dict({"a": [0], "b": [2], "c": [3]})
df_b = pd.DataFrame.from_dict({"a": [0], "b": [2], "c": [3]})
value_a = _params_with_dataframe(df_a, 1)
value_b = _params_with_dataframe(df_b, 1)

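The `dict(...)` to literal change (here and in the mongo and pickle test files below) matches flake8-comprehensions' C408 check, enabled via the `C4` selection in pyproject.toml: a keyword-argument `dict()` call is an unnecessary indirection over a literal. A tiny sketch with hypothetical names:

via_call = dict(a=[0], b=[2], c=[3])  # what C408 flags
via_literal = {"a": [0], "b": [2], "c": [3]}
assert via_call == via_literal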
4 changes: 2 additions & 2 deletions tests/test_mongo_core.py
@@ -351,8 +351,8 @@ def _params_with_dataframe(*args, **kwargs):

_params_with_dataframe.clear_cache()

df_a = pd.DataFrame.from_dict(dict(a=[0], b=[2], c=[3]))
df_b = pd.DataFrame.from_dict(dict(a=[0], b=[2], c=[3]))
df_a = pd.DataFrame.from_dict({"a": [0], "b": [2], "c": [3]})
df_b = pd.DataFrame.from_dict({"a": [0], "b": [2], "c": [3]})
value_a = _params_with_dataframe(df_a, 1)
value_b = _params_with_dataframe(df_b, 1)

8 changes: 4 additions & 4 deletions tests/test_pickle_core.py
@@ -406,7 +406,7 @@ def test_bad_cache_file(separate_files):
for sleeptime in sleeptimes:
if _helper_bad_cache_file(sleeptime, separate_files):
return
assert False
raise AssertionError()


def _delete_cache(arg_1, arg_2):
@@ -501,7 +501,7 @@ def test_delete_cache_file(separate_files):
for sleeptime in sleeptimes:
if _helper_delete_cache_file(sleeptime, separate_files):
return
assert False
raise AssertionError()


@pytest.mark.pickle
@@ -598,8 +598,8 @@ def _params_with_dataframe(*args, **kwargs):

_params_with_dataframe.clear_cache()

df_a = pd.DataFrame.from_dict(dict(a=[0], b=[2], c=[3]))
df_b = pd.DataFrame.from_dict(dict(a=[0], b=[2], c=[3]))
df_a = pd.DataFrame.from_dict({"a": [0], "b": [2], "c": [3]})
df_b = pd.DataFrame.from_dict({"a": [0], "b": [2], "c": [3]})
value_a = _params_with_dataframe(df_a, 1)
value_b = _params_with_dataframe(df_b, 1)

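Replacing `assert False` with `raise AssertionError()` matches flake8-bugbear's B011: under `python -O`, assert statements are stripped, so an `assert False` fallback would silently pass instead of failing. A minimal sketch with hypothetical names:

def fail_explicitly():
    raise AssertionError()  # still raises when Python runs with -O

def fail_with_assert():
    assert False  # becomes a no-op under python -O, so the failure disappears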