CLN deprecate bytes_limit from Memory #1447

Merged
merged 2 commits on Jun 8, 2023
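In short: the ``bytes_limit`` constructor argument of Memory is deprecated in favour of passing the limit to Memory.reduce_size at call time. A minimal before/after sketch, assuming a local cache directory (the path and limit value are made-up for illustration):

from joblib import Memory

# Before this change: the limit lived on the Memory object.
# mem = Memory(location="./joblib_cache", bytes_limit="1M")
# mem.reduce_size()

# After this change: the constructor argument still works but emits a
# DeprecationWarning; the limit is passed directly to reduce_size().
mem = Memory(location="./joblib_cache")
mem.reduce_size(bytes_limit="1M")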
11 changes: 11 additions & 0 deletions joblib/memory.py
@@ -936,6 +936,10 @@ class Memory(Logger):
**Note:** You need to call :meth:`joblib.Memory.reduce_size` to
actually reduce the cache size to be less than ``bytes_limit``.

**Note:** This argument has been deprecated. Pass the value of
``bytes_limit`` directly to :meth:`joblib.Memory.reduce_size`
instead.

backend_options: dict, optional
Contains a dictionary of named parameters used to configure
the store backend.
@@ -951,6 +955,13 @@ def __init__(self, location=None, backend='local',
self._verbose = verbose
self.mmap_mode = mmap_mode
self.timestamp = time.time()
if bytes_limit is not None:
warnings.warn(
"bytes_limit argument has been deprecated. It will be removed "
"in version 1.5. Please pass its value directly to "
"Memory.reduce_size.",
category=DeprecationWarning
)
self.bytes_limit = bytes_limit
self.backend = backend
self.compress = compress
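To illustrate the constructor change above from the caller's side, a hedged sketch of detecting the new DeprecationWarning during migration (cache path and limit value are made-up):

import warnings
from joblib import Memory

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    # Still accepted, but now warns and is slated for removal.
    mem = Memory(location="./joblib_cache", bytes_limit="1K")

assert any(issubclass(w.category, DeprecationWarning) for w in caught)

# Migrated call site: pass the limit when actually reducing the cache.
mem.reduce_size(bytes_limit="1K")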
26 changes: 17 additions & 9 deletions joblib/test/test_memory.py
@@ -930,7 +930,8 @@
# All the cache items need to be deleted
bytes_limit_too_small = 500
items_to_delete_500b = memory.store_backend._get_items_to_delete(
bytes_limit_too_small)
bytes_limit_too_small
)
assert set(items_to_delete_500b), set(items)

# Test LRU property: surviving cache items should all have a more
@@ -953,22 +954,19 @@

# No cache items deleted if bytes_limit greater than the size of
# the cache
memory.bytes_limit = '1M'
memory.reduce_size()
memory.reduce_size(bytes_limit='1M')
cache_items = memory.store_backend.get_items()
assert sorted(ref_cache_items) == sorted(cache_items)

# bytes_limit is set so that only two cache items are kept
memory.bytes_limit = '3K'
memory.reduce_size()
memory.reduce_size(bytes_limit='3K')
cache_items = memory.store_backend.get_items()
assert set.issubset(set(cache_items), set(ref_cache_items))
assert len(cache_items) == 2

# bytes_limit set so that no cache item is kept
bytes_limit_too_small = 500
memory.bytes_limit = bytes_limit_too_small
memory.reduce_size()
memory.reduce_size(bytes_limit=bytes_limit_too_small)
cache_items = memory.store_backend.get_items()
assert cache_items == []

@@ -994,7 +992,7 @@
assert set.issubset(set(cache_items), set(ref_cache_items))
assert len(cache_items) == 2

# bytes_limit set so that no cache item is kept
# item_limit set so that no cache item is kept
memory.reduce_size(items_limit=0)
cache_items = memory.store_backend.get_items()
assert cache_items == []
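The two hunks above exercise bytes_limit and items_limit separately through reduce_size. For reference, a hedged sketch of combining the reduce_size limits in user code, assuming the age_limit parameter from the same joblib release (directory and threshold values are made-up):

import datetime
from joblib import Memory

mem = Memory(location="./joblib_cache")  # hypothetical cache directory
# Keep the cache under 3 KB, at most 2 items, and drop anything older than a day.
mem.reduce_size(
    bytes_limit="3K",
    items_limit=2,
    age_limit=datetime.timedelta(days=1),
)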
@@ -1336,7 +1334,7 @@

@pytest.mark.parametrize('memory_kwargs',
[{'compress': 3, 'verbose': 2},
{'mmap_mode': 'r', 'verbose': 5, 'bytes_limit': 1e6,
{'mmap_mode': 'r', 'verbose': 5,
'backend_options': {'parameter': 'unused'}}])
def test_memory_pickle_dump_load(tmpdir, memory_kwargs):
memory = Memory(location=tmpdir.strpath, **memory_kwargs)
@@ -1393,3 +1391,13 @@
_ = f(x)
assert "Querying" not in caplog.text
caplog.clear()


def test_deprecated_bytes_limit(tmpdir):
from joblib import __version__
if __version__ >= "1.5":
raise DeprecationWarning(
    "Bytes limit is deprecated and should be removed by 1.4"
)
with pytest.warns(DeprecationWarning, match="bytes_limit"):
_ = Memory(location=tmpdir.strpath, bytes_limit='1K')
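Assuming a standard development checkout, the new deprecation test can be exercised on its own; a sketch via pytest's Python entry point (path and flags are assumptions, not part of the PR):

import pytest

# Run only the test added in this PR, with verbose output.
pytest.main(["joblib/test/test_memory.py", "-k", "test_deprecated_bytes_limit", "-v"])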