Skip to content

Commit

Permalink
Allow sweeping cache without cache_size and with cache_size_bytes only.
Browse files Browse the repository at this point in the history
  • Loading branch information
yuseitahara committed Nov 13, 2018
1 parent 7b7ce08 commit 000501d
Show file tree
Hide file tree
Showing 3 changed files with 105 additions and 3 deletions.
18 changes: 16 additions & 2 deletions persistent/cPickleCache.c
Original file line number Diff line number Diff line change
Expand Up @@ -193,9 +193,23 @@ scan_gc_items(ccobject *self, int target, Py_ssize_t target_bytes)
*/
insert_after(&before_original_home, self->ring_home.r_prev);
here = self->ring_home.r_next; /* least recently used object */
/* All objects should be deactivated when the objects count parameter
* (target) is zero and the size limit parameter in bytes(target_bytes)
* is also zero.
*
* Otherwise the objects should be collected while one of the following
* conditions is true:
* - the ghost count is bigger than the number of objects limit(target).
* - the estimated size in bytes is bigger than the size limit in
* bytes(target_bytes).
*/
while (here != &before_original_home &&
(self->non_ghost_count > target
|| (target_bytes && self->total_estimated_size > target_bytes)
(
(!target && !target_bytes) ||
(
(target && self->non_ghost_count > target) ||
(target_bytes && self->total_estimated_size > target_bytes)
)
)
)
{
Expand Down
5 changes: 4 additions & 1 deletion persistent/picklecache.py
Original file line number Diff line number Diff line change
Expand Up @@ -336,7 +336,10 @@ def _sweep(self, target, target_size_bytes=0):
i = -1
to_eject = []
for value in self.ring:
if self.non_ghost_count <= target and (self.total_estimated_size <= target_size_bytes or not target_size_bytes):
if ((target or target_size_bytes) and
(not target or self.non_ghost_count <= target) and
(self.total_estimated_size <= target_size_bytes or
not target_size_bytes)):
break
i += 1
if value._p_state == UPTODATE:
Expand Down
85 changes: 85 additions & 0 deletions persistent/tests/test_picklecache.py
Original file line number Diff line number Diff line change
Expand Up @@ -1070,6 +1070,49 @@ def test_new_ghost_obj_already_in_cache(self):
candidate._p_jar = None
self.assertRaises(KeyError, cache.new_ghost, key, candidate)

@with_deterministic_gc
def test_cache_garbage_collection_bytes_with_cache_size_0(
self, force_collect=_is_pypy or _is_jython):

class MyPersistent(self._getDummyPersistentClass()):
def _p_deactivate(self):
# mimic what the real persistent object does to update
# the cache size; if we don't get deactivated by
# sweeping, the cache size won't shrink so this also
# validates that _p_deactivate gets called when
# ejecting an object.
cache.update_object_size_estimation(self._p_oid, -1)

cache = self._makeOne()
cache.cache_size = 0
cache.cache_size_bytes = 400
oids = []
for i in range(100):
oid = self._numbered_oid(i)
oids.append(oid)
o = cache[oid] = self._makePersist(oid=oid,
kind=MyPersistent,
state=UPTODATE)
# must start 0, ZODB sets it AFTER updating the size
o._Persistent__size = 0
cache.update_object_size_estimation(oid, 1)
o._Persistent__size = 1
del o # leave it only in the cache

self.assertEqual(cache.cache_non_ghost_count, 100)
self.assertEqual(cache.total_estimated_size, 64*100)

cache.incrgc()
self.assertEqual(cache.total_estimated_size, 64*6)
self.assertEqual(cache.cache_non_ghost_count, 6)
self.assertEqual(len(cache), 6)

cache.full_sweep()
gc.collect() # banish the ghosts who are no longer in the ring
self.assertEqual(cache.total_estimated_size, 0)
self.assertEqual(cache.cache_non_ghost_count, 0)
self.assertEqual(len(cache), 0)


@skipIfNoCExtension
class CPickleCacheTests(PickleCacheTests):
Expand All @@ -1088,6 +1131,48 @@ def test___setitem___persistent_class(self):
cache = super(CPickleCacheTests, self).test___setitem___persistent_class()
self.assertEqual(_len(cache.items()), 1)

def test_cache_garbage_collection_bytes_with_cache_size_0(self):

class DummyConnection(object):
def register(self, obj):
pass

dummy_connection = DummyConnection()

def makePersistent(oid):
persist = self._getDummyPersistentClass()()
persist._p_oid = oid
persist._p_jar = dummy_connection
return persist

cache = self._getTargetClass()(dummy_connection)
dummy_connection._cache = cache

cache.cache_size = 0
cache.cache_size_bytes = 400

oids = []
for i in range(100):
oid = self._numbered_oid(i)
oids.append(oid)
o = cache[oid] = makePersistent(oid)
cache.update_object_size_estimation(oid, 1)
o._p_estimated_size = 1
del o # leave it only in the cache

self.assertEqual(cache.cache_non_ghost_count, 100)
self.assertEqual(cache.total_estimated_size, 64*100)

cache.incrgc()
self.assertEqual(cache.total_estimated_size, 64*6)
self.assertEqual(cache.cache_non_ghost_count, 6)
self.assertEqual(len(cache), 6)

cache.full_sweep()
gc.collect() # banish the ghosts who are no longer in the ring
self.assertEqual(cache.total_estimated_size, 0)
self.assertEqual(cache.cache_non_ghost_count, 0)
self.assertEqual(len(cache), 0)


class DummyPersistent(object):
Expand Down

0 comments on commit 000501d

Please sign in to comment.