
Commit

Merge b131e1b into ac840a1
jamadden committed Jun 20, 2019
2 parents ac840a1 + b131e1b commit 52ecd47
Showing 7 changed files with 98 additions and 27 deletions.
4 changes: 2 additions & 2 deletions CHANGES.rst
@@ -5,8 +5,8 @@
3.0a3 (unreleased)
==================

- Nothing changed yet.

- Zapping a storage now also removes any persistent cache files. See
:issue:`241`.

3.0a2 (2019-06-19)
==================
5 changes: 5 additions & 0 deletions src/relstorage/cache/interfaces.py
@@ -139,6 +139,11 @@ def restore():
Restore the cache from disk.
"""

def zap_all():
"""
Remove the cache from disk.
"""

class ILRUEntry(Interface):
"""
An entry in an `ILRUCache`.
5 changes: 5 additions & 0 deletions src/relstorage/cache/local_client.py
@@ -39,6 +39,7 @@
from relstorage.cache.mapping import SizedLRUMapping

from relstorage.cache.persistence import sqlite_connect
from relstorage.cache.persistence import sqlite_files
from relstorage.cache.local_database import Database

logger = __import__('logging').getLogger(__name__)
@@ -161,6 +162,10 @@ def restore(self, row_filter=None):
with closing(conn):
self.read_from_sqlite(conn, row_filter)

def zap_all(self):
_, destroy = sqlite_files(self.options, self.prefix)
destroy()

@property
def _bucket0(self):
# For testing only.
44 changes: 30 additions & 14 deletions src/relstorage/cache/persistence.py
@@ -20,6 +20,7 @@
from __future__ import division
from __future__ import print_function

import errno
import logging
import os
import os.path
@@ -244,18 +245,10 @@ def _connect_to_file(fname, factory=Connection,

return connection

def sqlite_connect(options, prefix,
overwrite=False,
max_wal_size=DEFAULT_MAX_WAL,
close_async=DEFAULT_CLOSE_ASYNC,
mmap_size=DEFAULT_MMAP_SIZE,
page_size=DEFAULT_PAGE_SIZE,
temp_store=DEFAULT_TEMP_STORE):
def sqlite_files(options, prefix):
"""
Return a DB-API Connection object.
.. caution:: Using the connection as a context manager does **not**
result in the connection being closed, only committed or rolled back.
Calculate the sqlite filename and return it, plus a function that will
destroy the sqlite file.
"""
parent_dir = getattr(options, 'cache_local_dir', options)
# Allow for memory and temporary databases (empty string):
@@ -270,15 +263,34 @@ def sqlite_connect(options, prefix,

fname = os.path.join(parent_dir, 'relstorage-cache-' + prefix + '.sqlite3')
wal_fname = fname + '-wal'
shm_fname = fname + '-shm'
def destroy():
logger.info("Replacing any existing cache at %s", fname)
__quiet_remove(fname)
__quiet_remove(wal_fname)
__quiet_remove(shm_fname)
else:
fname = parent_dir
wal_fname = None
def destroy():
"Nothing to do."
return fname, destroy


def sqlite_connect(options, prefix,
overwrite=False,
max_wal_size=DEFAULT_MAX_WAL,
close_async=DEFAULT_CLOSE_ASYNC,
mmap_size=DEFAULT_MMAP_SIZE,
page_size=DEFAULT_PAGE_SIZE,
temp_store=DEFAULT_TEMP_STORE):
"""
Return a DB-API Connection object.
.. caution:: Using the connection as a context manager does **not**
result in the connection being closed, only committed or rolled back.
"""
fname, destroy = sqlite_files(options, prefix)

corrupt_db_ex = sqlite3.DatabaseError
if overwrite:
@@ -318,7 +330,7 @@ def destroy():
connection = _connect_to_file(fname, close_async=close_async,
pragmas=pragmas)
except corrupt_db_ex:
logger.info("Corrupt cache database at %s; replacing", fname)
logger.exception("Corrupt cache database at %s; replacing", fname)
destroy()
connection = _connect_to_file(fname, close_async=close_async,
pragmas=pragmas)
@@ -328,8 +340,12 @@ def destroy():
def __quiet_remove(path):
try:
os.unlink(path)
except os.error: # pragma: no cover
log.debug("Failed to remove %r", path)
except os.error as e:
# TODO: Use FileNotFoundError on Python 3
log_meth = log.exception
if e.errno == errno.ENOENT:
log_meth = log.debug
log_meth("Failed to remove %r", path)
return False
else:
return True
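
A quick way to see the new helper in action: because sqlite_files() resolves the cache directory with getattr(options, 'cache_local_dir', options), a plain directory string can stand in for an options object. The sketch below is illustrative only; the temporary directory and prefix are arbitrary choices, not part of this change:

    import os
    import tempfile

    from relstorage.cache.persistence import sqlite_files

    cache_dir = tempfile.mkdtemp()
    fname, destroy = sqlite_files(cache_dir, 'myprefix')
    print(fname)  # <cache_dir>/relstorage-cache-myprefix.sqlite3

    # Simulate a cache file left behind by an earlier run.
    open(fname, 'wb').close()

    destroy()  # removes the .sqlite3 file plus any -wal/-shm companions
    print(os.path.exists(fname))  # False; calling destroy() again is harmless
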
11 changes: 10 additions & 1 deletion src/relstorage/cache/storage_cache.py
@@ -290,7 +290,7 @@ def clear(self, load_persistent=True):
can be used to control this.
.. versionchanged:: 2.0b6 Added the ``load_persistent``
keyword. This argument is provisional.
keyword. This argument is provisional.
"""
self._reset()
# After this our current_tid is probably out of sync with the
@@ -305,6 +305,15 @@ def clear(self, load_persistent=True):
if load_persistent:
self.restore()

def zap_all(self):
"""
Remove all data from the cache, both locally (and shared among
other instances) and globally; in addition, remove any
persistent cache files on disk.
"""
self.clear(load_persistent=False)
self.local_client.zap_all()

def _check_tid_after_load(self, oid_int, actual_tid_int,
expect_tid_int=None):
"""Verify the tid of an object loaded from the database is sane."""
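
The new method is a small convenience wrapper: per the body above, StorageCache.zap_all() simply combines two calls that previously had to be made separately. Assuming cache is an already-constructed StorageCache instance (construction elided here), the two forms below are equivalent:

    # What callers had to spell out before:
    cache.clear(load_persistent=False)   # forget cached state without reloading it from disk
    cache.local_client.zap_all()         # delete the persistent sqlite cache file(s)

    # What they can write now:
    cache.zap_all()
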
54 changes: 45 additions & 9 deletions src/relstorage/cache/tests/test_storage_cache.py
@@ -109,15 +109,25 @@ def _setup_for_save(self):

return c, oid, tid

def test_save(self):
def assertNoPersistentCache(self, cache):
import os
from relstorage.cache.persistence import sqlite_files
fname, _ = sqlite_files(cache.options, cache.prefix)
if fname:
self.assertFalse(os.path.exists(fname), fname)

def assertPersistentCache(self, cache):
import os
from relstorage.cache.persistence import sqlite_files
fname, _ = sqlite_files(cache.options, cache.prefix)
if fname:
self.assertTrue(os.path.exists(fname), fname)

def test_save(self):
c, oid, tid = self._setup_for_save()
self.assertNoPersistentCache(c)
c.save(overwrite=True, close_async=False)
files = os.listdir(c.options.cache_local_dir)
__traceback_info__ = files
# Older versions of sqlite may leave -shm and -wal
# files around.
self.assertGreaterEqual(len(files), 1)
self.assertPersistentCache(c)

# Creating one in the same place automatically loads it.
c2 = self._makeOne(cache_local_dir=c.options.cache_local_dir)
@@ -147,12 +157,38 @@ def test_save(self):
self.test_closed_state(c)

def test_save_no_hits_no_sets(self):
import os
c, _, _ = self._setup_for_save()
c.local_client.reset_stats()
c.save(close_async=False)
files = os.listdir(c.options.cache_local_dir)
self.assertEmpty(files)
self.assertNoPersistentCache(c)

def test_zap_all(self):
c, _, _ = self._setup_for_save()
self.assertNoPersistentCache(c)

c.save(overwrite=True, close_async=False)
self.assertPersistentCache(c)

c.zap_all()
self.assertEmpty(c)
self.assertNoPersistentCache(c)

# We can do it again and again
c.zap_all()
self.assertEmpty(c)
self.assertNoPersistentCache(c)

def test_zap_all_no_local_dir(self):
c, _, _ = self._setup_for_save()
self.assertNoPersistentCache(c)
c.options.cache_local_dir = None

c.save(overwrite=True, close_async=False)
self.assertNoPersistentCache(c)

c.zap_all()
self.assertEmpty(c)
self.assertNoPersistentCache(c)

def test_clear(self):
from relstorage.tests.fakecache import data
2 changes: 1 addition & 1 deletion src/relstorage/storage.py
@@ -431,7 +431,7 @@ def zap_all(self, **kwargs):
self._adapter.schema.zap_all(**kwargs)
self._drop_load_connection()
self._drop_store_connection()
self._cache.clear(load_persistent=False)
self._cache.zap_all()

def release(self):
"""
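
At the storage level this is what the CHANGES.rst entry above describes: zapping a storage now also removes its persistent cache files. A hedged usage sketch, assuming storage is an already-open RelStorage instance configured with a non-empty cache-local-dir (construction and adapter setup elided):

    # Previously, zap_all() emptied the database and cleared the cache but
    # left relstorage-cache-<prefix>.sqlite3 behind in cache-local-dir.
    # Now the persistent file is deleted as well.
    storage.zap_all()
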
