Fix scrubber start & not scrubbing when not daemon
1) Scrubber not starting
When the scrubber is run from the command line or via cron, it would try
to call its app object, which resulted in a TypeError.
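
A minimal sketch of the failure mode (illustrative only; the Scrubber
class below merely stands in for the scrubber app object built in
bin/glance-scrubber):

    # Sketch, not Glance code: the app object is already an instance,
    # so calling it raises a TypeError.
    class Scrubber(object):
        def run(self, pool):
            print("scrubbing with pool %r" % pool)

    app = Scrubber()    # app is already constructed...
    pool = object()     # stand-in for the eventlet GreenPool
    # app().run(pool)   # ...so app() fails with:
                        #   TypeError: 'Scrubber' object is not callable
    app.run(pool)       # the fix: call run() on the instance directly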

2) Scrubber not scrubbing
In the same usage scenario as above, the delete jobs queued onto the
green pool's starmap are never started.  A slight delay is enough to let
them run.
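
This can be reproduced with a small standalone snippet (a sketch only,
with a trivial delete function standing in for the scrubber's _delete):
eventlet only runs the greenthreads queued by starmap once the calling
thread yields to the hub, which eventlet.sleep() does.

    # Sketch, not Glance code.
    import eventlet

    def delete(image_id):
        print("deleting image %s" % image_id)

    pool = eventlet.greenpool.GreenPool(1000)
    pool.starmap(delete, [(i,) for i in range(3)])

    # Without a yield here, a short-lived (non-daemon) process can exit
    # before any queued delete job runs; sleep(0.1) hands control to the
    # eventlet hub so the workers get scheduled.
    eventlet.sleep(0.1)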

Also add a functional test designed to exercise the scrubber when run
in "regular mode", which covers both of these cases.  I've run the test
successfully with 1000 images, which should be enough to prove the
sleep(0.1) does the trick.

Fixes bug 1052537.

Change-Id: If29704d8a425194085b59b7dfd265ffaef16302c
Paul Bourke authored and ttx committed Sep 20, 2012
1 parent b0df897 commit c417382
Showing 4 changed files with 58 additions and 2 deletions.
2 changes: 1 addition & 1 deletion bin/glance-scrubber
@@ -66,6 +66,6 @@ if __name__ == '__main__':
        else:
            import eventlet
            pool = eventlet.greenpool.GreenPool(1000)
            scrubber = app().run(pool)
            scrubber = app.run(pool)
    except RuntimeError, e:
        sys.exit("ERROR: %s" % e)
3 changes: 3 additions & 0 deletions glance/store/scrubber.py
@@ -122,6 +122,9 @@ def run(self, pool, event=None):

        LOG.info(_("Deleting %s images") % len(delete_work))
        pool.starmap(self._delete, delete_work)
        # NOTE(bourke): When not running as a daemon, a slight pause is needed
        # to allow the starmap to begin its work.
        eventlet.sleep(0.1)

        if self.cleanup:
            self._cleanup(pool)
3 changes: 2 additions & 1 deletion glance/tests/functional/__init__.py
@@ -227,6 +227,7 @@ def __init__(self, test_dir, port, policy_file, delayed_delete=False,
        self.delayed_delete = delayed_delete
        self.owner_is_tenant = True
        self.workers = 0
        self.scrub_time = 5
        self.image_cache_dir = os.path.join(self.test_dir,
                                            'cache')
        self.image_cache_driver = 'sqlite'
@@ -272,7 +273,7 @@ def __init__(self, test_dir, port, policy_file, delayed_delete=False,
delayed_delete = %(delayed_delete)s
owner_is_tenant = %(owner_is_tenant)s
workers = %(workers)s
scrub_time = 5
scrub_time = %(scrub_time)s
scrubber_datadir = %(scrubber_datadir)s
image_cache_dir = %(image_cache_dir)s
image_cache_driver = %(image_cache_driver)s
52 changes: 52 additions & 0 deletions glance/tests/functional/test_scrubber.py
@@ -21,6 +21,7 @@

from glance import client
from glance.registry import client as registry_client
from glance.tests.utils import execute


TEST_IMAGE_DATA = '*' * 5 * 1024
@@ -125,3 +126,54 @@ def test_delayed_delete(self):
            self.assertEqual(rec['status'], 'deleted')

        self.stop_servers()

    def test_scrubber_app(self):
        """
        test that the glance-scrubber script runs successfully when not in
        daemon mode
        """
        self.cleanup()
        self.start_servers(delayed_delete=True, daemon=False)

        client = self._get_client()
        registry = self._get_registry_client()

        # add some images and ensure it was successful
        img_ids = []
        for i in range(0, 3):
            meta = client.add_image(TEST_IMAGE_META, TEST_IMAGE_DATA)
            id = meta['id']
            img_ids.append(id)
        filters = {'deleted': True, 'is_public': 'none',
                   'status': 'pending_delete'}
        recs = registry.get_images_detailed(filters=filters)
        self.assertFalse(recs)

        # delete those images
        for img_id in img_ids:
            client.delete_image(img_id)
        recs = registry.get_images_detailed(filters=filters)
        self.assertTrue(recs)

        filters = {'deleted': True, 'is_public': 'none'}
        recs = registry.get_images_detailed(filters=filters)
        self.assertTrue(recs)
        for rec in recs:
            self.assertEqual(rec['status'], 'pending_delete')

        # wait for the scrub time on the image to pass
        time.sleep(self.api_server.scrub_time)

        # scrub images and make sure they get deleted
        cmd = ("bin/glance-scrubber --config-file %s" %
               self.scrubber_daemon.conf_file_name)
        exitcode, out, err = execute(cmd, raise_error=False)
        self.assertEqual(0, exitcode)

        filters = {'deleted': True, 'is_public': 'none'}
        recs = registry.get_images_detailed(filters=filters)
        self.assertTrue(recs)
        for rec in recs:
            self.assertEqual(rec['status'], 'deleted')

        self.stop_servers()
