From 6de0ca7ae536537a423e092c98f932770a7a6a67 Mon Sep 17 00:00:00 2001 From: Randy Barlow Date: Thu, 6 Aug 2015 01:02:43 -0400 Subject: [PATCH] Create a new Blob model. This commit introduces a new Unit type called Blob, and converts the Docker v2 sync code to use it instead of overloading the Image model that is also used by v1 sync and publish. This will simplify the concepts in Pulp and make the v2 distributor easier to write. Conveniently, it also separates the locations of the Units on the filesystem as well. https://pulp.plan.io/issues/967 re #967 --- common/pulp_docker/common/models.py | 81 ++++++++-- common/test/unit/test_models.py | 51 +++++++ plugins/pulp_docker/plugins/importers/sync.py | 44 ++---- .../pulp_docker/plugins/importers/v1_sync.py | 29 +++- .../test/unit/plugins/importers/test_sync.py | 143 ++++++------------ .../unit/plugins/importers/test_v1_sync.py | 21 ++- plugins/types/docker.json | 7 + 7 files changed, 238 insertions(+), 138 deletions(-) diff --git a/common/pulp_docker/common/models.py b/common/pulp_docker/common/models.py index 2f906cd3..14512264 100644 --- a/common/pulp_docker/common/models.py +++ b/common/pulp_docker/common/models.py @@ -7,23 +7,72 @@ from pulp_docker.common import constants +class Blob(object): + """ + This class is used to represent Docker v2 blobs. + """ + TYPE_ID = 'docker_blob' + + def __init__(self, digest): + """ + Initialize the Blob. + + :param digest: This field will store the blob's digest. + :type digest: basestring + """ + self.digest = digest + + @property + def unit_key(self): + """ + Return the Blob's unit key. + + :return: unit key + :rtype: dict + """ + return { + 'digest': self.digest + } + + @property + def metadata(self): + """ + A blob has no metadata, so return an empty dictionary. + + :return: Empty dictionary + :rtype: dict + """ + return {} + + @property + def relative_path(self): + """ + Return the Blob's relative path for filesystem storage.
+ + :return: the relative path to where this Blob should live + :rtype: basestring + """ + return self.digest + + class Image(object): """ - This class is used to represent Docker v1 images and Docker v2 blobs. + This class is used to represent Docker v1 images. """ TYPE_ID = constants.IMAGE_TYPE_ID def __init__(self, image_id, parent_id, size): """ - :param image_id: For Docker v1 images, this field will store the image_id. For Docker v2 - blobs, this field will store the blob's digest. - :type image_id: basestring - :param parent_id: parent's unique image ID - :type parent_id: basestring - :param size: size of the image in bytes, as reported by docker. - This can be None, because some very old docker images - do not contain it in their metadata. - :type size: int or NoneType + Initialize the Image. + + :param image_id: The Image's id. + :type image_id: basestring + :param parent_id: parent's unique image ID + :type parent_id: basestring + :param size: size of the image in bytes, as reported by docker. + This can be None, because some very old docker images + do not contain it in their metadata. + :type size: int or NoneType """ self.image_id = image_id self.parent_id = parent_id @@ -32,6 +81,8 @@ def __init__(self, image_id, parent_id, size): @property def unit_key(self): """ + Return the Image's unit key. + :return: unit key :rtype: dict """ @@ -42,6 +93,8 @@ def unit_key(self): @property def relative_path(self): """ + Return the Image's relative path for filesystem storage. + :return: the relative path to where this image's directory should live :rtype: basestring """ @@ -50,6 +103,8 @@ def relative_path(self): @property def unit_metadata(self): """ + Return the Image's Metadata. 
+ :return: a subset of the complete docker metadata about this image, including only what pulp_docker cares about :rtype: dict @@ -86,9 +141,9 @@ def __init__(self, digest, name, tag, architecture, fs_layers, history, schema_v :param architecture: The host architecture on which the image is intended to run :type architecture: basestring :param fs_layers: A list of dictionaries. Each dictionary contains one key-value pair - that represents a layer of the image. The key is blobSum, and the - value is the digest of the referenced layer. See the documentation - referenced in the class docblock for more information. + that represents a layer (a Blob) of the image. The key is blobSum, + and the value is the digest of the referenced layer. See the + documentation referenced in the class docblock for more information. :type fs_layers: list :param history: This is a list of unstructured historical data for v1 compatibility. Each member is a dictionary with a "v1Compatibility" key that indexes diff --git a/common/test/unit/test_models.py b/common/test/unit/test_models.py index f0ce44a5..c9d46aec 100644 --- a/common/test/unit/test_models.py +++ b/common/test/unit/test_models.py @@ -34,6 +34,57 @@ def test_metadata(self): self.assertEqual(metadata.get('size'), 1024) +class TestBlob(unittest.TestCase): + """ + This class contains tests for the Blob class. + """ + def test___init__(self): + """ + Assert correct behavior from the __init__() method. + """ + digest = 'sha256:5f70bf18a086007016e948b04aed3b82103a36bea41755b6cddfaf10ace3c6ef' + + blob = models.Blob(digest) + + self.assertEqual(blob.digest, digest) + + def test_type_id(self): + """ + Assert that the TYPE_ID attribute is correct. + """ + self.assertEqual(models.Blob.TYPE_ID, 'docker_blob') + + def test_unit_key(self): + """ + Assert correct behavior from the unit_key() method. 
+ """ + digest = 'sha256:5f70bf18a086007016e948b04aed3b82103a36bea41755b6cddfaf10ace3c6ef' + + blob = models.Blob(digest) + + self.assertEqual(blob.unit_key, {'digest': digest}) + + def test_metadata(self): + """ + Assert correct behavior from the metadata() method. + """ + digest = 'sha256:5f70bf18a086007016e948b04aed3b82103a36bea41755b6cddfaf10ace3c6ef' + + blob = models.Blob(digest) + + self.assertEqual(blob.metadata, {}) + + def test_relative_path(self): + """ + Assert correct behavior from the relative_path() method. + """ + digest = 'sha256:5f70bf18a086007016e948b04aed3b82103a36bea41755b6cddfaf10ace3c6ef' + + blob = models.Blob(digest) + + self.assertEqual(blob.relative_path, digest) + + class TestManifest(unittest.TestCase): """ This class contains tests for the Manifest class. diff --git a/plugins/pulp_docker/plugins/importers/sync.py b/plugins/pulp_docker/plugins/importers/sync.py index 6c1b70ff..4a4e0774 100644 --- a/plugins/pulp_docker/plugins/importers/sync.py +++ b/plugins/pulp_docker/plugins/importers/sync.py @@ -1,13 +1,10 @@ """ -This module contains the primary sync entry point. Most of the code in this module is for syncing -Docker v2 registries, but if the feed_url is determined not to be a v2 registry this module will -call the SyncStep found in pulp_docker.plugins.importers.v1_sync instead. +This module contains the primary sync entry point for Docker v2 registries. """ from gettext import gettext as _ import logging import os import shutil -import stat from pulp.common.plugins import importer_constants from pulp.plugins.util import nectar_config @@ -23,9 +20,7 @@ class SyncStep(PluginStep): """ - This PluginStep is the primary entry point into a repository sync against a Docker registry. It - will work for either v1 or v2 registries, though if the registry is determined to be a v1 - registry it will simply create the old v1 SyncStep as its only child step. 
+ This PluginStep is the primary entry point into a repository sync against a Docker v2 registry. """ # The sync will fail if these settings are not provided in the config required_settings = (constants.CONFIG_KEY_UPSTREAM_NAME, importer_constants.KEY_FEED) @@ -37,7 +32,7 @@ def __init__(self, repo=None, conduit=None, config=None, required keys are present. It then constructs some needed items (such as a download config), and determines whether the feed URL is a Docker v2 registry or not. If it is, it instantiates child tasks that are appropriate for syncing a v2 registry, and if it is not it - instantiates the old v1 SyncStep as its only child step. + raises a NotImplementedError. :param repo: repository to sync :type repo: pulp.plugins.model.Repository @@ -60,7 +55,7 @@ def __init__(self, repo=None, conduit=None, config=None, upstream_name = config.get(constants.CONFIG_KEY_UPSTREAM_NAME) url = config.get(importer_constants.KEY_FEED) # The GetMetadataStep will set this to a list of dictionaries of the form - # {'image_id': digest}. + # {'digest': digest}. self.available_units = [] # Create a Repository object to interact with. 
@@ -74,8 +69,8 @@ def __init__(self, repo=None, conduit=None, config=None, working_dir=working_dir) self.add_child(self.step_get_metadata) # save this step so its "units_to_download" attribute can be accessed later - self.step_get_local_units = GetLocalImagesStep( - constants.IMPORTER_TYPE_ID, constants.IMAGE_TYPE_ID, ['image_id'], self.working_dir) + self.step_get_local_units = GetLocalBlobsStep( + constants.IMPORTER_TYPE_ID, models.Blob.TYPE_ID, ['digest'], self.working_dir) self.add_child(self.step_get_local_units) self.add_child( DownloadStep( @@ -94,8 +89,8 @@ def generate_download_requests(self): :rtype: types.GeneratorType """ for unit_key in self.step_get_local_units.units_to_download: - image_id = unit_key['image_id'] - yield self.index_repository.create_blob_download_request(image_id, + digest = unit_key['digest'] + yield self.index_repository.create_blob_download_request(digest, self.get_working_dir()) def sync(self): @@ -207,11 +202,11 @@ def process_main(self): available_blobs.add(layer['blobSum']) # Update the available units with the blobs we learned about - available_blobs = [{'image_id': d} for d in available_blobs] + available_blobs = [{'digest': d} for d in available_blobs] self.parent.parent.available_units.extend(available_blobs) -class GetLocalImagesStep(GetLocalUnitsStep): +class GetLocalBlobsStep(GetLocalUnitsStep): def _dict_to_unit(self, unit_dict): """ convert a unit dictionary (a flat dict that has all unit key, metadata, @@ -231,8 +226,7 @@ def _dict_to_unit(self, unit_dict): :return: a unit instance :rtype: pulp.plugins.model.Unit """ - model = models.Image(unit_dict['image_id'], unit_dict.get('parent_id'), - unit_dict.get('size')) + model = models.Blob(unit_dict['digest']) return self.get_conduit().init_unit(model.TYPE_ID, model.unit_key, {}, model.relative_path) @@ -295,15 +289,13 @@ def process_main(self): _logger.debug('saving manifest %s' % model.digest) self.get_conduit().save_unit(unit) - # Save the Images + # Save the Blobs for 
unit_key in self.parent.step_get_local_units.units_to_download: - image_id = unit_key['image_id'] - size = os.stat(os.path.join(self.working_dir, unit_key['image_id']))[stat.ST_SIZE] - model = models.Image(image_id, None, size) - unit = self.get_conduit().init_unit(model.TYPE_ID, model.unit_key, model.unit_metadata, + model = models.Blob(unit_key['digest']) + unit = self.get_conduit().init_unit(model.TYPE_ID, model.unit_key, model.metadata, model.relative_path) self._move_file(unit) - _logger.debug('saving Image %s' % image_id) + _logger.debug('saving Blob %s' % unit_key) self.get_conduit().save_unit(unit) def _move_file(self, unit): @@ -314,9 +306,5 @@ def _move_file(self, unit): :param unit: a pulp unit :type unit: pulp.plugins.model.Unit """ - if unit.type_id == models.Image.TYPE_ID: - filename = unit.unit_key['image_id'] - elif unit.type_id == models.Manifest.TYPE_ID: - filename = unit.unit_key['digest'] _logger.debug('moving files in to place for Unit {}'.format(unit)) - shutil.move(os.path.join(self.working_dir, filename), unit.storage_path) + shutil.move(os.path.join(self.working_dir, unit.unit_key['digest']), unit.storage_path) diff --git a/plugins/pulp_docker/plugins/importers/v1_sync.py b/plugins/pulp_docker/plugins/importers/v1_sync.py index 0c76128a..1e77ed69 100644 --- a/plugins/pulp_docker/plugins/importers/v1_sync.py +++ b/plugins/pulp_docker/plugins/importers/v1_sync.py @@ -10,12 +10,12 @@ from pulp.common.plugins import importer_constants from pulp.plugins.util import nectar_config -from pulp.plugins.util.publish_step import PluginStep, DownloadStep +from pulp.plugins.util.publish_step import DownloadStep, GetLocalUnitsStep, PluginStep from pulp.server.exceptions import MissingValue from pulp_docker.common import constants, models from pulp_docker.plugins import registry -from pulp_docker.plugins.importers import sync, tags +from pulp_docker.plugins.importers import tags _logger = logging.getLogger(__name__) @@ -62,7 +62,7 @@ def __init__(self, 
repo=None, conduit=None, config=None, self.add_child(GetMetadataStep(working_dir=working_dir)) # save this step so its "units_to_download" attribute can be accessed later - self.step_get_local_units = sync.GetLocalImagesStep( + self.step_get_local_units = GetLocalImagesStep( constants.IMPORTER_TYPE_ID, constants.IMAGE_TYPE_ID, ['image_id'], working_dir) self.add_child(self.step_get_local_units) self.add_child(DownloadStep(constants.SYNC_STEP_DOWNLOAD, @@ -219,6 +219,29 @@ def find_and_read_ancestry_file(image_id, parent_dir): return json.load(ancestry_file) +class GetLocalImagesStep(GetLocalUnitsStep): + def _dict_to_unit(self, unit_dict): + """ + convert a unit dictionary (a flat dict that has all unit key, metadata, + etc. keys at the root level) into a Unit object. This requires knowing + not just what fields are part of the unit key, but also how to derive + the storage path. + Any keys in the "metadata" dict on the returned unit will overwrite the + corresponding values that are currently saved in the unit's metadata. In + this case, we pass an empty dict, because we don't want to make changes. + :param unit_dict: a flat dictionary that has all unit key, metadata, + etc. 
keys at the root level, representing a unit + in pulp + :type unit_dict: dict + :return: a unit instance + :rtype: pulp.plugins.model.Unit + """ + model = models.Image(unit_dict['image_id'], unit_dict.get('parent_id'), + unit_dict.get('size')) + return self.get_conduit().init_unit(model.TYPE_ID, model.unit_key, {}, + model.relative_path) + + class SaveUnits(PluginStep): def __init__(self, working_dir): """ diff --git a/plugins/test/unit/plugins/importers/test_sync.py b/plugins/test/unit/plugins/importers/test_sync.py index 611facb8..d2d0c5d8 100644 --- a/plugins/test/unit/plugins/importers/test_sync.py +++ b/plugins/test/unit/plugins/importers/test_sync.py @@ -84,7 +84,7 @@ def test_process_main_with_one_layer(self, super_process_main, from_json): [{"blobSum": expected_blob_sum}] ) # The layer should have been added to the parent.parent.available_units list - self.assertEqual(step.parent.parent.available_units, [{'image_id': expected_blob_sum}]) + self.assertEqual(step.parent.parent.available_units, [{'digest': expected_blob_sum}]) @mock.patch('pulp_docker.plugins.importers.sync.models.Manifest.from_json', side_effect=models.Manifest.from_json) @@ -130,7 +130,7 @@ def test_process_main_with_repeated_layers(self, super_process_main, from_json): self.assertEqual(step.parent.manifests[digest].fs_layers, expected_fs_layers) # The layers should have been added to the parent.parent.available_units list, in no # particular order - self.assertEqual(set([u['image_id'] for u in step.parent.parent.available_units]), + self.assertEqual(set([u['digest'] for u in step.parent.parent.available_units]), set(expected_blob_sums)) @mock.patch('pulp_docker.plugins.importers.sync.models.Manifest.from_json', @@ -175,27 +175,27 @@ def test_process_main_with_unique_layers(self, super_process_main, from_json): self.assertEqual(step.parent.manifests[digest].fs_layers, expected_fs_layers) # The layers should have been added to the parent.parent.available_units list, in no # particular order - 
self.assertEqual(set([u['image_id'] for u in step.parent.parent.available_units]), + self.assertEqual(set([u['digest'] for u in step.parent.parent.available_units]), set(expected_blob_sums)) -class TestGetLocalImagesStep(unittest.TestCase): +class TestGetLocalBlobsStep(unittest.TestCase): """ - This class contains tests for the GetLocalImagesStep class. + This class contains tests for the GetLocalBlobsStep class. """ def test__dict_to_unit(self): """ Assert correct behavior from the _dict_to_unit() method. """ - step = sync.GetLocalImagesStep(constants.IMPORTER_TYPE_ID, constants.IMAGE_TYPE_ID, - ['image_id'], '/working/dir') + step = sync.GetLocalBlobsStep(constants.IMPORTER_TYPE_ID, models.Blob.TYPE_ID, + ['digest'], '/working/dir') step.conduit = mock.MagicMock() - unit = step._dict_to_unit({'image_id': 'abc123', 'parent_id': None, 'size': 12}) + unit = step._dict_to_unit({'digest': 'abc123'}) self.assertTrue(unit is step.conduit.init_unit.return_value) step.conduit.init_unit.assert_called_once_with( - constants.IMAGE_TYPE_ID, {'image_id': 'abc123'}, {}, 'abc123') + models.Blob.TYPE_ID, {'digest': 'abc123'}, {}, 'abc123') class TestGetLocalManifestsStep(unittest.TestCase): @@ -291,20 +291,20 @@ def test___init__(self, super___init__): self.assertEqual(step.description, _('Saving manifests and blobs')) @mock.patch('pulp_docker.plugins.importers.sync.shutil.move') - def test__move_files_with_image(self, move): + def test__move_files_with_blob(self, move): """ - Assert correct operation from the _move_files() method with an Image unit. + Assert correct operation from the _move_files() method with a Blob unit. 
""" working_dir = '/working/dir/' step = sync.SaveUnitsStep(working_dir) - unit_key = {'image_id': 'some_id'} - metadata = {'some': 'metadata'} + unit_key = {'digest': 'some_digest'} + metadata = {} storage_path = '/a/cool/storage/path' - unit = model.Unit(models.Image.TYPE_ID, unit_key, metadata, storage_path) + unit = model.Unit(models.Blob.TYPE_ID, unit_key, metadata, storage_path) step._move_file(unit) - move.assert_called_once_with('/working/dir/some_id', storage_path) + move.assert_called_once_with('/working/dir/some_digest', storage_path) @mock.patch('pulp_docker.plugins.importers.sync.shutil.move') def test__move_files_with_manifest(self, move): @@ -323,18 +323,18 @@ def test__move_files_with_manifest(self, move): move.assert_called_once_with('/working/dir/some_digest', storage_path) @mock.patch('pulp_docker.plugins.importers.sync.SaveUnitsStep._move_file') - def test_process_main_new_images(self, _move_file): + def test_process_main_new_blobs(self, _move_file): """ - Test process_main() when there are new images that were downloaded. + Test process_main() when there are new Blobs that were downloaded. 
""" working_dir = '/working/dir/' step = sync.SaveUnitsStep(working_dir) - blob_sizes = { - 'sha256:5f70bf18a086007016e948b04aed3b82103a36bea41755b6cddfaf10ace3c6ef': 256, - 'sha256:cc8567d70002e957612902a8e985ea129d831ebe04057d88fb644857caa45d11': 42} + digests = ( + 'sha256:5f70bf18a086007016e948b04aed3b82103a36bea41755b6cddfaf10ace3c6ef', + 'sha256:cc8567d70002e957612902a8e985ea129d831ebe04057d88fb644857caa45d11') step.parent = mock.MagicMock() step.parent.step_get_local_units.units_to_download = [ - {'image_id': digest} for digest in sorted(blob_sizes.keys())] + {'digest': digest} for digest in digests] def fake_init_unit(type_id, unit_key, metadata, path): """ @@ -342,61 +342,43 @@ def fake_init_unit(type_id, unit_key, metadata, path): """ return model.Unit(type_id, unit_key, metadata, path) - def fake_stat(path): - """ - Return a fake stat result for the given path. - """ - return (None, None, None, None, None, None, blob_sizes[path.split('/')[-1]]) - step.parent.get_conduit.return_value.init_unit.side_effect = fake_init_unit - with mock.patch('pulp_docker.plugins.importers.sync.os.stat') as stat: - stat.side_effect = fake_stat - - step.process_main() - - # Each image should have been stat'd for its size - self.assertEqual( - [call[1] for call in stat.mock_calls], - [(os.path.join(working_dir, digest),) for digest in sorted(blob_sizes.keys())]) + step.process_main() # Both units should have been moved self.assertEqual(_move_file.call_count, 2) - self.assertEqual(set([call[1][0].unit_key['image_id'] for call in _move_file.mock_calls]), - set([d for d in blob_sizes.keys()])) + self.assertEqual([call[1][0].unit_key['digest'] for call in _move_file.mock_calls], + [d for d in digests]) # Both units should have been saved self.assertEqual(step.parent.get_conduit.return_value.save_unit.call_count, 2) self.assertEqual( - set([call[1][0].unit_key['image_id'] for call in - step.parent.get_conduit.return_value.save_unit.mock_calls]), - set([d for d in 
blob_sizes.keys()])) - # The Units should have been initialized with the proper sizes - self.assertEqual( - set([call[1][0].metadata['size'] for call in - step.parent.get_conduit.return_value.save_unit.mock_calls]), - set([s for k, s in blob_sizes.items()])) + [call[1][0].unit_key['digest'] for call in + step.parent.get_conduit.return_value.save_unit.mock_calls], + [d for d in digests]) @mock.patch('pulp_docker.plugins.importers.sync.SaveUnitsStep._move_file') - def test_process_main_new_images_and_manifests(self, _move_file): + def test_process_main_new_blobs_and_manifests(self, _move_file): """ - Test process_main() when there are new images and manifests that were downloaded. + Test process_main() when there are new Blobs and manifests that were downloaded. """ working_dir = '/working/dir/' step = sync.SaveUnitsStep(working_dir) # Simulate two newly downloaded blobs - blob_sizes = { - 'sha256:5f70bf18a086007016e948b04aed3b82103a36bea41755b6cddfaf10ace3c6ef': 256, - 'sha256:cc8567d70002e957612902a8e985ea129d831ebe04057d88fb644857caa45d11': 42} + blob_digests = ( + 'sha256:5f70bf18a086007016e948b04aed3b82103a36bea41755b6cddfaf10ace3c6ef', + 'sha256:cc8567d70002e957612902a8e985ea129d831ebe04057d88fb644857caa45d11') step.parent = mock.MagicMock() step.parent.step_get_local_units.units_to_download = [ - {'image_id': digest} for digest in sorted(blob_sizes.keys())] + {'digest': digest} for digest in blob_digests] # Simulate one newly downloaded manifest with open(os.path.join(TEST_DATA_PATH, 'manifest_repeated_layers.json')) as manifest_file: manifest = manifest_file.read() - digest = 'sha256:a001e892f3ba0685184486b08cda99bf81f551513f4b56e72954a1d4404195b1' - manifest = models.Manifest.from_json(manifest, digest) - step.parent.step_get_metadata.manifests = {digest: manifest} - step.parent.step_get_metadata.step_get_local_units.units_to_download = [{'digest': digest}] + manifest_digest = 'sha256:a001e892f3ba0685184486b08cda99bf81f551513f4b56e72954a1d4404195b1' + 
manifest = models.Manifest.from_json(manifest, manifest_digest) + step.parent.step_get_metadata.manifests = {manifest_digest: manifest} + step.parent.step_get_metadata.step_get_local_units.units_to_download = [ + {'digest': manifest_digest}] def fake_init_unit(type_id, unit_key, metadata, path): """ @@ -404,45 +386,28 @@ def fake_init_unit(type_id, unit_key, metadata, path): """ return model.Unit(type_id, unit_key, metadata, path) - def fake_stat(path): - """ - Return a fake stat result for the given path. - """ - return (None, None, None, None, None, None, blob_sizes[path.split('/')[-1]]) - step.parent.get_conduit.return_value.init_unit.side_effect = fake_init_unit - with mock.patch('pulp_docker.plugins.importers.sync.os.stat') as stat: - stat.side_effect = fake_stat - - step.process_main() - - # Each image should have been stat'd for its size - self.assertEqual([call[1] for call in stat.mock_calls], - [(os.path.join(working_dir, d),) for d in sorted(blob_sizes.keys())]) + step.process_main() # All three units should have been moved self.assertEqual(_move_file.call_count, 3) - self.assertEqual(_move_file.mock_calls[0][1][0].unit_key, {'digest': digest}) + self.assertEqual(_move_file.mock_calls[0][1][0].unit_key, {'digest': manifest_digest}) self.assertEqual([call[1][0].unit_key for call in _move_file.mock_calls[1:3]], - [{'image_id': d} for d in sorted(blob_sizes.keys())]) + [{'digest': d} for d in blob_digests]) # All three units should have been saved self.assertEqual(step.parent.get_conduit.return_value.save_unit.call_count, 3) self.assertEqual( step.parent.get_conduit.return_value.save_unit.mock_calls[0][1][0].unit_key, - {'digest': digest}) + {'digest': manifest_digest}) self.assertEqual( - [call[1][0].unit_key['image_id'] for call in + [call[1][0].unit_key['digest'] for call in step.parent.get_conduit.return_value.save_unit.mock_calls[1:3]], - [d for d in sorted(blob_sizes.keys())]) + [d for d in blob_digests]) # The Units' metadata should have been 
initialized properly self.assertEqual( step.parent.get_conduit.return_value.save_unit.mock_calls[0][1][0].metadata['name'], 'hello-world') - self.assertEqual( - set([call[1][0].metadata['size'] for call in - step.parent.get_conduit.return_value.save_unit.mock_calls[1:3]]), - set([s for k, s in sorted(blob_sizes.items())])) @mock.patch('pulp_docker.plugins.importers.sync.SaveUnitsStep._move_file') def test_process_main_new_manifests(self, _move_file): @@ -470,11 +435,7 @@ def fake_init_unit(type_id, unit_key, metadata, path): step.parent.get_conduit.return_value.init_unit.side_effect = fake_init_unit - with mock.patch('pulp_docker.plugins.importers.sync.os.stat') as stat: - step.process_main() - - # stat() should not have been called since there weren't any new images - self.assertEqual(stat.call_count, 0) + step.process_main() # The new manifest should have been moved self.assertEqual(_move_file.call_count, 1) @@ -503,11 +464,7 @@ def test_process_main_no_units(self, _move_file): step.parent.step_get_metadata.manifests = {} step.parent.step_get_metadata.step_get_local_units.units_to_download = [] - with mock.patch('pulp_docker.plugins.importers.sync.os.stat') as stat: - step.process_main() - - # stat() should not have been called since there weren't any new images - self.assertEqual(stat.call_count, 0) + step.process_main() # Nothing should have been moved self.assertEqual(_move_file.call_count, 0) @@ -552,7 +509,7 @@ def test___init___with_v2_registry(self, api_version_check, _validate): # The correct children should be in place in the right order self.assertEqual( [type(child) for child in step.children], - [sync.GetMetadataStep, sync.GetLocalImagesStep, sync.DownloadStep, sync.SaveUnitsStep]) + [sync.GetMetadataStep, sync.GetLocalBlobsStep, sync.DownloadStep, sync.SaveUnitsStep]) # Ensure the first step was initialized correctly self.assertEqual(step.children[0].repo, repo) self.assertEqual(step.children[0].conduit, conduit) @@ -560,8 +517,8 @@ def 
test___init___with_v2_registry(self, api_version_check, _validate): self.assertEqual(step.children[0].working_dir, working_dir) # And the second step self.assertEqual(step.children[1].plugin_type, constants.IMPORTER_TYPE_ID) - self.assertEqual(step.children[1].unit_type, models.Image.TYPE_ID) - self.assertEqual(step.children[1].unit_key_fields, ['image_id']) + self.assertEqual(step.children[1].unit_type, models.Blob.TYPE_ID) + self.assertEqual(step.children[1].unit_key_fields, ['digest']) self.assertEqual(step.children[1].working_dir, working_dir) # And the third step self.assertEqual(step.children[2].step_type, constants.SYNC_STEP_DOWNLOAD) @@ -606,7 +563,7 @@ def test_generate_download_requests(self): working_dir = '/some/path' step = sync.SyncStep(repo, conduit, config, working_dir) step.step_get_local_units.units_to_download = [ - {'image_id': i} for i in ['cool', 'stuff']] + {'digest': i} for i in ['cool', 'stuff']] requests = step.generate_download_requests() diff --git a/plugins/test/unit/plugins/importers/test_v1_sync.py b/plugins/test/unit/plugins/importers/test_v1_sync.py index f93ab684..e3668f6a 100644 --- a/plugins/test/unit/plugins/importers/test_v1_sync.py +++ b/plugins/test/unit/plugins/importers/test_v1_sync.py @@ -13,7 +13,7 @@ from pulp.server.exceptions import MissingValue from pulp.server.managers import factory -from pulp_docker.common import constants +from pulp_docker.common import constants, models from pulp_docker.plugins.importers import v1_sync from pulp_docker.plugins import registry @@ -21,6 +21,25 @@ factory.initialize() +class TestGetLocalImagesStep(unittest.TestCase): + """ + This class contains tests for the GetLocalImagesStep class. + """ + def test__dict_to_unit(self): + """ + Assert correct behavior from the _dict_to_unit() method. 
+ """ + step = v1_sync.GetLocalImagesStep(constants.IMPORTER_TYPE_ID, models.Image.TYPE_ID, + ['image_id'], '/working/dir') + step.conduit = mock.MagicMock() + + unit = step._dict_to_unit({'image_id': 'abc123'}) + + self.assertTrue(unit is step.conduit.init_unit.return_value) + step.conduit.init_unit.assert_called_once_with( + models.Image.TYPE_ID, {'image_id': 'abc123'}, {}, 'abc123') + + class TestSyncStep(unittest.TestCase): def setUp(self): super(TestSyncStep, self).setUp() diff --git a/plugins/types/docker.json b/plugins/types/docker.json index 381c8de0..2442340e 100644 --- a/plugins/types/docker.json +++ b/plugins/types/docker.json @@ -1,4 +1,11 @@ {"types": [ + { + "id": "docker_blob", + "display_name": "Docker Blob", + "description": "Docker Blob", + "unit_key": ["digest"], + "search_indexes": [] + }, { "id": "docker_image", "display_name": "Docker Image",