From 944a05f7c131893a3fd46f1a67a53e4a618d95d6 Mon Sep 17 00:00:00 2001 From: Barnaby Court Date: Tue, 14 Jul 2015 16:10:41 -0400 Subject: [PATCH] bcourt's original mongoengine conversion work --- handlers/test/unit/handlers/test_repolib.py | 383 ++++---- plugins/pulp_rpm/plugins/catalogers/rhui.py | 3 +- plugins/pulp_rpm/plugins/catalogers/yum.py | 5 +- plugins/pulp_rpm/plugins/db/models.py | 829 ++++++++++++------ .../export_distributor/distributor.py | 25 +- .../export_distributor/groupdistributor.py | 13 +- .../iso_distributor/distributor.py | 42 +- .../distributors/iso_distributor/publish.py | 6 +- .../plugins/distributors/yum/configuration.py | 26 +- .../plugins/distributors/yum/distributor.py | 41 +- .../distributors/yum/metadata/filelists.py | 4 +- .../distributors/yum/metadata/other.py | 4 +- .../distributors/yum/metadata/package.py | 109 +-- .../distributors/yum/metadata/primary.py | 4 +- .../distributors/yum/metadata/updateinfo.py | 58 +- .../plugins/distributors/yum/publish.py | 213 ++--- .../plugins/importers/iso/importer.py | 26 +- .../pulp_rpm/plugins/importers/iso/sync.py | 69 +- .../plugins/importers/yum/associate.py | 95 +- .../plugins/importers/yum/depsolve.py | 60 +- .../plugins/importers/yum/existing.py | 95 +- .../plugins/importers/yum/importer.py | 22 +- .../plugins/importers/yum/listener.py | 47 +- .../plugins/importers/yum/parse/treeinfo.py | 76 +- .../pulp_rpm/plugins/importers/yum/purge.py | 6 +- .../plugins/importers/yum/repomd/alternate.py | 9 +- .../plugins/importers/yum/repomd/group.py | 81 +- .../plugins/importers/yum/repomd/metadata.py | 11 +- .../plugins/importers/yum/repomd/presto.py | 30 +- .../plugins/importers/yum/repomd/primary.py | 36 +- .../importers/yum/repomd/updateinfo.py | 6 +- .../pulp_rpm/plugins/importers/yum/report.py | 19 +- .../pulp_rpm/plugins/importers/yum/sync.py | 110 +-- .../pulp_rpm/plugins/importers/yum/upload.py | 90 +- .../plugins/migrations/0011_new_importer.py | 5 +- .../migrations/0016_new_yum_distributor.py | 2 +- .../migrations/0022_rename_unit_id_fields.py | 52 ++ plugins/pulp_rpm/plugins/serializers.py | 89 ++ plugins/setup.py | 12 + .../distributors/yum/metadata/test_other.py | 2 +- .../distributors/yum/test_configuration.py | 32 +- .../distributors/yum/test_distributor.py | 235 +++-- .../plugins/distributors/yum/test_publish.py | 2 +- .../test_0011_migrate_new_importer.py | 23 +- .../test_0016_new_yum_distributor.py | 2 +- plugins/types/iso_support.json | 9 - plugins/types/rpm_support.json | 98 --- 47 files changed, 1800 insertions(+), 1416 deletions(-) create mode 100644 plugins/pulp_rpm/plugins/migrations/0022_rename_unit_id_fields.py create mode 100644 plugins/pulp_rpm/plugins/serializers.py delete mode 100644 plugins/types/iso_support.json delete mode 100644 plugins/types/rpm_support.json diff --git a/handlers/test/unit/handlers/test_repolib.py b/handlers/test/unit/handlers/test_repolib.py index bbaa0193f..7c562541d 100644 --- a/handlers/test/unit/handlers/test_repolib.py +++ b/handlers/test/unit/handlers/test_repolib.py @@ -1,5 +1,6 @@ import os import shutil +import tempfile import unittest from pulp.common.constants import DEFAULT_CA_PATH @@ -9,10 +10,6 @@ from pulp_rpm.handlers.repo_file import MirrorListFile, RepoFile, Repo -TEST_REPO_FILENAME = '/tmp/TestRepolibFile.repo' -TEST_MIRROR_LIST_FILENAME = '/tmp/TestRepolibFile.mirrorlist' -TEST_KEYS_DIR = '/tmp/TestRepolibFile-keys' -TEST_CERT_DIR = '/tmp/TestRepolibFile-certificates' CACERT = 'MY-CA-CERTIFICATE' CLIENTCERT = 'MY-CLIENT-KEY-AND-CERTIFICATE' @@ -21,42 
+18,20 @@ ENABLED = True -# Lock that doesn't require root privileges -_LOCK_FILE = '/tmp/test_repolib_lock.pid' -LOCK = Lock(_LOCK_FILE) - class TestRepolib(unittest.TestCase): def setUp(self): - # Clean up from any previous runs that may have exited abnormally - if os.path.exists(TEST_REPO_FILENAME): - os.remove(TEST_REPO_FILENAME) - - if os.path.exists(TEST_MIRROR_LIST_FILENAME): - os.remove(TEST_MIRROR_LIST_FILENAME) - - if os.path.exists(TEST_KEYS_DIR): - shutil.rmtree(TEST_KEYS_DIR) - - if os.path.exists(TEST_CERT_DIR): - shutil.rmtree(TEST_CERT_DIR) + self.working_dir = tempfile.mkdtemp() + self.TEST_REPO_FILENAME = os.path.join(self.working_dir, 'TestRepolibFile.repo') + self.TEST_MIRROR_LIST_FILENAME = os.path.join(self.working_dir, + 'TestRepolibFile.mirrorlist') + self.TEST_KEYS_DIR = os.path.join(self.working_dir, 'TestRepolibFile-keys') + self.TEST_CERT_DIR = os.path.join(self.working_dir, 'TestRepolibFile-certificates') + self._LOCK_FILE = os.path.join(self.working_dir, 'test_repolib_lock.pid') + self.LOCK = Lock(self._LOCK_FILE) def tearDown(self): - # Clean up in case the test file was saved in a test - if os.path.exists(TEST_REPO_FILENAME): - os.remove(TEST_REPO_FILENAME) - - if os.path.exists(TEST_MIRROR_LIST_FILENAME): - os.remove(TEST_MIRROR_LIST_FILENAME) - - if os.path.exists(TEST_KEYS_DIR): - shutil.rmtree(TEST_KEYS_DIR) - - if os.path.exists(TEST_CERT_DIR): - shutil.rmtree(TEST_CERT_DIR) - - if os.path.exists(_LOCK_FILE): - os.remove(_LOCK_FILE) + shutil.rmtree(self.working_dir) def test_bind_new_file(self): """ @@ -64,12 +39,13 @@ def test_bind_new_file(self): """ url_list = ['http://pulpserver'] - repolib.bind(TEST_REPO_FILENAME, TEST_MIRROR_LIST_FILENAME, TEST_KEYS_DIR, TEST_CERT_DIR, - REPO_ID, REPO_NAME, url_list, {}, CLIENTCERT, ENABLED, LOCK) + repolib.bind(self.TEST_REPO_FILENAME, self.TEST_MIRROR_LIST_FILENAME, self.TEST_KEYS_DIR, + self.TEST_CERT_DIR, + REPO_ID, REPO_NAME, url_list, {}, CLIENTCERT, ENABLED, self.LOCK) - self.assertTrue(os.path.exists(TEST_REPO_FILENAME)) - self.assertTrue(not os.path.exists(TEST_MIRROR_LIST_FILENAME)) - repo_file = RepoFile(TEST_REPO_FILENAME) + self.assertTrue(os.path.exists(self.TEST_REPO_FILENAME)) + self.assertTrue(not os.path.exists(self.TEST_MIRROR_LIST_FILENAME)) + repo_file = RepoFile(self.TEST_REPO_FILENAME) repo_file.load() self.assertEqual(1, len(repo_file.all_repos())) @@ -99,12 +75,14 @@ def test_bind_ssl_verify_false(self): """ url_list = ['http://pulpserver'] - repolib.bind(TEST_REPO_FILENAME, TEST_MIRROR_LIST_FILENAME, TEST_KEYS_DIR, TEST_CERT_DIR, - REPO_ID, REPO_NAME, url_list, {}, CLIENTCERT, ENABLED, LOCK, verify_ssl=False) + repolib.bind(self.TEST_REPO_FILENAME, self.TEST_MIRROR_LIST_FILENAME, self.TEST_KEYS_DIR, + self.TEST_CERT_DIR, + REPO_ID, REPO_NAME, url_list, {}, CLIENTCERT, ENABLED, self.LOCK, + verify_ssl=False) - self.assertTrue(os.path.exists(TEST_REPO_FILENAME)) - self.assertTrue(not os.path.exists(TEST_MIRROR_LIST_FILENAME)) - repo_file = RepoFile(TEST_REPO_FILENAME) + self.assertTrue(os.path.exists(self.TEST_REPO_FILENAME)) + self.assertTrue(not os.path.exists(self.TEST_MIRROR_LIST_FILENAME)) + repo_file = RepoFile(self.TEST_REPO_FILENAME) repo_file.load() self.assertEqual(1, len(repo_file.all_repos())) @@ -134,12 +112,14 @@ def test_bind_ssl_verify_true_default_ca_path(self): """ url_list = ['http://pulpserver'] - repolib.bind(TEST_REPO_FILENAME, TEST_MIRROR_LIST_FILENAME, TEST_KEYS_DIR, TEST_CERT_DIR, - REPO_ID, REPO_NAME, url_list, {}, CLIENTCERT, ENABLED, LOCK, verify_ssl=True) + 
repolib.bind(self.TEST_REPO_FILENAME, self.TEST_MIRROR_LIST_FILENAME, self.TEST_KEYS_DIR, + self.TEST_CERT_DIR, + REPO_ID, REPO_NAME, url_list, {}, CLIENTCERT, ENABLED, self.LOCK, + verify_ssl=True) - self.assertTrue(os.path.exists(TEST_REPO_FILENAME)) - self.assertTrue(not os.path.exists(TEST_MIRROR_LIST_FILENAME)) - repo_file = RepoFile(TEST_REPO_FILENAME) + self.assertTrue(os.path.exists(self.TEST_REPO_FILENAME)) + self.assertTrue(not os.path.exists(self.TEST_MIRROR_LIST_FILENAME)) + repo_file = RepoFile(self.TEST_REPO_FILENAME) repo_file.load() self.assertEqual(1, len(repo_file.all_repos())) @@ -170,13 +150,15 @@ def test_bind_ssl_verify_true_explicit_ca_path(self): url_list = ['http://pulpserver'] ca_path = '/some/path' - repolib.bind(TEST_REPO_FILENAME, TEST_MIRROR_LIST_FILENAME, TEST_KEYS_DIR, TEST_CERT_DIR, - REPO_ID, REPO_NAME, url_list, {}, CLIENTCERT, ENABLED, LOCK, verify_ssl=True, + repolib.bind(self.TEST_REPO_FILENAME, self.TEST_MIRROR_LIST_FILENAME, self.TEST_KEYS_DIR, + self.TEST_CERT_DIR, + REPO_ID, REPO_NAME, url_list, {}, CLIENTCERT, ENABLED, self.LOCK, + verify_ssl=True, ca_path=ca_path) - self.assertTrue(os.path.exists(TEST_REPO_FILENAME)) - self.assertTrue(not os.path.exists(TEST_MIRROR_LIST_FILENAME)) - repo_file = RepoFile(TEST_REPO_FILENAME) + self.assertTrue(os.path.exists(self.TEST_REPO_FILENAME)) + self.assertTrue(not os.path.exists(self.TEST_MIRROR_LIST_FILENAME)) + repo_file = RepoFile(self.TEST_REPO_FILENAME) repo_file.load() self.assertEqual(1, len(repo_file.all_repos())) @@ -207,19 +189,20 @@ def test_bind_existing_file(self): """ # Setup - repo_file = RepoFile(TEST_REPO_FILENAME) + repo_file = RepoFile(self.TEST_REPO_FILENAME) repo_file.add_repo(Repo('existing-repo-1')) repo_file.save() # Test url_list = ['http://pulpserver'] - repolib.bind(TEST_REPO_FILENAME, TEST_MIRROR_LIST_FILENAME, TEST_KEYS_DIR, TEST_CERT_DIR, - REPO_ID, REPO_NAME, url_list, {}, None, ENABLED, LOCK) + repolib.bind(self.TEST_REPO_FILENAME, self.TEST_MIRROR_LIST_FILENAME, self.TEST_KEYS_DIR, + self.TEST_CERT_DIR, + REPO_ID, REPO_NAME, url_list, {}, None, ENABLED, self.LOCK) # Verify - self.assertTrue(os.path.exists(TEST_REPO_FILENAME)) + self.assertTrue(os.path.exists(self.TEST_REPO_FILENAME)) - repo_file = RepoFile(TEST_REPO_FILENAME) + repo_file = RepoFile(self.TEST_REPO_FILENAME) repo_file.load() self.assertEqual(2, len(repo_file.all_repos())) @@ -230,14 +213,16 @@ def test_bind_update_repo(self): remain unchanged. """ url_list = ['http://pulp1', 'http://pulp2'] - repolib.bind(TEST_REPO_FILENAME, TEST_MIRROR_LIST_FILENAME, TEST_KEYS_DIR, TEST_CERT_DIR, - REPO_ID, REPO_NAME, url_list, None, None, ENABLED, LOCK) + repolib.bind(self.TEST_REPO_FILENAME, self.TEST_MIRROR_LIST_FILENAME, self.TEST_KEYS_DIR, + self.TEST_CERT_DIR, + REPO_ID, REPO_NAME, url_list, None, None, ENABLED, self.LOCK) updated_name = 'Updated' - repolib.bind(TEST_REPO_FILENAME, TEST_MIRROR_LIST_FILENAME, TEST_KEYS_DIR, TEST_CERT_DIR, - REPO_ID, updated_name, None, None, None, ENABLED, LOCK) + repolib.bind(self.TEST_REPO_FILENAME, self.TEST_MIRROR_LIST_FILENAME, self.TEST_KEYS_DIR, + self.TEST_CERT_DIR, + REPO_ID, updated_name, None, None, None, ENABLED, self.LOCK) - repo_file = RepoFile(TEST_REPO_FILENAME) + repo_file = RepoFile(self.TEST_REPO_FILENAME) repo_file.load() loaded = repo_file.get_repo(REPO_ID) self.assertEqual(loaded['name'], updated_name) @@ -249,32 +234,36 @@ def test_bind_update_host_urls(self): not necessary in the updated repo. 
""" url_list = ['http://pulp1', 'http://pulp2'] - repolib.bind(TEST_REPO_FILENAME, TEST_MIRROR_LIST_FILENAME, TEST_KEYS_DIR, TEST_CERT_DIR, - REPO_ID, REPO_NAME, url_list, None, None, ENABLED, LOCK) - self.assertTrue(os.path.exists(TEST_MIRROR_LIST_FILENAME)) + repolib.bind(self.TEST_REPO_FILENAME, self.TEST_MIRROR_LIST_FILENAME, self.TEST_KEYS_DIR, + self.TEST_CERT_DIR, + REPO_ID, REPO_NAME, url_list, None, None, ENABLED, self.LOCK) + self.assertTrue(os.path.exists(self.TEST_MIRROR_LIST_FILENAME)) - repolib.bind(TEST_REPO_FILENAME, TEST_MIRROR_LIST_FILENAME, TEST_KEYS_DIR, TEST_CERT_DIR, - REPO_ID, None, ['http://pulpx'], None, None, ENABLED, LOCK) + repolib.bind(self.TEST_REPO_FILENAME, self.TEST_MIRROR_LIST_FILENAME, self.TEST_KEYS_DIR, + self.TEST_CERT_DIR, + REPO_ID, None, ['http://pulpx'], None, None, ENABLED, self.LOCK) - repo_file = RepoFile(TEST_REPO_FILENAME) + repo_file = RepoFile(self.TEST_REPO_FILENAME) repo_file.load() loaded = repo_file.get_repo(REPO_ID) self.assertEqual(loaded['baseurl'], 'http://pulpx') - self.assertTrue(not os.path.exists(TEST_MIRROR_LIST_FILENAME)) + self.assertTrue(not os.path.exists(self.TEST_MIRROR_LIST_FILENAME)) def test_bind_host_urls_one_to_many(self): """ Tests that changing from a single URL to many properly updates the baseurl and mirrorlist entries of the repo. """ - repolib.bind(TEST_REPO_FILENAME, TEST_MIRROR_LIST_FILENAME, TEST_KEYS_DIR, TEST_CERT_DIR, - REPO_ID, REPO_NAME, ['https://pulpx'], None, None, ENABLED, LOCK) + repolib.bind(self.TEST_REPO_FILENAME, self.TEST_MIRROR_LIST_FILENAME, self.TEST_KEYS_DIR, + self.TEST_CERT_DIR, + REPO_ID, REPO_NAME, ['https://pulpx'], None, None, ENABLED, self.LOCK) url_list = ['http://pulp1', 'http://pulp2'] - repolib.bind(TEST_REPO_FILENAME, TEST_MIRROR_LIST_FILENAME, TEST_KEYS_DIR, TEST_CERT_DIR, - REPO_ID, REPO_NAME, url_list, None, None, ENABLED, LOCK) + repolib.bind(self.TEST_REPO_FILENAME, self.TEST_MIRROR_LIST_FILENAME, self.TEST_KEYS_DIR, + self.TEST_CERT_DIR, + REPO_ID, REPO_NAME, url_list, None, None, ENABLED, self.LOCK) - repo_file = RepoFile(TEST_REPO_FILENAME) + repo_file = RepoFile(self.TEST_REPO_FILENAME) repo_file.load() loaded = repo_file.get_repo(REPO_ID) @@ -288,15 +277,17 @@ def test_bind_host_urls_many_to_one(self): """ # Setup url_list = ['http://pulp1', 'http://pulp2'] - repolib.bind(TEST_REPO_FILENAME, TEST_MIRROR_LIST_FILENAME, TEST_KEYS_DIR, TEST_CERT_DIR, - REPO_ID, REPO_NAME, url_list, None, None, ENABLED, LOCK) + repolib.bind(self.TEST_REPO_FILENAME, self.TEST_MIRROR_LIST_FILENAME, self.TEST_KEYS_DIR, + self.TEST_CERT_DIR, + REPO_ID, REPO_NAME, url_list, None, None, ENABLED, self.LOCK) # Test - repolib.bind(TEST_REPO_FILENAME, TEST_MIRROR_LIST_FILENAME, TEST_KEYS_DIR, TEST_CERT_DIR, - REPO_ID, REPO_NAME, ['http://pulpx'], None, None, ENABLED, LOCK) + repolib.bind(self.TEST_REPO_FILENAME, self.TEST_MIRROR_LIST_FILENAME, self.TEST_KEYS_DIR, + self.TEST_CERT_DIR, + REPO_ID, REPO_NAME, ['http://pulpx'], None, None, ENABLED, self.LOCK) # Verify - repo_file = RepoFile(TEST_REPO_FILENAME) + repo_file = RepoFile(self.TEST_REPO_FILENAME) repo_file.load() loaded = repo_file.get_repo(REPO_ID) @@ -308,20 +299,22 @@ def test_bind_update_keys(self): Tests changing the GPG keys on a previously bound repo. 
""" keys = {'key1': 'KEY1', 'key2': 'KEY2'} - repolib.bind(TEST_REPO_FILENAME, TEST_MIRROR_LIST_FILENAME, TEST_KEYS_DIR, TEST_CERT_DIR, - REPO_ID, REPO_NAME, ['http://pulp'], keys, None, ENABLED, LOCK) + repolib.bind(self.TEST_REPO_FILENAME, self.TEST_MIRROR_LIST_FILENAME, self.TEST_KEYS_DIR, + self.TEST_CERT_DIR, + REPO_ID, REPO_NAME, ['http://pulp'], keys, None, ENABLED, self.LOCK) new_keys = {'key1': 'KEYX'} - repolib.bind(TEST_REPO_FILENAME, TEST_MIRROR_LIST_FILENAME, TEST_KEYS_DIR, TEST_CERT_DIR, - REPO_ID, None, None, new_keys, None, ENABLED, LOCK) + repolib.bind(self.TEST_REPO_FILENAME, self.TEST_MIRROR_LIST_FILENAME, self.TEST_KEYS_DIR, + self.TEST_CERT_DIR, + REPO_ID, None, None, new_keys, None, ENABLED, self.LOCK) - repo_file = RepoFile(TEST_REPO_FILENAME) + repo_file = RepoFile(self.TEST_REPO_FILENAME) repo_file.load() loaded = repo_file.get_repo(REPO_ID) self.assertEqual(loaded['gpgcheck'], '1') self.assertEqual(1, len(loaded['gpgkey'].split('\n'))) - self.assertEqual(1, len(os.listdir(os.path.join(TEST_KEYS_DIR, REPO_ID)))) + self.assertEqual(1, len(os.listdir(os.path.join(self.TEST_KEYS_DIR, REPO_ID)))) key_file = open(loaded['gpgkey'].split('\n')[0][5:], 'r') contents = key_file.read() @@ -335,52 +328,54 @@ def test_bind_update_remove_keys(self): configures the repo and deletes the key files. """ keys = {'key1': 'KEY1', 'key2': 'KEY2'} - repolib.bind(TEST_REPO_FILENAME, TEST_MIRROR_LIST_FILENAME, TEST_KEYS_DIR, TEST_CERT_DIR, - REPO_ID, REPO_NAME, ['http://pulp'], keys, None, ENABLED, LOCK) + repolib.bind(self.TEST_REPO_FILENAME, self.TEST_MIRROR_LIST_FILENAME, self.TEST_KEYS_DIR, + self.TEST_CERT_DIR, + REPO_ID, REPO_NAME, ['http://pulp'], keys, None, ENABLED, self.LOCK) - repolib.bind(TEST_REPO_FILENAME, TEST_MIRROR_LIST_FILENAME, TEST_KEYS_DIR, TEST_CERT_DIR, - REPO_ID, None, None, {}, None, ENABLED, LOCK) + repolib.bind(self.TEST_REPO_FILENAME, self.TEST_MIRROR_LIST_FILENAME, self.TEST_KEYS_DIR, + self.TEST_CERT_DIR, + REPO_ID, None, None, {}, None, ENABLED, self.LOCK) - repo_file = RepoFile(TEST_REPO_FILENAME) + repo_file = RepoFile(self.TEST_REPO_FILENAME) repo_file.load() loaded = repo_file.get_repo(REPO_ID) self.assertEqual(loaded['gpgcheck'], '0') self.assertEqual(loaded['gpgkey'], None) - self.assertTrue(not os.path.exists(os.path.join(TEST_KEYS_DIR, REPO_ID))) + self.assertTrue(not os.path.exists(os.path.join(self.TEST_KEYS_DIR, REPO_ID))) def test_clear_ca_path(self): repolib.bind( - TEST_REPO_FILENAME, - TEST_MIRROR_LIST_FILENAME, - TEST_KEYS_DIR, - TEST_CERT_DIR, + self.TEST_REPO_FILENAME, + self.TEST_MIRROR_LIST_FILENAME, + self.TEST_KEYS_DIR, + self.TEST_CERT_DIR, REPO_ID, REPO_NAME, ['http://pulp'], [], CLIENTCERT, ENABLED, - LOCK, + self.LOCK, verify_ssl=True, ca_path='/some/path') repolib.bind( - TEST_REPO_FILENAME, - TEST_MIRROR_LIST_FILENAME, - TEST_KEYS_DIR, - TEST_CERT_DIR, + self.TEST_REPO_FILENAME, + self.TEST_MIRROR_LIST_FILENAME, + self.TEST_KEYS_DIR, + self.TEST_CERT_DIR, REPO_ID, REPO_NAME, ['http://pulp'], [], CLIENTCERT, ENABLED, - LOCK) + self.LOCK) - repo_file = RepoFile(TEST_REPO_FILENAME) + repo_file = RepoFile(self.TEST_REPO_FILENAME) repo_file.load() loaded = repo_file.get_repo(REPO_ID) - certdir = os.path.join(TEST_CERT_DIR, REPO_ID) + certdir = os.path.join(self.TEST_CERT_DIR, REPO_ID) self.assertTrue(len(os.listdir(certdir)), 1) path = loaded['sslclientcert'] f = open(path) @@ -392,73 +387,73 @@ def test_clear_ca_path(self): def test_clear_clientcert(self): # setup repolib.bind( - TEST_REPO_FILENAME, - TEST_MIRROR_LIST_FILENAME, - 
TEST_KEYS_DIR, - TEST_CERT_DIR, + self.TEST_REPO_FILENAME, + self.TEST_MIRROR_LIST_FILENAME, + self.TEST_KEYS_DIR, + self.TEST_CERT_DIR, REPO_ID, REPO_NAME, ['http://pulp'], [], CLIENTCERT, ENABLED, - LOCK) + self.LOCK) repolib.bind( - TEST_REPO_FILENAME, - TEST_MIRROR_LIST_FILENAME, - TEST_KEYS_DIR, - TEST_CERT_DIR, + self.TEST_REPO_FILENAME, + self.TEST_MIRROR_LIST_FILENAME, + self.TEST_KEYS_DIR, + self.TEST_CERT_DIR, REPO_ID, REPO_NAME, ['http://pulp'], [], None, ENABLED, - LOCK, + self.LOCK, verify_ssl=True) - repo_file = RepoFile(TEST_REPO_FILENAME) + repo_file = RepoFile(self.TEST_REPO_FILENAME) repo_file.load() loaded = repo_file.get_repo(REPO_ID) - certdir = os.path.join(TEST_CERT_DIR, REPO_ID) + certdir = os.path.join(self.TEST_CERT_DIR, REPO_ID) self.assertFalse(os.path.exists(certdir)) self.assertTrue(loaded['sslverify'], '1') def test_update_ca_path(self): NEW_PATH = '/new/path/' repolib.bind( - TEST_REPO_FILENAME, - TEST_MIRROR_LIST_FILENAME, - TEST_KEYS_DIR, - TEST_CERT_DIR, + self.TEST_REPO_FILENAME, + self.TEST_MIRROR_LIST_FILENAME, + self.TEST_KEYS_DIR, + self.TEST_CERT_DIR, REPO_ID, REPO_NAME, ['http://pulp'], [], CLIENTCERT, ENABLED, - LOCK, + self.LOCK, verify_ssl=True, ca_path='/some/path/') repolib.bind( - TEST_REPO_FILENAME, - TEST_MIRROR_LIST_FILENAME, - TEST_KEYS_DIR, - TEST_CERT_DIR, + self.TEST_REPO_FILENAME, + self.TEST_MIRROR_LIST_FILENAME, + self.TEST_KEYS_DIR, + self.TEST_CERT_DIR, REPO_ID, REPO_NAME, ['http://pulp'], [], CLIENTCERT, ENABLED, - LOCK, + self.LOCK, verify_ssl=True, ca_path=NEW_PATH) - repo_file = RepoFile(TEST_REPO_FILENAME) + repo_file = RepoFile(self.TEST_REPO_FILENAME) repo_file.load() loaded = repo_file.get_repo(REPO_ID) - certdir = os.path.join(TEST_CERT_DIR, REPO_ID) + certdir = os.path.join(self.TEST_CERT_DIR, REPO_ID) self.assertTrue(len(os.listdir(certdir)), 1) path = loaded['sslcacert'] self.assertEqual(path, NEW_PATH) @@ -472,35 +467,35 @@ def test_update_ca_path(self): def test_update_clientcert(self): NEWCLIENTCRT = 'THE-NEW-CLIENT-CERT' repolib.bind( - TEST_REPO_FILENAME, - TEST_MIRROR_LIST_FILENAME, - TEST_KEYS_DIR, - TEST_CERT_DIR, + self.TEST_REPO_FILENAME, + self.TEST_MIRROR_LIST_FILENAME, + self.TEST_KEYS_DIR, + self.TEST_CERT_DIR, REPO_ID, REPO_NAME, ['http://pulp'], [], CLIENTCERT, ENABLED, - LOCK) + self.LOCK) repolib.bind( - TEST_REPO_FILENAME, - TEST_MIRROR_LIST_FILENAME, - TEST_KEYS_DIR, - TEST_CERT_DIR, + self.TEST_REPO_FILENAME, + self.TEST_MIRROR_LIST_FILENAME, + self.TEST_KEYS_DIR, + self.TEST_CERT_DIR, REPO_ID, REPO_NAME, ['http://pulp'], [], NEWCLIENTCRT, ENABLED, - LOCK) + self.LOCK) - repo_file = RepoFile(TEST_REPO_FILENAME) + repo_file = RepoFile(self.TEST_REPO_FILENAME) repo_file.load() loaded = repo_file.get_repo(REPO_ID) - certdir = os.path.join(TEST_CERT_DIR, REPO_ID) + certdir = os.path.join(self.TEST_CERT_DIR, REPO_ID) self.assertTrue(len(os.listdir(certdir)), 1) path = loaded['sslclientcert'] f = open(path) @@ -515,12 +510,13 @@ def test_bind_single_url(self): """ url_list = ['http://pulpserver'] - repolib.bind(TEST_REPO_FILENAME, TEST_MIRROR_LIST_FILENAME, TEST_KEYS_DIR, TEST_CERT_DIR, - REPO_ID, REPO_NAME, url_list, {}, None, ENABLED, LOCK) + repolib.bind(self.TEST_REPO_FILENAME, self.TEST_MIRROR_LIST_FILENAME, self.TEST_KEYS_DIR, + self.TEST_CERT_DIR, + REPO_ID, REPO_NAME, url_list, {}, None, ENABLED, self.LOCK) - self.assertTrue(os.path.exists(TEST_REPO_FILENAME)) - self.assertTrue(not os.path.exists(TEST_MIRROR_LIST_FILENAME)) - repo_file = RepoFile(TEST_REPO_FILENAME) + 
self.assertTrue(os.path.exists(self.TEST_REPO_FILENAME)) + self.assertTrue(not os.path.exists(self.TEST_MIRROR_LIST_FILENAME)) + repo_file = RepoFile(self.TEST_REPO_FILENAME) repo_file.load() loaded = repo_file.get_repo(REPO_ID) self.assertEqual(loaded['baseurl'], url_list[0]) @@ -533,17 +529,18 @@ def test_bind_multiple_url(self): """ url_list = ['http://pulpserver', 'http://otherserver'] - repolib.bind(TEST_REPO_FILENAME, TEST_MIRROR_LIST_FILENAME, TEST_KEYS_DIR, TEST_CERT_DIR, - REPO_ID, REPO_NAME, url_list, {}, None, ENABLED, LOCK) + repolib.bind(self.TEST_REPO_FILENAME, self.TEST_MIRROR_LIST_FILENAME, self.TEST_KEYS_DIR, + self.TEST_CERT_DIR, + REPO_ID, REPO_NAME, url_list, {}, None, ENABLED, self.LOCK) - self.assertTrue(os.path.exists(TEST_REPO_FILENAME)) - self.assertTrue(os.path.exists(TEST_MIRROR_LIST_FILENAME)) - repo_file = RepoFile(TEST_REPO_FILENAME) + self.assertTrue(os.path.exists(self.TEST_REPO_FILENAME)) + self.assertTrue(os.path.exists(self.TEST_MIRROR_LIST_FILENAME)) + repo_file = RepoFile(self.TEST_REPO_FILENAME) repo_file.load() loaded = repo_file.get_repo(REPO_ID) self.assertTrue('baseurl' not in loaded) - self.assertEqual(loaded['mirrorlist'], 'file:' + TEST_MIRROR_LIST_FILENAME) - mirror_list_file = MirrorListFile(TEST_MIRROR_LIST_FILENAME) + self.assertEqual(loaded['mirrorlist'], 'file:' + self.TEST_MIRROR_LIST_FILENAME) + mirror_list_file = MirrorListFile(self.TEST_MIRROR_LIST_FILENAME) mirror_list_file.load() self.assertEqual(mirror_list_file.entries[0], 'http://pulpserver') self.assertEqual(mirror_list_file.entries[1], 'http://otherserver') @@ -555,15 +552,16 @@ def test_bind_multiple_keys(self): url_list = ['http://pulpserver'] keys = {'key1': 'KEY1', 'key2': 'KEY2'} - repolib.bind(TEST_REPO_FILENAME, TEST_MIRROR_LIST_FILENAME, TEST_KEYS_DIR, TEST_CERT_DIR, - REPO_ID, REPO_NAME, url_list, keys, None, ENABLED, LOCK) + repolib.bind(self.TEST_REPO_FILENAME, self.TEST_MIRROR_LIST_FILENAME, self.TEST_KEYS_DIR, + self.TEST_CERT_DIR, + REPO_ID, REPO_NAME, url_list, keys, None, ENABLED, self.LOCK) - repo_file = RepoFile(TEST_REPO_FILENAME) + repo_file = RepoFile(self.TEST_REPO_FILENAME) repo_file.load() loaded = repo_file.get_repo(REPO_ID) self.assertEqual(loaded['gpgcheck'], '1') self.assertEqual(2, len(loaded['gpgkey'].split('\n'))) - self.assertEqual(2, len(os.listdir(os.path.join(TEST_KEYS_DIR, REPO_ID)))) + self.assertEqual(2, len(os.listdir(os.path.join(self.TEST_KEYS_DIR, REPO_ID)))) def test_unbind_repo_exists(self): """ @@ -572,22 +570,23 @@ def test_unbind_repo_exists(self): # Setup repoid = 'test-unbind-repo' - repo_file = RepoFile(TEST_REPO_FILENAME) + repo_file = RepoFile(self.TEST_REPO_FILENAME) repo_file.add_repo(Repo(repoid)) repo_file.save() # Test - repolib.unbind(TEST_REPO_FILENAME, TEST_MIRROR_LIST_FILENAME, TEST_KEYS_DIR, TEST_CERT_DIR, - 'test-unbind-repo', LOCK) + repolib.unbind(self.TEST_REPO_FILENAME, self.TEST_MIRROR_LIST_FILENAME, self.TEST_KEYS_DIR, + self.TEST_CERT_DIR, + 'test-unbind-repo', self.LOCK) # verify - repo_file = RepoFile(TEST_REPO_FILENAME) + repo_file = RepoFile(self.TEST_REPO_FILENAME) repo_file.load( allow_missing=False) # the file should still be there, so error if it doesn't self.assertEqual(0, len(repo_file.all_repos())) - certdir = os.path.join(TEST_CERT_DIR, repoid) + certdir = os.path.join(self.TEST_CERT_DIR, repoid) self.assertFalse(os.path.exists(certdir)) def test_unbind_repo_with_mirrorlist(self): @@ -596,17 +595,19 @@ def test_unbind_repo_with_mirrorlist(self): file. 
""" url_list = ['http://pulp1', 'http://pulp2', 'http://pulp3'] - repolib.bind(TEST_REPO_FILENAME, TEST_MIRROR_LIST_FILENAME, TEST_KEYS_DIR, TEST_CERT_DIR, - REPO_ID, REPO_NAME, url_list, {}, None, ENABLED, LOCK) - self.assertTrue(os.path.exists(TEST_MIRROR_LIST_FILENAME)) + repolib.bind(self.TEST_REPO_FILENAME, self.TEST_MIRROR_LIST_FILENAME, self.TEST_KEYS_DIR, + self.TEST_CERT_DIR, + REPO_ID, REPO_NAME, url_list, {}, None, ENABLED, self.LOCK) + self.assertTrue(os.path.exists(self.TEST_MIRROR_LIST_FILENAME)) - repolib.unbind(TEST_REPO_FILENAME, TEST_MIRROR_LIST_FILENAME, TEST_KEYS_DIR, TEST_CERT_DIR, - REPO_ID, LOCK) + repolib.unbind(self.TEST_REPO_FILENAME, self.TEST_MIRROR_LIST_FILENAME, self.TEST_KEYS_DIR, + self.TEST_CERT_DIR, + REPO_ID, self.LOCK) - repo_file = RepoFile(TEST_REPO_FILENAME) + repo_file = RepoFile(self.TEST_REPO_FILENAME) repo_file.load() self.assertEqual(0, len(repo_file.all_repos())) - self.assertTrue(not os.path.exists(TEST_MIRROR_LIST_FILENAME)) + self.assertTrue(not os.path.exists(self.TEST_MIRROR_LIST_FILENAME)) def test_unbind_repo_with_keys(self): """ @@ -614,14 +615,16 @@ def test_unbind_repo_with_keys(self): """ url_list = ['http://pulp1'] keys = {'key1': 'KEY1', 'key2': 'KEY2'} - repolib.bind(TEST_REPO_FILENAME, TEST_MIRROR_LIST_FILENAME, TEST_KEYS_DIR, TEST_CERT_DIR, - REPO_ID, REPO_NAME, url_list, keys, None, ENABLED, LOCK) - self.assertTrue(os.path.exists(os.path.join(TEST_KEYS_DIR, REPO_ID))) + repolib.bind(self.TEST_REPO_FILENAME, self.TEST_MIRROR_LIST_FILENAME, self.TEST_KEYS_DIR, + self.TEST_CERT_DIR, + REPO_ID, REPO_NAME, url_list, keys, None, ENABLED, self.LOCK) + self.assertTrue(os.path.exists(os.path.join(self.TEST_KEYS_DIR, REPO_ID))) - repolib.unbind(TEST_REPO_FILENAME, TEST_MIRROR_LIST_FILENAME, TEST_KEYS_DIR, TEST_CERT_DIR, - REPO_ID, LOCK) + repolib.unbind(self.TEST_REPO_FILENAME, self.TEST_MIRROR_LIST_FILENAME, self.TEST_KEYS_DIR, + self.TEST_CERT_DIR, + REPO_ID, self.LOCK) - self.assertTrue(not os.path.exists(os.path.join(TEST_KEYS_DIR, REPO_ID))) + self.assertTrue(not os.path.exists(os.path.join(self.TEST_KEYS_DIR, REPO_ID))) def test_unbind_missing_file(self): """ @@ -630,11 +633,12 @@ def test_unbind_missing_file(self): """ # Setup - self.assertTrue(not os.path.exists(TEST_REPO_FILENAME)) + self.assertTrue(not os.path.exists(self.TEST_REPO_FILENAME)) # Test - repolib.unbind(TEST_REPO_FILENAME, TEST_MIRROR_LIST_FILENAME, TEST_KEYS_DIR, TEST_CERT_DIR, - REPO_ID, LOCK) + repolib.unbind(self.TEST_REPO_FILENAME, self.TEST_MIRROR_LIST_FILENAME, self.TEST_KEYS_DIR, + self.TEST_CERT_DIR, + REPO_ID, self.LOCK) # Verify # The above shouldn't throw an error @@ -644,22 +648,25 @@ def test_unbind_missing_repo(self): Tests that calling unbind on a repo that isn't bound does not result in an error. """ - repolib.bind(TEST_REPO_FILENAME, TEST_MIRROR_LIST_FILENAME, TEST_KEYS_DIR, TEST_CERT_DIR, - REPO_ID, REPO_NAME, ['http://pulp'], {}, None, ENABLED, LOCK) + repolib.bind(self.TEST_REPO_FILENAME, self.TEST_MIRROR_LIST_FILENAME, self.TEST_KEYS_DIR, + self.TEST_CERT_DIR, + REPO_ID, REPO_NAME, ['http://pulp'], {}, None, ENABLED, self.LOCK) # This shouldn't throw an error; the net effect is still that the repo is unbound. This test # just makes sure this runs without error, which is why there are no assertions. 
- repolib.unbind(TEST_REPO_FILENAME, TEST_MIRROR_LIST_FILENAME, TEST_KEYS_DIR, TEST_CERT_DIR, - 'fake-repo', LOCK) + repolib.unbind(self.TEST_REPO_FILENAME, self.TEST_MIRROR_LIST_FILENAME, self.TEST_KEYS_DIR, + self.TEST_CERT_DIR, + 'fake-repo', self.LOCK) def test_delete_repo_file(self): """ Tests that calling delete_repo_file deletes the repo file. """ - repolib.bind(TEST_REPO_FILENAME, TEST_MIRROR_LIST_FILENAME, TEST_KEYS_DIR, TEST_CERT_DIR, - REPO_ID, REPO_NAME, ['http://pulp'], {}, None, ENABLED, LOCK) - self.assertTrue(os.path.exists(TEST_REPO_FILENAME)) + repolib.bind(self.TEST_REPO_FILENAME, self.TEST_MIRROR_LIST_FILENAME, self.TEST_KEYS_DIR, + self.TEST_CERT_DIR, + REPO_ID, REPO_NAME, ['http://pulp'], {}, None, ENABLED, self.LOCK) + self.assertTrue(os.path.exists(self.TEST_REPO_FILENAME)) - repolib.delete_repo_file(TEST_REPO_FILENAME, LOCK) + repolib.delete_repo_file(self.TEST_REPO_FILENAME, self.LOCK) - self.assertFalse(os.path.exists(TEST_REPO_FILENAME)) + self.assertFalse(os.path.exists(self.TEST_REPO_FILENAME)) diff --git a/plugins/pulp_rpm/plugins/catalogers/rhui.py b/plugins/pulp_rpm/plugins/catalogers/rhui.py index ec5b77a1f..4f7f7cb81 100644 --- a/plugins/pulp_rpm/plugins/catalogers/rhui.py +++ b/plugins/pulp_rpm/plugins/catalogers/rhui.py @@ -3,6 +3,7 @@ from base64 import urlsafe_b64encode from contextlib import closing +from pulp_rpm.common import ids from pulp_rpm.plugins.db import models from pulp_rpm.plugins.catalogers.yum import YumCataloger @@ -32,7 +33,7 @@ def metadata(cls): return { 'id': TYPE_ID, 'display_name': "RHUI Cataloger", - 'types': [models.RPM.TYPE] + 'types': [ids.TYPE_ID_RPM] } def nectar_config(self, config): diff --git a/plugins/pulp_rpm/plugins/catalogers/yum.py b/plugins/pulp_rpm/plugins/catalogers/yum.py index 97a9e6326..534cd6ffa 100644 --- a/plugins/pulp_rpm/plugins/catalogers/yum.py +++ b/plugins/pulp_rpm/plugins/catalogers/yum.py @@ -5,6 +5,7 @@ from pulp.plugins.cataloger import Cataloger from pulp.server.content.sources import descriptor +from pulp_rpm.common import ids from pulp_rpm.plugins.db import models from pulp_rpm.plugins.importers.yum.repomd.metadata import MetadataFiles from pulp_rpm.plugins.importers.yum.repomd import primary, nectar_factory @@ -29,7 +30,7 @@ def metadata(cls): return { 'id': TYPE_ID, 'display_name': "Yum Cataloger", - 'types': [models.RPM.TYPE] + 'types': [ids.TYPE_ID_RPM] } @staticmethod @@ -50,7 +51,7 @@ def _add_packages(conduit, base_url, md_files): for model in _packages: unit_key = model.unit_key url = urljoin(base_url, model.download_path) - conduit.add_entry(models.RPM.TYPE, unit_key, url) + conduit.add_entry(models.RPM.unit_type_id, unit_key, url) finally: fp.close() diff --git a/plugins/pulp_rpm/plugins/db/models.py b/plugins/pulp_rpm/plugins/db/models.py index 290fa0d2c..9cf235628 100644 --- a/plugins/pulp_rpm/plugins/db/models.py +++ b/plugins/pulp_rpm/plugins/db/models.py @@ -1,93 +1,72 @@ import csv import logging import os -from collections import namedtuple from gettext import gettext as _ from urlparse import urljoin +import mongoengine from pulp.plugins.util import verification +from pulp.server.db.model import ContentUnit -from pulp_rpm.common import constants, ids, version_utils +from pulp_rpm.common import version_utils from pulp_rpm.common import file_utils - +from pulp_rpm.plugins import serializers _LOGGER = logging.getLogger(__name__) -class Package(object): - UNIT_KEY_NAMES = tuple() - TYPE = None - NAMEDTUPLE = None +class Package(ContentUnit): - def __init__(self, local_vars): - 
self.metadata = local_vars.get('metadata', {}) - for name in self.UNIT_KEY_NAMES: - setattr(self, name, local_vars[name]) - # Add the serialized version and release if available - if name == 'version': - self.metadata['version_sort_index'] = version_utils.encode(local_vars[name]) - elif name == 'release': - self.metadata['release_sort_index'] = version_utils.encode(local_vars[name]) + meta = { + 'abstract': True, + } - @property - def unit_key(self): - key = {} - for name in self.UNIT_KEY_NAMES: - key[name] = getattr(self, name) - return key + def __str__(self): + return '%s: %s' % (self.unit_type_id, + '-'.join(getattr(self, name) for name in self.unit_key_fields)) - @property - def as_named_tuple(self): - """ - :return: - :rtype collections.namedtuple - """ - return self.NAMEDTUPLE(**self.unit_key) +class VersionedPackage(Package): + + # All subclasses use both a version and a release + version = mongoengine.StringField(required=True) + release = mongoengine.StringField(required=True) + + # We generate these two + version_sort_index = mongoengine.StringField() + release_sort_index = mongoengine.StringField() + + meta = { + 'abstract': True, + } @classmethod - def from_package_info(cls, package_info): - unit_key = {} - metadata = {} - for key, value in package_info.iteritems(): - if key in cls.UNIT_KEY_NAMES: - unit_key[key] = value - elif key == 'type' and cls != Errata: - continue - else: - metadata[key] = value - unit_key['metadata'] = metadata - - return cls(**unit_key) - - def clean_metadata(self): - """ - Iterate through each key in the "metadata" dict, and if it starts with - a "_", delete it. This is to clean out mongo-specific and platform-specific - data. In the future, this will likely go away if we more strongly define - which fields each model will hold. 
+    def pre_save_signal(cls, sender, document, **kwargs):
         """
-        for key in self.metadata.keys():
-            if key.startswith('_'):
-                del self.metadata[key]
-
-    def __str__(self):
-        return '%s: %s' % (self.TYPE, '-'.join(getattr(self, name) for name in self.UNIT_KEY_NAMES))
+        Generate the version and release sort indexes before saving.
+
+        :param sender: sender class
+        :type sender: object
+        :param document: Document that sent the signal
+        :type document: pulp_rpm.plugins.db.models.VersionedPackage
         """
+        super(VersionedPackage, cls).pre_save_signal(sender, document, **kwargs)
+        document.version_sort_index = version_utils.encode(document.version)
+        document.release_sort_index = version_utils.encode(document.release)

-class VersionedPackage(Package):
+    # Used by RPM, SRPM, DRPM
     @property
     def key_string_without_version(self):
-        keys = [getattr(self, key) for key in self.UNIT_KEY_NAMES if
-                key not in ['epoch', 'version', 'release', 'checksum', 'checksumtype']]
-        keys.append(self.TYPE)
+        keys = [getattr(self, key) for key in self.unit_key_fields if
+                key not in ['epoch', 'version', 'release', 'checksum', 'checksum_type']]
+        keys.append(self.unit_type_id)
         return '-'.join(keys)

     @property
     def complete_version(self):
         values = []
         for name in ('epoch', 'version', 'release'):
-            if name in self.UNIT_KEY_NAMES:
+            if name in self.unit_key_fields:
                 values.append(getattr(self, name))
         return tuple(values)

@@ -95,6 +74,7 @@ def complete_version(self):
     def complete_version_serialized(self):
         return tuple(version_utils.encode(field) for field in self.complete_version)

+    # TODO: confirm that __cmp__ still behaves as expected with mongoengine's BaseDocument
     def __cmp__(self, other):
         return cmp(
             self.complete_version_serialized,
@@ -103,19 +83,38 @@ class Distribution(Package):
-    UNIT_KEY_NAMES = ('id', 'family', 'variant', 'version', 'arch')
-    TYPE = ids.TYPE_ID_DISTRO
-
-    def __init__(self, family, variant, version, arch, metadata, id=None):
-        kwargs = locals()
-        # I don't know why this is the "id", but am following the pattern of the
-        # original importer
-        if kwargs['id'] is None:
-            # the original importer leaves out any elements that are None, so
-            # we will blindly trust that here.
-            id_pieces = filter(lambda x: x is not None, ('ks', family, variant, version, arch))
-            kwargs['id'] = '-'.join(id_pieces)
-        super(Distribution, self).__init__(kwargs)
+
+    distribution_id = mongoengine.StringField(required=True)
+    family = mongoengine.StringField(required=True)
+    variant = mongoengine.StringField(required=True)
+    version = mongoengine.StringField(required=True)
+    arch = mongoengine.StringField(required=True)
+
+    files = mongoengine.ListField()
+    timestamp = mongoengine.FloatField()
+    packagedir = mongoengine.StringField()
+
+    # Pretty sure the version_sort_index is never used for Distribution units
+    version_sort_index = mongoengine.StringField()
+
+    # For backward compatibility
+    _ns = mongoengine.StringField(default='units_distribution')
+    unit_type_id = mongoengine.StringField(db_field='_content_type_id', required=True,
+                                           default='distribution')
+
+    unit_key_fields = ('distribution_id', 'family', 'variant', 'version', 'arch')
+
+    meta = {'collection': 'units_distribution',
+            'indexes': [
+                'distribution_id', 'family', 'variant', 'version', 'arch',
+                # Unit key Index
+                {
+                    'fields': ['distribution_id', 'family', 'variant', 'version', 'arch'],
+                    'unique': True
+                }],
+            'allow_inheritance': False}
+
+    SERIALIZER = serializers.Distribution

     @property
     def relative_path(self):
@@ -124,7 +123,30 @@
         related files get stored. For most unit types, this path is to one
         file.
         """
-        return self.id
+        return self.distribution_id
+
+    @classmethod
+    def pre_save_signal(cls, sender, document, **kwargs):
+        """
+        Generate the version sort index and the distribution_id before saving.
+
+        :param sender: sender class
+        :type sender: object
+        :param document: Document that sent the signal
+        :type document: pulp_rpm.plugins.db.models.Distribution
+        """
+        document.version_sort_index = version_utils.encode(document.version)
+        if not document.distribution_id:
+            # the original importer leaves out any elements that are None, so
+            # we will blindly trust that here.
+            id_pieces = filter(lambda x: x is not None,
+                               ('ks',
+                                document.family,
+                                document.variant,
+                                document.version,
+                                document.arch))
+            document.distribution_id = '-'.join(id_pieces)
+        super(Package, cls).pre_save_signal(sender, document, **kwargs)

     def process_download_reports(self, reports):
         """
@@ -134,22 +156,21 @@
         :param reports: list of successful download reports
         :type reports: list(pulp.common.download.report.DownloadReport)
         """
-        # TODO: maybe this shouldn't be in common
-        metadata_files = self.metadata.setdefault('files', [])
+        if not isinstance(self.files, list):
+            self.files = []
+
         for report in reports:
             # the following data model is mostly intended to match what the
             # previous importer generated.
- metadata_files.append({ + self.files.append({ 'checksum': report.data['checksum'], 'checksumtype': verification.sanitize_checksum_type(report.data['checksumtype']), 'downloadurl': report.url, 'filename': os.path.basename(report.data['relativepath']), 'fileName': os.path.basename(report.data['relativepath']), - 'item_type': self.TYPE, + 'item_type': "distribution", 'pkgpath': os.path.join( - constants.DISTRIBUTION_STORAGE_PATH, - self.id, - os.path.dirname(report.data['relativepath']), + self.storage_path, os.path.dirname(report.data['relativepath']), ), 'relativepath': report.data['relativepath'], 'savepath': report.destination, @@ -158,59 +179,198 @@ def process_download_reports(self, reports): class DRPM(VersionedPackage): - UNIT_KEY_NAMES = ('epoch', 'version', 'release', 'filename', 'checksumtype', 'checksum') - TYPE = ids.TYPE_ID_DRPM - def __init__(self, epoch, version, release, filename, checksumtype, checksum, metadata): - checksumtype = verification.sanitize_checksum_type(checksumtype) - Package.__init__(self, locals()) + # Unit Key Fields + epoch = mongoengine.StringField(required=True) + file_name = mongoengine.StringField(db_field='filename', required=True) + checksum_type = mongoengine.StringField(db_field='checksumtype', required=True) + checksum = mongoengine.StringField(required=True) + + # Other Fields + sequence = mongoengine.StringField() + new_package = mongoengine.StringField() + arch = mongoengine.StringField() + size = mongoengine.IntField() + old_epoch = mongoengine.StringField(db_field='oldepoch') + old_version = mongoengine.StringField(db_field='oldversion') + old_release = mongoengine.StringField(db_field='oldrelease') + + # For backward compatibility + _ns = mongoengine.StringField(default='units_drpm') + unit_type_id = mongoengine.StringField(db_field='_content_type_id', required=True, + default='drpm') + + unit_key_fields = ('epoch', 'version', 'release', 'file_name', 'checksum_type', 'checksum') + + meta = {'collection': 'units_drpm', + 'indexes': [ + "epoch", "version", "release", "file_name", "checksum", + # Unit key Index + { + 'fields': ["epoch", "version", "release", 'file_name', "checksum_type", "checksum"], + 'unique': True + }], + 'allow_inheritance': False} + + SERIALIZER = serializers.Drpm + + def __init__(self, *args, **kwargs): + if 'checksum_type' in kwargs: + kwargs['checksum_type'] = verification.sanitize_checksum_type(kwargs['checksum_type']) + super(DRPM, self).__init__(*args, **kwargs) @property def relative_path(self): - return self.filename + """ + This should only be used during the initial sync + """ + return self.file_name @property def download_path(self): - return self.filename + """ + This should only be used during the initial sync + """ + return self.file_name + + +class RpmBase(VersionedPackage): + + # Unit Key Fields + name = mongoengine.StringField(required=True) + epoch = mongoengine.StringField(required=True) + version = mongoengine.StringField(required=True) + release = mongoengine.StringField(required=True) + arch = mongoengine.StringField(required=True) + checksum_type = mongoengine.StringField(db_field='checksumtype', required=True) + checksum = mongoengine.StringField(required=True) + + # Other Fields + build_time = mongoengine.IntField() + buildhost = mongoengine.StringField() + vendor = mongoengine.StringField() + size = mongoengine.IntField() + base_url = mongoengine.StringField() + file_name = mongoengine.StringField(db_field='filename') + relative_url_path = mongoengine.StringField() + relative_path = 
mongoengine.StringField(db_field='relativepath') + group = mongoengine.StringField() + + provides = mongoengine.ListField() + files = mongoengine.DictField() + repodata = mongoengine.DictField(default={}) + description = mongoengine.StringField() + header_range = mongoengine.DictField() + source_rpm = mongoengine.StringField(db_field='sourcerpm') + license = mongoengine.StringField() + changelog = mongoengine.ListField() + url = mongoengine.StringField() + summary = mongoengine.StringField() + time = mongoengine.IntField() + requires = mongoengine.ListField() + + # For backward compatibility + _ns = mongoengine.StringField(default='units_rpm') + unit_type_id = mongoengine.StringField(db_field='_content_type_id', required=True, + default='rpm') + + unit_key_fields = ('name', 'epoch', 'version', 'release', 'arch', 'checksum_type', 'checksum') + + meta = {'indexes': [ + "name", "epoch", "version", "release", "arch", "file_name", "checksum", + "checksum_type", "version_sort_index", + ("version_sort_index", "release_sort_index"), + # Unit key Index + { + 'fields': ["name", "epoch", "version", "release", "arch", + "checksum_type", "checksum"], + 'unique': True + }], + 'abstract': True} + + SERIALIZER = serializers.RpmBase + + def __init__(self, *args, **kwargs): + if 'checksum_type' in kwargs: + kwargs['checksum_type'] = verification.sanitize_checksum_type(kwargs['checksum_type']) + super(RpmBase, self).__init__(*args, **kwargs) + # raw_xml is only used during the initial sync + self.raw_xml = '' + @property + def download_path(self): + """ + This should only be used during the initial sync + """ + return os.path.join(self.checksum, self.file_name) -class RPM(VersionedPackage): - UNIT_KEY_NAMES = ('name', 'epoch', 'version', 'release', 'arch', 'checksumtype', 'checksum') - TYPE = ids.TYPE_ID_RPM - def __init__(self, name, epoch, version, release, arch, checksumtype, checksum, metadata): - checksumtype = verification.sanitize_checksum_type(checksumtype) - Package.__init__(self, locals()) - self.raw_xml = '' +class RPM(RpmBase): - @property - def relative_path(self): - unit_key = self.unit_key - return os.path.join( - unit_key['name'], unit_key['version'], unit_key['release'], - unit_key['arch'], unit_key['checksum'], self.metadata['filename'] - ) + # For backward compatibility + _ns = mongoengine.StringField(default='units_rpm') + unit_type_id = mongoengine.StringField(db_field='_content_type_id', required=True, + default='rpm') + meta = {'collection': 'units_rpm', + 'allow_inheritance': False} - @property - def download_path(self): - return self.metadata['relativepath'] +class SRPM(RpmBase): -class SRPM(RPM): - TYPE = ids.TYPE_ID_SRPM + # For backward compatibility + _ns = mongoengine.StringField(default='units_srpm') + unit_type_id = mongoengine.StringField(db_field='_content_type_id', required=True, + default='srpm') + meta = { + 'collection': 'units_srpm', + 'allow_inheritance': False} class Errata(Package): - UNIT_KEY_NAMES = ('id',) - TYPE = ids.TYPE_ID_ERRATA - def __init__(self, id, metadata): - Package.__init__(self, locals()) + errata_id = mongoengine.StringField(required=True) + status = mongoengine.StringField() + updated = mongoengine.StringField(required=True, default='') + description = mongoengine.StringField() + issued = mongoengine.StringField() + pushcount = mongoengine.StringField() + references = mongoengine.ListField() + reboot_suggested = mongoengine.BooleanField() + errata_from = mongoengine.StringField(db_field='from') + severity = mongoengine.StringField() + rights = 
mongoengine.StringField() + version = mongoengine.StringField() + release = mongoengine.StringField() + type = mongoengine.StringField() + pkglist = mongoengine.ListField() + title = mongoengine.StringField() + solution = mongoengine.StringField() + summary = mongoengine.StringField() + + # For backward compatibility + _ns = mongoengine.StringField(default='units_erratum') + unit_type_id = mongoengine.StringField(db_field='_content_type_id', required=True, + default='erratum') + + unit_key_fields = ('errata_id',) + + meta = {'indexes': [ + "errata_id", "version", "release", "type", "status", "updated", + "issued", "severity", "references", + # Unit key Index + { + 'fields': unit_key_fields, + 'unique': True + }], + 'collection': 'units_erratum', + 'allow_inheritance': False} + + SERIALIZER = serializers.Errata @property def rpm_search_dicts(self): ret = [] - for collection in self.metadata.get('pkglist', []): + for collection in self.pkglist: for package in collection.get('packages', []): if len(package.get('sum') or []) == 2: checksum = package['sum'][1] @@ -226,9 +386,9 @@ def rpm_search_dicts(self): rpm = RPM(name=package['name'], epoch=package['epoch'], version=package['version'], release=package['release'], arch=package['arch'], checksum=checksum, - checksumtype=checksumtype, metadata={}) + checksum_type=checksumtype) unit_key = rpm.unit_key - for key in ['checksum', 'checksumtype']: + for key in ['checksum', 'checksum_type']: if unit_key[key] is None: del unit_key[key] ret.append(unit_key) @@ -236,55 +396,145 @@ def rpm_search_dicts(self): class PackageGroup(Package): - UNIT_KEY_NAMES = ('id', 'repo_id') - TYPE = ids.TYPE_ID_PKG_GROUP - def __init__(self, id, repo_id, metadata): - Package.__init__(self, locals()) - # these attributes should default to False based on yum.comps.Group.parse - for name in ('default', 'user_visible'): - if self.metadata.get(name) is None: - self.metadata[name] = False + package_group_id = mongoengine.StringField(required=True) + repo_id = mongoengine.StringField(required=True) + + description = mongoengine.StringField() + default_package_names = mongoengine.ListField() + optional_package_names = mongoengine.ListField() + mandatory_package_names = mongoengine.ListField() + name = mongoengine.StringField() + default = mongoengine.BooleanField(default=False) + display_order = mongoengine.IntField() + user_visible = mongoengine.BooleanField(default=False) + translated_name = mongoengine.DictField() + translated_description = mongoengine.DictField() + langonly = mongoengine.StringField() + conditional_package_names = mongoengine.ListField() + + # For backward compatibility + _ns = mongoengine.StringField(default='units_package_group') + unit_type_id = mongoengine.StringField(db_field='_content_type_id', required=True, + default='package_group') + + unit_key_fields = ('package_group_id', 'repo_id') + + meta = { + 'indexes': [ + 'package_group_id', 'repo_id', 'name', 'mandatory_package_names', + 'conditional_package_names', + 'optional_package_names', 'default_package_names', + # Unit key Index + { + 'fields': ('package_group_id', 'repo_id'), + 'unique': True + }], + 'collection': 'units_package_group', + 'allow_inheritance': False} + + SERIALIZER = serializers.PackageGroup + # + # UNIT_KEY_NAMES = ('id', 'repo_id') + # TYPE = ids.TYPE_ID_PKG_GROUP @property def all_package_names(self): names = [] - for list_name in [ - 'mandatory_package_names', - 'default_package_names', - 'optional_package_names', - # TODO: conditional package names - ]: - 
names.extend(self.metadata.get(list_name, [])) + names.extend(self.mandatory_package_names) + names.extend(self.default_package_names) + names.extend(self.optional_package_names) + # TODO: conditional package names return names class PackageCategory(Package): - UNIT_KEY_NAMES = ('id', 'repo_id') - TYPE = ids.TYPE_ID_PKG_CATEGORY - def __init__(self, id, repo_id, metadata): - Package.__init__(self, locals()) - - @property - def group_names(self): - return self.metadata.get('packagegroupids', []) + package_category_id = mongoengine.StringField(required=True) + repo_id = mongoengine.StringField(required=True) + + description = mongoengine.StringField() + group_ids = mongoengine.ListField(db_field='packagegroupids') + translated_description = mongoengine.DictField() + translated_name = mongoengine.DictField() + display_order = mongoengine.IntField() + name = mongoengine.StringField() + + # For backward compatibility + _ns = mongoengine.StringField(default='units_package_category') + unit_type_id = mongoengine.StringField(db_field='_content_type_id', required=True, + default='package_category') + + unit_key_fields = ('package_category_id', 'repo_id') + + meta = { + 'indexes': [ + 'package_category_id', 'repo_id', 'name', 'group_ids', + # Unit key Index + { + 'fields': ('package_category_id', 'repo_id'), + 'unique': True + }], + 'collection': 'units_package_category', + 'allow_inheritance': False} + + SERIALIZER = serializers.PackageCategory + # UNIT_KEY_NAMES = ('id', 'repo_id') + # TYPE = ids.TYPE_ID_PKG_CATEGORY + # + # def __init__(self, id, repo_id, metadata): + # Package.__init__(self, locals()) + # + # @property + # def group_names(self): + # return self.metadata.get('packagegroupids', []) class PackageEnvironment(Package): - UNIT_KEY_NAMES = ('id', 'repo_id') - TYPE = ids.TYPE_ID_PKG_ENVIRONMENT - - def __init__(self, id, repo_id, metadata): - Package.__init__(self, locals()) - - @property - def group_ids(self): - return self.metadata.get('group_ids', []) - - @property - def options(self): - return self.metadata.get('options', []) + package_environment_id = mongoengine.StringField(required=True) + repo_id = mongoengine.StringField(required=True) + + group_ids = mongoengine.ListField() + description = mongoengine.StringField() + translated_name = mongoengine.DictField() + translated_description = mongoengine.DictField() + options = mongoengine.ListField() + display_order = mongoengine.IntField() + name = mongoengine.StringField() + + # For backward compatibility + _ns = mongoengine.StringField(default='units_package_environment') + unit_type_id = mongoengine.StringField(db_field='_content_type_id', required=True, + default='package_environment') + + unit_key_fields = ('package_environment_id', 'repo_id') + + meta = { + 'indexes': [ + 'package_environment_id', 'repo_id', 'name', 'group_ids', + # Unit key Index + { + 'fields': ('package_environment_id', 'repo_id'), + 'unique': True + }], + 'collection': 'units_package_environment', + 'allow_inheritance': False} + + SERIALIZER = serializers.PackageEnvironment + + # UNIT_KEY_NAMES = ('id', 'repo_id') + # TYPE = ids.TYPE_ID_PKG_ENVIRONMENT + + # def __init__(self, id, repo_id, metadata): + # Package.__init__(self, locals()) + + # @property + # def group_ids(self): + # return self.metadata.get('group_ids', []) + + # @property + # def options(self): + # return self.metadata.get('options', []) @property def optional_group_ids(self): @@ -292,51 +542,77 @@ def optional_group_ids(self): class YumMetadataFile(Package): - UNIT_KEY_NAMES = 
('data_type', 'repo_id') - TYPE = ids.TYPE_ID_YUM_REPO_METADATA_FILE - - def __init__(self, data_type, repo_id, metadata): - Package.__init__(self, locals()) - - @property - def relative_dir(self): - """ - returns the relative path to the directory where the file should be - stored. Since we don't have the filename in the metadata, we can't - derive the full path here. - """ - return self.repo_id - - -TYPE_MAP = { - Distribution.TYPE: Distribution, - DRPM.TYPE: DRPM, - Errata.TYPE: Errata, - PackageCategory.TYPE: PackageCategory, - PackageGroup.TYPE: PackageGroup, - PackageEnvironment.TYPE: PackageEnvironment, - RPM.TYPE: RPM, - SRPM.TYPE: SRPM, - YumMetadataFile.TYPE: YumMetadataFile, -} - -# put the NAMEDTUPLE attribute on each model class -for model_class in TYPE_MAP.values(): - model_class.NAMEDTUPLE = namedtuple(model_class.TYPE, model_class.UNIT_KEY_NAMES) - - -def from_typed_unit_key_tuple(typed_tuple): - """ - This assumes that the __init__ method takes unit key arguments in order - followed by a dictionary for other metadata. - - :param typed_tuple: - :return: - """ - package_class = TYPE_MAP[typed_tuple[0]] - args = typed_tuple[1:] - foo = {'metadata': {}} - return package_class.from_package_info(*args, **foo) + data_type = mongoengine.StringField(required=True) + repo_id = mongoengine.StringField(required=True) + + checksum = mongoengine.StringField() + checksum_type = mongoengine.StringField() + + # For backward compatibility + _ns = mongoengine.StringField(default='units_yum_repo_metadata_file') + unit_type_id = mongoengine.StringField(db_field='_content_type_id', required=True, + default='yum_repo_metadata_file') + + unit_key_fields = ('data_type', 'repo_id') + + meta = { + 'indexes': [ + 'data_type', + # Unit key Index + { + 'fields': ('data_type', 'repo_id'), + 'unique': True + }], + 'collection': 'units_yum_repo_metadata_file', + 'allow_inheritance': False} + + SERIALIZER = serializers.YumMetadataFile + + # UNIT_KEY_NAMES = ('data_type', 'repo_id') + # TYPE = ids.TYPE_ID_YUM_REPO_METADATA_FILE + + # def __init__(self, data_type, repo_id, metadata): + # Package.__init__(self, locals()) + # + # @property + # def relative_dir(self): + # """ + # returns the relative path to the directory where the file should be + # stored. Since we don't have the filename in the metadata, we can't + # derive the full path here. + # """ + # return self.repo_id + +# +# TYPE_MAP = { +# Distribution.TYPE: Distribution, +# DRPM.TYPE: DRPM, +# Errata.TYPE: Errata, +# PackageCategory.TYPE: PackageCategory, +# PackageGroup.TYPE: PackageGroup, +# PackageEnvironment.TYPE: PackageEnvironment, +# RPM.TYPE: RPM, +# SRPM.TYPE: SRPM, +# YumMetadataFile.TYPE: YumMetadataFile, +# } +# +# # put the NAMEDTUPLE attribute on each model class +# for model_class in TYPE_MAP.values(): +# model_class.NAMEDTUPLE = namedtuple(model_class.TYPE, model_class.UNIT_KEY_NAMES) + + +# def from_typed_unit_key_tuple(typed_tuple): +# """ +# This assumes that the __init__ method takes unit key arguments in order +# followed by a dictionary for other metadata. +# +# :param typed_tuple: +# :return: +# """ +# package_class = TYPE_MAP[typed_tuple[0]] +# args = typed_tuple[1:] +# foo = {'metadata': {}} +# return package_class.from_package_info(*args, **foo) # ------------ ISO Models --------------- # @@ -345,70 +621,93 @@ def from_typed_unit_key_tuple(typed_tuple): CHECKSUM_CHUNK_SIZE = 32 * 1024 * 1024 -class ISO(object): +class ISO(ContentUnit): """ This is a handy way to model an ISO unit, with some related utilities. 
""" - TYPE = ids.TYPE_ID_ISO - UNIT_KEY_ISO = ('name', 'size', 'checksum') - - def __init__(self, name, size, checksum, unit=None): - """ - Initialize an ISO, with its name, size, and checksum. - - :param name: The name of the ISO - :type name: basestring - :param size: The size of the ISO, in bytes - :type size: int - :param checksum: The SHA-256 checksum of the ISO - :type checksum: basestring - """ - self.name = name - self.size = size - self.checksum = checksum - - # This is the Unit that the ISO represents. An ISO doesn't always have a Unit backing it, - # particularly during repository synchronization or ISO uploads when the ISOs are being - # initialized. - self._unit = unit - - @classmethod - def from_unit(cls, unit): - """ - Construct an ISO out of a Unit. - """ - return cls(unit.unit_key['name'], unit.unit_key['size'], unit.unit_key['checksum'], unit) - - def init_unit(self, conduit): - """ - Use the given conduit's init_unit() call to initialize a unit, and store the unit as - self._unit. - - :param conduit: The conduit to call init_unit() to get a Unit. - :type conduit: pulp.plugins.conduits.mixins.AddUnitMixin - """ - relative_path = os.path.join(self.name, self.checksum, str(self.size), self.name) - unit_key = {'name': self.name, 'size': self.size, 'checksum': self.checksum} - metadata = {} - self._unit = conduit.init_unit(self.TYPE, unit_key, metadata, relative_path) - - def save_unit(self, conduit): - """ - Use the given conduit's save_unit() call to save self._unit. - - :param conduit: The conduit to call save_unit() with. - :type conduit: pulp.plugins.conduits.mixins.AddUnitMixin - """ - conduit.save_unit(self._unit) - - @property - def storage_path(self): - """ - Return the storage path of the Unit that underlies this ISO. - """ - return self._unit.storage_path - - def validate(self, full_validation=True): + name = mongoengine.StringField(required=True) + checksum = mongoengine.StringField(required=True) + size = mongoengine.IntField(required=True) + + # For backward compatibility + _ns = mongoengine.StringField(default='units_iso') + unit_type_id = mongoengine.StringField(db_field='_content_type_id', required=True, + default='iso') + + unit_key_fields = ('name', 'checksum', 'size') + + meta = { + 'indexes': [ + # Unit key Index + { + 'fields': ('name', 'checksum', 'size'), + 'unique': True + }], + 'collection': 'units_iso', + 'allow_inheritance': False} + + SERIALIZER = serializers.ISO + + # TYPE = ids.TYPE_ID_ISO + # UNIT_KEY_ISO = ('name', 'size', 'checksum') + + # def __init__(self, name, size, checksum, unit=None): + # """ + # Initialize an ISO, with its name, size, and checksum. + # + # :param name: The name of the ISO + # :type name: basestring + # :param size: The size of the ISO, in bytes + # :type size: int + # :param checksum: The SHA-256 checksum of the ISO + # :type checksum: basestring + # """ + # self.name = name + # self.size = size + # self.checksum = checksum + # + # # This is the Unit that the ISO represents. An ISO doesn't always have a Unit backing it, + # # particularly during repository synchronization or ISO uploads when the ISOs are being + # # initialized. + # self._unit = unit + + # @classmethod + # def from_unit(cls, unit): + # """ + # Construct an ISO out of a Unit. + # """ + # return cls(unit.unit_key['name'], unit.unit_key['size'], unit.unit_key['checksum'], unit) + # + # def init_unit(self, conduit): + # """ + # Use the given conduit's init_unit() call to initialize a unit, and store the unit as + # self._unit. 
+ # + # :param conduit: The conduit to call init_unit() to get a Unit. + # :type conduit: pulp.plugins.conduits.mixins.AddUnitMixin + # """ + # relative_path = os.path.join(self.name, self.checksum, str(self.size), self.name) + # unit_key = {'name': self.name, 'size': self.size, 'checksum': self.checksum} + # metadata = {} + # self._unit = conduit.init_unit(self.TYPE, unit_key, metadata, relative_path) + + # def save_unit(self, conduit): + # """ + # Use the given conduit's save_unit() call to save self._unit. + # + # :param conduit: The conduit to call save_unit() with. + # :type conduit: pulp.plugins.conduits.mixins.AddUnitMixin + # """ + # conduit.save_unit(self._unit) + # + # @property + # def storage_path(self): + # """ + # Return the storage path of the Unit that underlies this ISO. + # """ + # return self._unit.storage_path + + def validate_iso(self, storage_path, full_validation=True): """ Validate that the name of the ISO is not the same as the manifest's name. Also, if full_validation is True, validate that the file found at self.storage_path matches the size @@ -426,17 +725,8 @@ def validate(self, full_validation=True): raise ValueError(msg) if full_validation: - - try: - destination_file = open(self.storage_path) - - except: - # Cannot have an else clause to the try without the except. - raise - - else: - try: - # Validate the size + with open(storage_path) as destination_file: + # Validate the size actual_size = self.calculate_size(destination_file) if actual_size != self.size: raise ValueError(_('Downloading <%(name)s> failed validation. ' @@ -456,9 +746,6 @@ def validate(self, full_validation=True): 'name': self.name, 'c': self.checksum, 'f': actual_checksum}) - finally: - destination_file.close() - @staticmethod def calculate_checksum(file_handle): """ @@ -511,7 +798,7 @@ def __init__(self, manifest_file, repo_url): self._isos = [] for unit in manifest_csv: name, checksum, size = unit - iso = ISO(name, int(size), checksum) + iso = ISO(name=name, size=int(size), checksum=checksum) # Take a URL onto the ISO so we know where we can get it iso.url = urljoin(repo_url, name) self._isos.append(iso) diff --git a/plugins/pulp_rpm/plugins/distributors/export_distributor/distributor.py b/plugins/pulp_rpm/plugins/distributors/export_distributor/distributor.py index 72784bdb6..26f2c8760 100644 --- a/plugins/pulp_rpm/plugins/distributors/export_distributor/distributor.py +++ b/plugins/pulp_rpm/plugins/distributors/export_distributor/distributor.py @@ -3,6 +3,7 @@ from pulp.common.config import read_json_config from pulp.plugins.distributor import Distributor +from pulp.server.db import model as platform_models from pulp.server.exceptions import PulpDataException from pulp_rpm.plugins.distributors.export_distributor import export_utils @@ -109,12 +110,12 @@ def set_progress(self, type_id, status, progress_callback=None): if progress_callback: progress_callback(type_id, status) - def publish_repo(self, repo, publish_conduit, config): + def publish_repo(self, transfer_repo, publish_conduit, config): """ Export a yum repository to a given directory, or to ISO - :param repo: metadata describing the repository - :type repo: pulp.plugins.model.Repository + :param transfer_repo: metadata describing the repository + :type transfer_repo: pulp.plugins.model.Repository :param publish_conduit: provides access to relevant Pulp functionality :type publish_conduit: pulp.plugins.conduits.repo_publish.RepoPublishConduit :param config: plugin configuration @@ -123,31 +124,35 @@ def publish_repo(self, repo, 
publish_conduit, config): :return: report describing the publish run :rtype: pulp.plugins.model.PublishReport """ + repo = platform_models.Repository.objects.get(repo_id=transfer_repo.id) + # First, validate the configuration because there may be override config options, and # currently, validate_config is not called prior to publishing by the manager. valid_config, msg = export_utils.validate_export_config(config) if not valid_config: raise PulpDataException(msg) - _logger.info('Starting export of [%s]' % repo.id) + _logger.info('Starting export of [%s]' % repo.repo_id) self._publisher = ExportRepoPublisher(repo, publish_conduit, config, ids.TYPE_ID_DISTRIBUTOR_EXPORT) - return self._publisher.publish() + return self._publisher.process_lifecycle() - def distributor_removed(self, repo, config): + def distributor_removed(self, transfer_repo, config): """ Called when a distributor of this type is removed from a repository. - :param repo: metadata describing the repository - :type repo: pulp.plugins.model.Repository + :param transfer_repo: metadata describing the repository + :type transfer_repo: pulp.plugins.model.Repository :param config: plugin configuration :type config: pulp.plugins.config.PluginCallConfiguration """ + repo = platform_models.Repository.objects.get(repo_id=transfer_repo.id) + # remove the directories that might have been created for this repo/distributor dir_list = [configuration.get_master_publish_dir(repo, ids.TYPE_ID_DISTRIBUTOR_EXPORT), - os.path.join(configuration.HTTP_EXPORT_DIR, repo.id), - os.path.join(configuration.HTTPS_EXPORT_DIR, repo.id)] + os.path.join(configuration.HTTP_EXPORT_DIR, repo.repo_id), + os.path.join(configuration.HTTPS_EXPORT_DIR, repo.repo_id)] for repo_dir in dir_list: shutil.rmtree(repo_dir, ignore_errors=True) diff --git a/plugins/pulp_rpm/plugins/distributors/export_distributor/groupdistributor.py b/plugins/pulp_rpm/plugins/distributors/export_distributor/groupdistributor.py index 2742de76d..40bd06284 100644 --- a/plugins/pulp_rpm/plugins/distributors/export_distributor/groupdistributor.py +++ b/plugins/pulp_rpm/plugins/distributors/export_distributor/groupdistributor.py @@ -7,7 +7,6 @@ from pulp.server.exceptions import PulpDataException from pulp_rpm.common import ids -from pulp_rpm.plugins.db import models from pulp_rpm.plugins.distributors.export_distributor import export_utils from pulp_rpm.plugins.distributors.yum import configuration from pulp_rpm.yum_plugin import util @@ -53,9 +52,13 @@ def metadata(cls): return { 'id': ids.TYPE_ID_DISTRIBUTOR_GROUP_EXPORT, 'display_name': _('Group Export Distributor'), - 'types': [models.RPM.TYPE, models.SRPM.TYPE, models.DRPM.TYPE, models.Errata.TYPE, - models.Distribution.TYPE, models.PackageCategory.TYPE, - models.PackageGroup.TYPE] + 'types': [ids.TYPE_ID_RPM, + ids.TYPE_ID_SRPM, + ids.TYPE_ID_DRPM, + ids.TYPE_ID_ERRATA, + ids.TYPE_ID_DISTRO, + ids.TYPE_ID_PKG_CATEGORY, + ids.TYPE_ID_PKG_GROUP] } def validate_config(self, repo_group, config, config_conduit): @@ -110,7 +113,7 @@ def publish_group(self, repo_group, publish_conduit, config): _logger.info('Beginning export of the following repository group: [%s]' % repo_group.id) self._publisher = ExportRepoGroupPublisher(repo_group, publish_conduit, config, ids.TYPE_ID_DISTRIBUTOR_GROUP_EXPORT) - return self._publisher.publish() + return self._publisher.process_lifecycle() def cancel_publish_repo(self): """ diff --git a/plugins/pulp_rpm/plugins/distributors/iso_distributor/distributor.py 
b/plugins/pulp_rpm/plugins/distributors/iso_distributor/distributor.py index 047348e23..a023079a7 100644 --- a/plugins/pulp_rpm/plugins/distributors/iso_distributor/distributor.py +++ b/plugins/pulp_rpm/plugins/distributors/iso_distributor/distributor.py @@ -1,5 +1,7 @@ import os +from pulp.server.controllers import repository as repo_controller +from pulp.server.db import model as platform_models from pulp.plugins.file.distributor import FileDistributor from pulp_rpm.common import ids @@ -39,6 +41,24 @@ def metadata(cls): def validate_config(self, repo, config, config_conduit): return configuration.validate(config) + def publish_repo(self, transfer_repo, publish_conduit, config): + """ + Publish the repository. + + :param transfer_repo: metadata describing the repo + :type transfer_repo: pulp.plugins.model.Repository + :param publish_conduit: The conduit for publishing a repo + :type publish_conduit: pulp.plugins.conduits.repo_publish.RepoPublishConduit + :param config: plugin configuration + :type config: pulp.plugins.config.PluginConfiguration + :param config_conduit: Configuration Conduit; + :type config_conduit: pulp.plugins.conduits.repo_validate.RepoConfigConduit + :return: report describing the publish operation + :rtype: pulp.plugins.model.PublishReport + """ + repo = platform_models.Repository.objects.get(repo_id=transfer_repo.id) + return super(ISODistributor, self).publish_repo(repo, publish_conduit, config) + def unpublish_repo(self, repo, config): """ Perform actions necessary when upublishing a repo @@ -46,7 +66,7 @@ def unpublish_repo(self, repo, config): Please also see the superclass method definition for more documentation on this method. :param repo: metadata describing the repository - :type repo: pulp.plugins.model.Repository + :type repo: pulp.server.db.model.Repository :param config: plugin configuration :type config: pulp.plugins.config.PluginCallConfiguration @@ -59,7 +79,7 @@ def get_hosting_locations(self, repo, config): Get the paths on the filesystem where the build directory should be copied :param repo: The repository that is going to be hosted - :type repo: pulp.plugins.model.Repository + :type repo: pulp.server.db.model.Repository :param config: plugin configuration :type config: pulp.plugins.config.PluginConfiguration :return : list of paths on the filesystem where the build directory should be copied @@ -68,7 +88,7 @@ def get_hosting_locations(self, repo, config): hosting_locations = [] # Publish the HTTP portion, if applicable - http_dest_dir = os.path.join(constants.ISO_HTTP_DIR, repo.id) + http_dest_dir = os.path.join(constants.ISO_HTTP_DIR, repo.repo_id) serve_http = config.get_boolean(constants.CONFIG_SERVE_HTTP) serve_http = serve_http if serve_http is not None else constants.CONFIG_SERVE_HTTP_DEFAULT @@ -77,7 +97,7 @@ def get_hosting_locations(self, repo, config): # Publish the HTTPs portion, if applicable if self._is_https_supported(config): - https_dest_dir = os.path.join(constants.ISO_HTTPS_DIR, repo.id) + https_dest_dir = os.path.join(constants.ISO_HTTPS_DIR, repo.repo_id) hosting_locations.append(https_dest_dir) return hosting_locations @@ -88,7 +108,7 @@ def post_repo_publish(self, repo, config): been moved into place on the filesystem :param repo: The repository that is going to be hosted - :type repo: pulp.plugins.model.Repository + :type repo: pulp.server.db.model.Repository :param config: the configuration for the repository :type config: pulp.plugins.config.PluginCallConfiguration """ @@ -112,3 +132,15 @@ def _is_https_supported(self, 
config): constants.CONFIG_SERVE_HTTPS_DEFAULT return serve_https + + def get_units(self, repo, publish_conduit): + """ + :param repo: metadata describing the repo + :type repo: pulp.plugins.model.Repository + :param publish_conduit: The conduit for publishing a repo + :type publish_conduit: pulp.plugins.conduits.repo_publish.RepoPublishConduit + + :return: Return an iterable of units + :rtype: iterable of units + """ + return repo_controller.find_repo_content_units(repo, yield_content_unit=True) \ No newline at end of file diff --git a/plugins/pulp_rpm/plugins/distributors/iso_distributor/publish.py b/plugins/pulp_rpm/plugins/distributors/iso_distributor/publish.py index 92bed8c96..411ca2e5c 100644 --- a/plugins/pulp_rpm/plugins/distributors/iso_distributor/publish.py +++ b/plugins/pulp_rpm/plugins/distributors/iso_distributor/publish.py @@ -43,11 +43,11 @@ def _get_relative_path(repo): Return the relative path for a particular repository. :param repo: The repo we need hosting locations for - :type repo: pulp.plugins.model.Repository + :type repo: pulp.server.db.model.Repository :return: relative path for the repo :rtype: basestring """ - return repo.id + return repo.repo_id def _get_repository_protection_utils(): @@ -70,7 +70,7 @@ def remove_repository_protection(repo): Remove repository protection from the given repository. :param repo: The repository to remove protection from - :type repo: pulp.plugins.model.Repository + :type repo: pulp.server.db.model.Repository """ protected_repo_utils = _get_repository_protection_utils()[1] relative_path = _get_relative_path(repo) diff --git a/plugins/pulp_rpm/plugins/distributors/yum/configuration.py b/plugins/pulp_rpm/plugins/distributors/yum/configuration.py index e9fc298df..448c91329 100644 --- a/plugins/pulp_rpm/plugins/distributors/yum/configuration.py +++ b/plugins/pulp_rpm/plugins/distributors/yum/configuration.py @@ -58,7 +58,7 @@ def validate_config(repo, config, config_conduit): Validate the prospective configuration instance for the the give repository. :param repo: repository to validate the config for - :type repo: pulp.plugins.model.Repository + :type repo: pulp.server.db.model.Repository :param config: configuration instance to validate :type config: pulp.plugins.config.PluginCallConfiguration :param config_conduit: conduit providing access to relevant Pulp functionality @@ -140,7 +140,7 @@ def process_cert_based_auth(repo, config): Write the CA and Cert files in the PKI, if present. Remove them, if not. 
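Note that get_units() above streams units through repo_controller.find_repo_content_units(..., yield_content_unit=True) instead of materializing a list, so publish steps can walk arbitrarily large repositories. A toy sketch of that lazy-generator contract; find_content_units and fetch_unit are invented stand-ins, not the pulp API:

    def find_content_units(associations, yield_content_unit=False):
        # lazy generator: units are fetched one at a time as the caller iterates
        for association in associations:
            if yield_content_unit:
                yield fetch_unit(association)
            else:
                yield association


    def fetch_unit(association):
        return {'unit_id': association['unit_id'], 'storage_path': '/var/lib/example'}


    associations = [{'unit_id': 'a'}, {'unit_id': 'b'}]
    for unit in find_content_units(associations, yield_content_unit=True):
        print(unit['unit_id'])
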
:param repo: repository to validate the config for - :type repo: pulp.plugins.model.Repository + :type repo: pulp.server.db.model.Repository :param config: configuration instance to validate :type config: pulp.plugins.config.PluginCallConfiguration or dict """ @@ -160,8 +160,8 @@ def process_cert_based_auth(repo, config): repo_cert_utils_instance = repo_cert_utils.RepoCertUtils(auth_config) bundle = {'ca': auth_ca, 'cert': auth_cert} - repo_cert_utils_instance.write_consumer_cert_bundle(repo.id, bundle) - protected_repo_utils_instance.add_protected_repo(relative_path, repo.id) + repo_cert_utils_instance.write_consumer_cert_bundle(repo.repo_id, bundle) + protected_repo_utils_instance.add_protected_repo(relative_path, repo.repo_id) def remove_cert_based_auth(repo, config): @@ -169,7 +169,7 @@ def remove_cert_based_auth(repo, config): Remove the CA and Cert files in the PKI :param repo: repository to validate the config for - :type repo: pulp.plugins.model.Repository + :type repo: pulp.server.db.model.Repository :param config: configuration instance to validate :type config: pulp.plugins.config.PluginCallConfiguration or dict """ @@ -184,14 +184,14 @@ def get_master_publish_dir(repo, distributor_type): Get the master publishing directory for the given repository. :param repo: repository to get the master publishing directory for - :type repo: pulp.plugins.model.Repository + :type repo: pulp.server.db.model.Repository :param distributor_type: The type id of distributor that is being published :type distributor_type: str :return: master publishing directory for the given repository :rtype: str """ - return os.path.join(MASTER_PUBLISH_DIR, distributor_type, repo.id) + return os.path.join(MASTER_PUBLISH_DIR, distributor_type, repo.repo_id) def get_export_repo_publish_dirs(repo, config): @@ -199,7 +199,7 @@ def get_export_repo_publish_dirs(repo, config): Get the web publishing directories for a repo export :param repo: repository to get the master publishing directory for - :type repo: pulp.plugins.model.Repository + :type repo: pulp.server.db.model.Repository :param config: configuration instance :type config: pulp.plugins.config.PluginCallConfiguration :return: list of publishing locations on disk @@ -207,9 +207,9 @@ def get_export_repo_publish_dirs(repo, config): """ publish_dirs = [] if config.get(PUBLISH_HTTP_KEYWORD): - publish_dirs.append(os.path.join(HTTP_EXPORT_DIR, repo.id)) + publish_dirs.append(os.path.join(HTTP_EXPORT_DIR, repo.repo_id)) if config.get(PUBLISH_HTTPS_KEYWORD): - publish_dirs.append(os.path.join(HTTPS_EXPORT_DIR, repo.id)) + publish_dirs.append(os.path.join(HTTPS_EXPORT_DIR, repo.repo_id)) return publish_dirs @@ -290,7 +290,7 @@ def get_repo_relative_path(repo, config=None): """ config = config or {} - relative_path = config.get('relative_url', repo.id) or repo.id + relative_path = config.get('relative_url', repo.repo_id) or repo.repo_id if relative_path.startswith('/'): relative_path = relative_path[1:] @@ -462,7 +462,7 @@ def _validate_usable_directory(key, path, error_messages): def _check_for_relative_path_conflicts(repo, config, config_conduit, error_messages): relative_path = get_repo_relative_path(repo, config) conflicting_distributors = config_conduit.get_repo_distributors_by_relative_url(relative_path, - repo.id) + repo.repo_id) # in all honesty, this loop should execute at most once # but it may be interesting/useful for erroneous situations for distributor in conflicting_distributors: @@ -475,6 +475,6 @@ def _check_for_relative_path_conflicts(repo, config, 
config_conduit, error_messa else: msg = _('Relative URL [{relative_path}] for repository [{repo_id}] conflicts with ' 'repo id for existing repository [{conflict_repo}]') - error_messages.append(msg.format(relative_path=relative_path, repo_id=repo.id, + error_messages.append(msg.format(relative_path=relative_path, repo_id=repo.repo_id, conflict_url=conflicting_relative_url, conflict_repo=conflicting_repo_id)) diff --git a/plugins/pulp_rpm/plugins/distributors/yum/distributor.py b/plugins/pulp_rpm/plugins/distributors/yum/distributor.py index 4f9c6cfe8..37e8f116a 100644 --- a/plugins/pulp_rpm/plugins/distributors/yum/distributor.py +++ b/plugins/pulp_rpm/plugins/distributors/yum/distributor.py @@ -4,7 +4,8 @@ from pulp.common.config import read_json_config from pulp.plugins.distributor import Distributor -from pulp.server import config as pulp_server_config +from pulp.server.config import config as pulp_server_config +from pulp.server.db import model as platform_models import pulp_rpm.common.constants as constants from pulp_rpm.common.ids import ( @@ -58,16 +59,16 @@ def metadata(cls): TYPE_ID_PKG_GROUP, TYPE_ID_PKG_CATEGORY, TYPE_ID_DISTRO, TYPE_ID_YUM_REPO_METADATA_FILE]} - def validate_config(self, repo, config, config_conduit): + def validate_config(self, transfer_repo, config, config_conduit): """ Allows the distributor to check the contents of a potential configuration for the given repository. This call is made both for the addition of this distributor to a new repository as well as updating the configuration for this distributor on a previously configured repository. - :param repo: metadata describing the repository to which the - configuration applies - :type repo: pulp.plugins.model.Repository + :param transfer_repo: metadata describing the repository to which the + configuration applies + :type transfer_repo: pulp.plugins.model.Repository :param config: plugin configuration instance; the proposed repo configuration is found within @@ -79,20 +80,22 @@ def validate_config(self, repo, config, config_conduit): :return: tuple of (bool, str) to describe the result :rtype: tuple """ - _logger.debug('Validating yum repository configuration: %s' % repo.id) + _logger.debug('Validating yum repository configuration: %s' % transfer_repo.id) + repo = platform_models.Repository.objects.get(repo_id=transfer_repo.id) return configuration.validate_config(repo, config, config_conduit) - def distributor_removed(self, repo, config): + def distributor_removed(self, transfer_repo, config): """ Called when a distributor of this type is removed from a repository. - :param repo: metadata describing the repository - :type repo: pulp.plugins.model.Repository + :param transfer_repo: metadata describing the repository + :type transfer_repo: pulp.plugins.model.Repository :param config: plugin configuration :type config: pulp.plugins.config.PluginCallConfiguration """ + repo = platform_models.Repository.objects.get(repo_id=transfer_repo.id) # remove the directories that might have been created for this repo/distributor repo_dir = configuration.get_master_publish_dir(repo, TYPE_ID_DISTRIBUTOR_YUM) @@ -149,12 +152,12 @@ def clean_simple_hosting_directories(self, start_location, containing_dir): self.clean_simple_hosting_directories(up_dir, containing_dir) - def publish_repo(self, repo, publish_conduit, config): + def publish_repo(self, transfer_repo, publish_conduit, config): """ Publishes the given repository. 
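A pattern worth noting in the distributor hunks above and below: every entry point now immediately trades the plugin-layer transfer repo for the database-layer Repository document and uses repo.repo_id from then on. In miniature, assuming a local MongoDB; TransferRepo and Repository here are hypothetical minimal stand-ins for the pulp classes:

    import collections

    import mongoengine

    mongoengine.connect('example_db')


    class Repository(mongoengine.Document):
        repo_id = mongoengine.StringField(required=True, unique=True)
        display_name = mongoengine.StringField()


    TransferRepo = collections.namedtuple('TransferRepo', ['id'])


    def distributor_removed(transfer_repo):
        # look up by repo_id once, then rely on the model's repo_id attribute
        repo = Repository.objects.get(repo_id=transfer_repo.id)
        return repo.repo_id


    Repository(repo_id='zoo', display_name='Zoo').save()
    print(distributor_removed(TransferRepo(id='zoo')))
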
- :param repo: metadata describing the repository - :type repo: pulp.plugins.model.Repository + :param transfer_repo: metadata describing the repository + :type transfer_repo: pulp.plugins.model.Repository :param publish_conduit: provides access to relevant Pulp functionality :type publish_conduit: pulp.plugins.conduits.repo_publish.RepoPublishConduit @@ -165,10 +168,12 @@ def publish_repo(self, repo, publish_conduit, config): :return: report describing the publish run :rtype: pulp.plugins.model.PublishReport """ - _logger.debug('Publishing yum repository: %s' % repo.id) + _logger.debug('Publishing yum repository: %s' % transfer_repo.id) + repo = platform_models.Repository.objects.get(repo_id=transfer_repo.id) + repo.repo_id = transfer_repo.id self._publisher = publish.Publisher(repo, publish_conduit, config, TYPE_ID_DISTRIBUTOR_YUM) - return self._publisher.publish() + return self._publisher.process_lifecycle() def cancel_publish_repo(self): """ @@ -180,14 +185,14 @@ def cancel_publish_repo(self): if self._publisher is not None: self._publisher.cancel() - def create_consumer_payload(self, repo, config, binding_config): + def create_consumer_payload(self, transfer_repo, config, binding_config): """ Called when a consumer binds to a repository using this distributor. This call should return a dictionary describing all data the consumer will need to access the repository. - :param repo: metadata describing the repository - :type repo: pulp.plugins.model.Repository + :param transfer_repo: metadata describing the repository + :type transfer_repo: pulp.plugins.model.Repository :param config: plugin configuration :type config: pulp.plugins.config.PluginCallConfiguration @@ -200,6 +205,8 @@ def create_consumer_payload(self, repo, config, binding_config): :return: dictionary of relevant data :rtype: dict """ + repo = platform_models.Repository.objects.get(repo_id=transfer_repo.id) + payload = dict() payload['repo_name'] = repo.display_name payload['server_name'] = pulp_server_config.config.get('server', 'server_name') diff --git a/plugins/pulp_rpm/plugins/distributors/yum/metadata/filelists.py b/plugins/pulp_rpm/plugins/distributors/yum/metadata/filelists.py index 6248bf4cf..03e8200f3 100644 --- a/plugins/pulp_rpm/plugins/distributors/yum/metadata/filelists.py +++ b/plugins/pulp_rpm/plugins/distributors/yum/metadata/filelists.py @@ -37,9 +37,9 @@ def add_unit_metadata(self, unit): Add the metadata to the xml file for the given unit. :param unit: unit whose metadata is to be written - :type unit: pulp.plugins.model.Unit + :type unit: pulp_rpm.plugins.db.models.RpmBase """ - metadata = unit.metadata['repodata']['filelists'] + metadata = unit.repodata['filelists'] if isinstance(metadata, unicode): metadata = metadata.encode('utf-8') self.metadata_file_handle.write(metadata) diff --git a/plugins/pulp_rpm/plugins/distributors/yum/metadata/other.py b/plugins/pulp_rpm/plugins/distributors/yum/metadata/other.py index 49d422004..365204154 100644 --- a/plugins/pulp_rpm/plugins/distributors/yum/metadata/other.py +++ b/plugins/pulp_rpm/plugins/distributors/yum/metadata/other.py @@ -37,9 +37,9 @@ def add_unit_metadata(self, unit): Add the metadata to primary.xml.gz for the given unit. 
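The add_unit_metadata() hunks for filelists.xml above (and other.xml/primary.xml further down) all share one idiom: pull the raw XML snippet off the unit's repodata dict and utf-8 encode it before writing. That encode-before-write step isolated below, with Python 2 semantics to match the unicode check in the diff; write_snippet is a hypothetical helper:

    import io


    def write_snippet(file_handle, snippet):
        # mongoengine hands back text; the metadata files are written as bytes
        if isinstance(snippet, unicode):
            snippet = snippet.encode('utf-8')
        file_handle.write(snippet)


    handle = io.BytesIO()
    write_snippet(handle, u'<package name="zebra"/>')
    print(handle.getvalue())
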
:param unit: unit whose metadata is to be written - :type unit: pulp.plugins.model.Unit + :type unit: pulp_rpm.plugins.db.models.RpmBase """ - metadata = unit.metadata['repodata']['other'] + metadata = unit.repodata['other'] if isinstance(metadata, unicode): metadata = metadata.encode('utf-8') self.metadata_file_handle.write(metadata) diff --git a/plugins/pulp_rpm/plugins/distributors/yum/metadata/package.py b/plugins/pulp_rpm/plugins/distributors/yum/metadata/package.py index 1fb62404d..3a935f69a 100644 --- a/plugins/pulp_rpm/plugins/distributors/yum/metadata/package.py +++ b/plugins/pulp_rpm/plugins/distributors/yum/metadata/package.py @@ -70,49 +70,41 @@ def add_package_group_unit_metadata(self, group_unit): """ Write out the XML representation of a group - :param group_unit: AssociatedUnit of the group to publish - :type group_unit: AssociatedUnit + :param group_unit: the group to publish + :type group_unit: pulp_rpm.plugins.db.models.PackageGroup """ group_element = ElementTree.Element('group') - ElementTree.SubElement(group_element, 'id').text = group_unit.unit_key['id'] + ElementTree.SubElement(group_element, 'id').text = group_unit.package_group_id ElementTree.SubElement(group_element, 'default').text = \ - str(group_unit.metadata['default']).lower() + str(group_unit.default).lower() ElementTree.SubElement(group_element, 'uservisible').text = \ - str(group_unit.metadata['user_visible']).lower() + str(group_unit.user_visible).lower() ElementTree.SubElement(group_element, 'display_order').text = \ - str(group_unit.metadata['display_order']) - - if 'langonly' in group_unit.metadata: - ElementTree.SubElement(group_element, 'langonly').text = \ - group_unit.metadata['langonly'] - ElementTree.SubElement(group_element, 'name').text = \ - group_unit.metadata['name'] - self._write_translated_fields(group_element, 'name', - group_unit.metadata.get('translated_name')) - ElementTree.SubElement(group_element, 'description').text = \ - group_unit.metadata['description'] + str(group_unit.display_order) + + if group_unit.langonly: + ElementTree.SubElement(group_element, 'langonly').text = group_unit.langonly + ElementTree.SubElement(group_element, 'name').text = group_unit.name + self._write_translated_fields(group_element, 'name', group_unit.translated_name) + ElementTree.SubElement(group_element, 'description').text = group_unit.description self._write_translated_fields(group_element, 'description', - group_unit.metadata.get('translated_description')) + group_unit.translated_description) package_list_element = ElementTree.SubElement(group_element, 'packagelist') - if 'mandatory_package_names' in group_unit.metadata and \ - group_unit.metadata['mandatory_package_names']: - for pkg in sorted(group_unit.metadata['mandatory_package_names']): + if group_unit.mandatory_package_names: + for pkg in sorted(group_unit.mandatory_package_names): ElementTree.SubElement(package_list_element, 'packagereq', {'type': 'mandatory'}).text = pkg - if 'default_package_names' in group_unit.metadata and \ - group_unit.metadata['default_package_names']: - for pkg in sorted(group_unit.metadata['default_package_names']): + if group_unit.default_package_names: + for pkg in sorted(group_unit.default_package_names): ElementTree.SubElement(package_list_element, 'packagereq', {'type': 'default'}).text = pkg - if 'optional_package_names' in group_unit.metadata and \ - group_unit.metadata['optional_package_names']: - for pkg in sorted(group_unit.metadata['optional_package_names']): + if group_unit.optional_package_names: + for 
pkg in sorted(group_unit.optional_package_names): ElementTree.SubElement(package_list_element, 'packagereq', {'type': 'optional'}).text = pkg - if 'conditional_package_names' in group_unit.metadata and \ - group_unit.metadata['conditional_package_names']: - for pkg_name, value in group_unit.metadata['conditional_package_names']: + if group_unit.conditional_package_names: + for pkg_name, value in group_unit.conditional_package_names: ElementTree.SubElement(package_list_element, 'packagereq', {'type': 'conditional', 'requires': value}).text = pkg_name @@ -125,28 +117,21 @@ def add_package_category_unit_metadata(self, unit): """ Write out the XML representation of a category unit - :param group_unit: AssociatedUnit of the category o publish - :type group_unit: AssociatedUnit + :param unit: The category to publish + :type unit: pulp_rpm.plugins.db.models.PackageCategory """ category_element = ElementTree.Element('category') - category_id = unit.unit_key["id"] - if category_id is None: - category_id = unit.metadata['id'] - ElementTree.SubElement(category_element, 'id').text = category_id + ElementTree.SubElement(category_element, 'id').text = unit.package_category_id ElementTree.SubElement(category_element, 'display_order').text = \ - str(unit.metadata['display_order']) - ElementTree.SubElement(category_element, 'name').text = \ - unit.metadata['name'] - self._write_translated_fields(category_element, 'name', - unit.metadata.get('translated_name')) - ElementTree.SubElement(category_element, 'description').text = \ - unit.metadata['description'] - self._write_translated_fields(category_element, 'description', - unit.metadata.get('translated_description')) + str(unit.display_order) + ElementTree.SubElement(category_element, 'name').text = unit.name + self._write_translated_fields(category_element, 'name', unit.translated_name) + ElementTree.SubElement(category_element, 'description').text = unit.description + self._write_translated_fields(category_element, 'description', unit.translated_description) group_list_element = ElementTree.SubElement(category_element, 'grouplist') - if 'packagegroupids' in unit.metadata and unit.metadata['packagegroupids']: - for groupid in sorted(unit.metadata['packagegroupids']): + if unit.group_ids: + for groupid in sorted(unit.group_ids): ElementTree.SubElement(group_list_element, 'groupid').text = groupid # Write out the category xml to the file @@ -158,34 +143,28 @@ def add_package_environment_unit_metadata(self, unit): """ Write out the XML representation of a environment group unit - :param unit: AssociatedUnit of the environment group to publish - :type unit: AssociatedUnit - """ + :param unit: The environment group to publish + :type unit: pulp_rpm.plugins.db.models.PackageEnvironment + """ environment_element = ElementTree.Element('environment') - environment_id = unit.unit_key["id"] - if environment_id is None: - environment_id = unit.metadata['id'] - ElementTree.SubElement(environment_element, 'id').text = environment_id + ElementTree.SubElement(environment_element, 'id').text = unit.package_environment_id ElementTree.SubElement(environment_element, 'display_order').text = \ - str(unit.metadata['display_order']) - ElementTree.SubElement(environment_element, 'name').text = \ - unit.metadata['name'] - self._write_translated_fields(environment_element, 'name', - unit.metadata.get('translated_name')) - ElementTree.SubElement(environment_element, 'description').text = \ - unit.metadata['description'] + str(unit.display_order) + 
ElementTree.SubElement(environment_element, 'name').text = unit.name + self._write_translated_fields(environment_element, 'name', unit.translated_name) + ElementTree.SubElement(environment_element, 'description').text = unit.description self._write_translated_fields(environment_element, 'description', - unit.metadata.get('translated_description')) + unit.translated_description) group_list_element = ElementTree.SubElement(environment_element, 'grouplist') - if 'group_ids' in unit.metadata and unit.metadata['group_ids']: - for groupid in sorted(unit.metadata['group_ids']): + if unit.group_ids: + for groupid in sorted(unit.group_ids): ElementTree.SubElement(group_list_element, 'groupid').text = groupid option_list_element = ElementTree.SubElement(environment_element, 'optionlist') - if 'options' in unit.metadata and unit.metadata['options']: - for option in sorted(unit.metadata['options']): + if unit.options: + for option in sorted(unit.options): if option['default']: ElementTree.SubElement(option_list_element, 'groupid', {'default': 'true'}).text = option['group'] diff --git a/plugins/pulp_rpm/plugins/distributors/yum/metadata/primary.py b/plugins/pulp_rpm/plugins/distributors/yum/metadata/primary.py index 92fa62ca5..94343c49f 100644 --- a/plugins/pulp_rpm/plugins/distributors/yum/metadata/primary.py +++ b/plugins/pulp_rpm/plugins/distributors/yum/metadata/primary.py @@ -49,9 +49,9 @@ def add_unit_metadata(self, unit): Add the metadata to primary.xml.gz for the given unit. :param unit: unit whose metadata is to be written - :type unit: pulp.plugins.model.Unit + :type unit: pulp_rpm.plugins.db.models.RpmBase """ - metadata = unit.metadata['repodata']['primary'] + metadata = unit.repodata['primary'] if isinstance(metadata, unicode): metadata = metadata.encode('utf-8') self.metadata_file_handle.write(metadata) diff --git a/plugins/pulp_rpm/plugins/distributors/yum/metadata/updateinfo.py b/plugins/pulp_rpm/plugins/distributors/yum/metadata/updateinfo.py index 8b32c0595..532c9facc 100644 --- a/plugins/pulp_rpm/plugins/distributors/yum/metadata/updateinfo.py +++ b/plugins/pulp_rpm/plugins/distributors/yum/metadata/updateinfo.py @@ -1,8 +1,9 @@ import os from xml.etree import ElementTree -from pulp_rpm.plugins.distributors.yum.metadata.metadata import ( - MetadataFileContext, REPO_DATA_DIR_NAME) +from pulp.plugins.util.metadata_writer import XmlFileContext + +from pulp_rpm.plugins.distributors.yum.metadata.metadata import REPO_DATA_DIR_NAME from pulp_rpm.yum_plugin import util @@ -11,57 +12,42 @@ UPDATE_INFO_XML_FILE_NAME = 'updateinfo.xml.gz' -class UpdateinfoXMLFileContext(MetadataFileContext): +class UpdateinfoXMLFileContext(XmlFileContext): def __init__(self, working_dir, checksum_type=None): - metadata_file_path = os.path.join(working_dir, REPO_DATA_DIR_NAME, UPDATE_INFO_XML_FILE_NAME) - super(UpdateinfoXMLFileContext, self).__init__(metadata_file_path, checksum_type) - - def _write_root_tag_open(self): - - updates_element = ElementTree.Element('updates') - bogus_element = ElementTree.SubElement(updates_element, '') - - updates_tags_string = ElementTree.tostring(updates_element, 'utf-8') - bogus_tag_string = ElementTree.tostring(bogus_element, 'utf-8') - opening_tag, closing_tag = updates_tags_string.split(bogus_tag_string, 1) - - self.metadata_file_handle.write(opening_tag + '\n') - - def _write_root_tag_close_closure(*args): - self.metadata_file_handle.write(closing_tag + '\n') - - self._write_root_tag_close = _write_root_tag_close_closure + super(UpdateinfoXMLFileContext, self).__init__( + 
metadata_file_path, 'updates', checksum_type=checksum_type) - def add_unit_metadata(self, erratum_unit): + def add_unit_metadata(self, item): """ Write the XML representation of erratum_unit to self.metadata_file_handle (updateinfo.xml.gx). - :param erratum_unit: The erratum unit that should be written to updateinfo.xml. - :type erratum_unit: pulp.plugins.model.AssociatedUnit + :param item: The erratum unit that should be written to updateinfo.xml. + :type item: pulp_rpm.plugins.db.models.Errata """ - update_attributes = {'status': erratum_unit.metadata['status'], - 'type': erratum_unit.metadata['type'], - 'version': erratum_unit.metadata['version'], - 'from': erratum_unit.metadata.get('from', '') or ''} + erratum_unit = item + update_attributes = {'status': erratum_unit.status, + 'type': erratum_unit.type, + 'version': erratum_unit.version, + 'from': erratum_unit.errata_from or ''} update_element = ElementTree.Element('update', update_attributes) id_element = ElementTree.SubElement(update_element, 'id') - id_element.text = erratum_unit.unit_key['id'] + id_element.text = erratum_unit.errata_id - issued_attributes = {'date': erratum_unit.metadata['issued']} + issued_attributes = {'date': erratum_unit.issued} ElementTree.SubElement(update_element, 'issued', issued_attributes) reboot_element = ElementTree.SubElement(update_element, 'reboot_suggested') - reboot_element.text = str(erratum_unit.metadata['reboot_suggested']) + reboot_element.text = str(erratum_unit.reboot_suggested) # these elements are optional for key in ('title', 'release', 'rights', 'solution', 'severity', 'summary', 'pushcount'): - value = erratum_unit.metadata.get(key) + value = getattr(erratum_unit, key) if not value: continue @@ -72,14 +58,14 @@ def add_unit_metadata(self, erratum_unit): # these elements must be present even if text is empty for key in ('description',): - value = erratum_unit.metadata.get(key) + value = getattr(erratum_unit, key) if value is None: value = '' sub_element = ElementTree.SubElement(update_element, key) sub_element.text = unicode(value) - updated = erratum_unit.metadata.get('updated') + updated = erratum_unit.updated if updated: updated_attributes = {'date': updated} @@ -87,14 +73,14 @@ def add_unit_metadata(self, erratum_unit): references_element = ElementTree.SubElement(update_element, 'references') - for reference in erratum_unit.metadata.get('references'): + for reference in erratum_unit.references: reference_attributes = {'id': reference['id'] or '', 'title': reference['title'] or '', 'type': reference['type'], 'href': reference['href']} ElementTree.SubElement(references_element, 'reference', reference_attributes) - for pkglist in erratum_unit.metadata.get('pkglist', []): + for pkglist in erratum_unit.pkglist: pkglist_element = ElementTree.SubElement(update_element, 'pkglist') diff --git a/plugins/pulp_rpm/plugins/distributors/yum/publish.py b/plugins/pulp_rpm/plugins/distributors/yum/publish.py index a8e2692f9..116261adf 100644 --- a/plugins/pulp_rpm/plugins/distributors/yum/publish.py +++ b/plugins/pulp_rpm/plugins/distributors/yum/publish.py @@ -4,15 +4,15 @@ import os import subprocess +import mongoengine from pulp.common import dateutils from pulp.common.compat import json from pulp.plugins.config import PluginCallConfiguration from pulp.plugins.conduits.repo_publish import RepoPublishConduit -from pulp.plugins.util.publish_step import PublishStep, UnitPublishStep, CopyDirectoryStep,\ - CreatePulpManifestStep -from pulp.plugins.util.publish_step import AtomicDirectoryPublishStep 
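The updateinfo rewrite above drops the hand-rolled _write_root_tag_open() trick (serializing a dummy child to split '<updates>' from '</updates>') in favor of XmlFileContext's root-tag handling. For orientation, a sketch of building one <update> entry the way add_unit_metadata() does; all values are invented:

    from xml.etree import ElementTree

    update = ElementTree.Element('update', {'status': 'final',
                                            'type': 'security',
                                            'version': '1',
                                            'from': 'errata@example.com'})
    ElementTree.SubElement(update, 'id').text = 'RHSA-0000:0001'
    ElementTree.SubElement(update, 'issued', {'date': '2015-07-14 00:00:00'})
    ElementTree.SubElement(update, 'reboot_suggested').text = 'False'
    print(ElementTree.tostring(update))
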
+from pulp.plugins.util import misc as plugin_misc +from pulp.plugins.util import publish_step as platform_steps +from pulp.server.controllers import repository as repo_controller from pulp.server.db import model -from pulp.server.db.model.criteria import UnitAssociationCriteria from pulp.server.exceptions import InvalidValue, PulpCodedException from pulp_rpm.common import constants @@ -39,7 +39,7 @@ '_storage_path', 'checksum', 'checksumtype', 'repodata'] -class BaseYumRepoPublisher(PublishStep): +class BaseYumRepoPublisher(platform_steps.PluginStep): """ Yum HTTP/HTTPS publisher class that is responsible for the actual publishing of a yum repository over HTTP and/or HTTPS. @@ -59,12 +59,12 @@ def __init__(self, repo, publish_conduit, config, distributor_type, association_ :param association_filters: Any filters to be applied to the list of RPMs being published, See pulp.server.db.model.criteria.UnitAssociationCriteria for details on what can be included in the association_filters - :type association_filters: dict + :type association_filters: mongoengine.Q """ super(BaseYumRepoPublisher, self).__init__(constants.PUBLISH_REPO_STEP, repo, publish_conduit, config, - distributor_type=distributor_type, **kwargs) + plugin_type=distributor_type, **kwargs) self.repomd_file_context = None self.checksum_type = None @@ -126,7 +126,7 @@ def __init__(self, repo, publish_conduit, config, distributor_type, **kwargs): if export_dir: target_dir = os.path.join(export_dir, configuration.get_repo_relative_path(repo, config)) - self.add_child(CopyDirectoryStep(working_directory, target_dir)) + self.add_child(platform_steps.CopyDirectoryStep(working_directory, target_dir)) self.add_child(GenerateListingFileStep(export_dir, target_dir)) else: # Reset the steps to use an internal scratch directory other than the base working dir @@ -140,7 +140,7 @@ def __init__(self, repo, publish_conduit, config, distributor_type, **kwargs): realized_dir = os.path.join(working_directory, 'realized') copy_target = os.path.join(realized_dir, configuration.get_repo_relative_path(repo, config)) - self.add_child(CopyDirectoryStep(content_dir, copy_target)) + self.add_child(platform_steps.CopyDirectoryStep(content_dir, copy_target)) self.add_child(GenerateListingFileStep(realized_dir, copy_target)) # Create the steps to generate the ISO and publish them to their final location @@ -149,19 +149,20 @@ def __init__(self, repo, publish_conduit, config, distributor_type, **kwargs): # create the PULP_MANIFEST file if requested in the config if config.get_boolean(constants.CREATE_PULP_MANIFEST) is True: - self.add_child(CreatePulpManifestStep(output_dir)) + self.add_child(platform_steps.CreatePulpManifestStep(output_dir)) publish_location = [('/', location) for location in configuration.get_export_repo_publish_dirs(repo, config)] - master_dir = configuration.get_master_publish_dir(repo, self.get_distributor_type()) - atomic_publish = AtomicDirectoryPublishStep(output_dir, publish_location, master_dir) + master_dir = configuration.get_master_publish_dir(repo, self.get_plugin_type()) + atomic_publish = platform_steps.AtomicDirectoryPublishStep( + output_dir, publish_location, master_dir) atomic_publish.description = _('Moving ISO to final location') self.add_child(atomic_publish) -class ExportRepoGroupPublisher(PublishStep): +class ExportRepoGroupPublisher(platform_steps.PluginStep): def __init__(self, repo_group, publish_conduit, config, distributor_type): """ @@ -177,7 +178,7 @@ def __init__(self, repo_group, publish_conduit, config, 
distributor_type): super(ExportRepoGroupPublisher, self).__init__(constants.PUBLISH_STEP_EXPORT_REPO_GROUP, repo_group, publish_conduit, config, working_dir=repo_group.working_dir, - distributor_type=distributor_type) + plugin_type=distributor_type) working_dir = self.get_working_dir() scratch_dir = os.path.join(working_dir, 'scratch') @@ -201,11 +202,11 @@ def __init__(self, repo_group, publish_conduit, config, distributor_type): continue repo_config_copy = copy.deepcopy(repo_config) - repo.working_dir = os.path.join(scratch_dir, repo.id) - repo_conduit = RepoPublishConduit(repo.id, distributor_type) + repo.working_dir = os.path.join(scratch_dir, repo.repo_id) + repo_conduit = RepoPublishConduit(repo.repo_id, distributor_type) publisher = ExportRepoPublisher(repo, repo_conduit, repo_config_copy, distributor_type) - publisher.description = _("Exporting Repo: %s") % repo.id + publisher.description = _("Exporting Repo: %s") % repo.repo_id self.add_child(publisher) if empty_repos: os.makedirs(realized_dir) @@ -219,13 +220,14 @@ def __init__(self, repo_group, publish_conduit, config, distributor_type): # create the PULP_MANIFEST file if requested in the config if config.get_boolean(constants.CREATE_PULP_MANIFEST) is True: - self.add_child(CreatePulpManifestStep(output_dir)) + self.add_child(platform_steps.CreatePulpManifestStep(output_dir)) export_dirs = configuration.get_export_repo_group_publish_dirs(repo_group, config) publish_location = [('/', location) for location in export_dirs] master_dir = configuration.get_master_publish_dir(repo_group, distributor_type) - self.add_child(AtomicDirectoryPublishStep(output_dir, publish_location, master_dir)) + self.add_child(platform_steps.AtomicDirectoryPublishStep(output_dir, publish_location, + master_dir)) class Publisher(BaseYumRepoPublisher): @@ -252,13 +254,10 @@ def __init__(self, repo, publish_conduit, config, distributor_type, **kwargs): last_deleted = repo.last_unit_removed date_filter = None - insert_step = None if last_published and \ ((last_deleted and last_published > last_deleted) or not last_deleted): # Add the step to copy the current published directory into place - working_dir = repo.working_dir specific_master = None - if config.get(constants.PUBLISH_HTTPS_KEYWORD): root_publish_dir = configuration.get_https_publish_dir(config) repo_publish_dir = os.path.join(root_publish_dir, repo_relative_path) @@ -270,17 +269,16 @@ def __init__(self, repo, publish_conduit, config, distributor_type, **kwargs): # Only do an incremental publish if the previous publish can be found if os.path.exists(specific_master): - insert_step = CopyDirectoryStep(specific_master, working_dir, - preserve_symlinks=True) # Pass something useful to the super so that it knows the publish info string_date = dateutils.format_iso8601_datetime(last_published) - date_filter = export_utils.create_date_range_filter( - {constants.START_DATE_KEYWORD: string_date}) + date_filter = mongoengine.Q(created__gte=string_date) super(Publisher, self).__init__(repo, publish_conduit, config, distributor_type, association_filters=date_filter, **kwargs) - if insert_step: + if date_filter: + insert_step = platform_steps.CopyDirectoryStep( + specific_master, self.get_working_dir(), preserve_symlinks=True) self.insert_child(0, insert_step) self.rpm_step.fast_forward = True @@ -303,9 +301,8 @@ def __init__(self, repo, publish_conduit, config, distributor_type, **kwargs): listing_steps.append(GenerateListingFileStep(root_publish_dir, repo_publish_dir)) master_publish_dir = 
configuration.get_master_publish_dir(repo, distributor_type) - atomic_publish_step = AtomicDirectoryPublishStep(self.get_working_dir(), - target_directories, - master_publish_dir) + atomic_publish_step = platform_steps.AtomicDirectoryPublishStep( + self.get_working_dir(), target_directories, master_publish_dir) atomic_publish_step.description = _("Publishing files to web") self.add_child(atomic_publish_step) @@ -315,7 +312,7 @@ def __init__(self, repo, publish_conduit, config, distributor_type, **kwargs): self.add_child(step) -class GenerateListingFileStep(PublishStep): +class GenerateListingFileStep(platform_steps.PluginStep): def __init__(self, root_dir, target_dir, step=constants.PUBLISH_GENERATE_LISTING_FILE_STEP): """ Initialize and set the ID of the step @@ -325,11 +322,11 @@ def __init__(self, root_dir, target_dir, step=constants.PUBLISH_GENERATE_LISTING self.root_dir = root_dir self.target_dir = target_dir - def process_main(self): + def process_main(self, item=None): util.generate_listing_files(self.root_dir, self.target_dir) -class InitRepoMetadataStep(PublishStep): +class InitRepoMetadataStep(platform_steps.PluginStep): def __init__(self, step=constants.PUBLISH_INIT_REPOMD_STEP): """ @@ -344,7 +341,7 @@ def initialize(self): self.parent.repomd_file_context.initialize() -class CloseRepoMetadataStep(PublishStep): +class CloseRepoMetadataStep(platform_steps.PluginStep): def __init__(self, step=constants.PUBLISH_CLOSE_REPOMD_STEP): """ @@ -358,7 +355,7 @@ def finalize(self): self.parent.repomd_file_context.finalize() -class PublishRepoMetaDataStep(UnitPublishStep): +class PublishRepoMetaDataStep(platform_steps.UnitPublishStep): """ Step for managing overall repo metadata """ @@ -384,7 +381,7 @@ def finalize(self): self.repomd_file_context.finalize() -class PublishRpmStep(UnitPublishStep): +class PublishRpmStep(platform_steps.UnitPublishStep): """ Step for publishing RPM & SRPM units """ @@ -403,7 +400,7 @@ def initialize(self): """ Create each of the three metadata contexts required for publishing RPM & SRPM """ - total = self._get_total(ignore_filter=self.fast_forward) + total = self.get_total(ignore_filter=self.fast_forward) checksum_type = self.parent.get_checksum_type() self.file_lists_context = FilelistsXMLFileContext(self.get_working_dir(), total, @@ -434,26 +431,26 @@ def finalize(self): repomd.add_metadata_file_metadata('primary', self.primary_context.metadata_file_path, self.primary_context.checksum) - def process_unit(self, unit): + def process_main(self, item=None): """ Link the unit to the content directory and the package_dir - :param unit: The unit to process - :type unit: pulp.plugins.model.Unit + :param item: The item to process or none if this get_iterator is not defined + :type item: pulp_rpm.plugins.db.models.RPM or pulp_rpm.plugins.db.models.SRPM """ + unit = item source_path = unit.storage_path - relative_path = util.get_relpath_from_unit(unit) - destination_path = os.path.join(self.get_working_dir(), relative_path) - self._create_symlink(source_path, destination_path) + destination_path = os.path.join(self.get_working_dir(), unit.file_name) + plugin_misc.create_symlink(source_path, destination_path) for package_dir in self.dist_step.package_dirs: - destination_path = os.path.join(package_dir, relative_path) - self._create_symlink(source_path, destination_path) + destination_path = os.path.join(package_dir, unit.file_name) + plugin_misc.create_symlink(source_path, destination_path) for context in (self.file_lists_context, self.other_context, 
self.primary_context): context.add_unit_metadata(unit) -class PublishMetadataStep(UnitPublishStep): +class PublishMetadataStep(platform_steps.UnitPublishStep): """ Publish extra metadata files that are copied from another repo and not generated """ @@ -463,27 +460,28 @@ def __init__(self): TYPE_ID_YUM_REPO_METADATA_FILE) self.description = _('Publishing Metadata.') - def process_unit(self, unit): + def process_main(self, item=None): """ Copy the metadata file into place and add it tot he repomd file. - :param unit: The unit to process - :type unit: pulp.plugins.model.Unit + :param item: The unit to process + :type item: pulp.plugins.model.Unit """ + unit = item # Copy the file to the location on disk where the published repo is built publish_location_relative_path = os.path.join(self.get_working_dir(), REPO_DATA_DIR_NAME) metadata_file_name = os.path.basename(unit.storage_path) link_path = os.path.join(publish_location_relative_path, metadata_file_name) - self._create_symlink(unit.storage_path, link_path) + plugin_misc.create_symlink(unit.storage_path, link_path) # Add the proper relative reference to the metadata file to repomd self.parent.repomd_file_context.\ add_metadata_file_metadata(unit.unit_key['data_type'], link_path) -class PublishDrpmStep(UnitPublishStep): +class PublishDrpmStep(platform_steps.UnitPublishStep): """ Publish Delta RPMS """ @@ -511,27 +509,28 @@ def is_skipped(self): :rtype: bool """ # skip if there are no DRPMs. - if self._get_total() == 0: + if self.get_total() == 0: return True return super(PublishDrpmStep, self).is_skipped() - def process_unit(self, unit): + def process_main(self, item=None): """ Link the unit to the drpm content directory and update the prestodelta metadata file. - :param unit: The unit to process - :type unit: pulp.plugins.model.Unit + :param item: The unit to process + :type item: pulp.plugins.model.Unit """ + unit = item source_path = unit.storage_path unit_filename = os.path.basename(unit.unit_key['filename']) relative_path = os.path.join('drpms', unit_filename) destination_path = os.path.join(self.get_working_dir(), relative_path) - self._create_symlink(source_path, destination_path) + plugin_misc.create_symlink(source_path, destination_path) for package_dir in self.dist_step.package_dirs: destination_path = os.path.join(package_dir, relative_path) - self._create_symlink(source_path, destination_path) + plugin_misc.create_symlink(source_path, destination_path) self.context.add_unit_metadata(unit) def finalize(self): @@ -545,7 +544,7 @@ def finalize(self): self.context.checksum) -class PublishErrataStep(UnitPublishStep): +class PublishErrataStep(platform_steps.UnitPublishStep): """ Publish all errata """ @@ -554,7 +553,7 @@ def __init__(self, **kwargs): **kwargs) self.context = None self.description = _('Publishing Errata') - self.process_unit = None + self.process_main = None def initialize(self): """ @@ -566,7 +565,7 @@ def initialize(self): self.context.initialize() # set the self.process_unit method to the corresponding method on the # UpdateInfoXMLFileContext as there is no other processing to be done for each unit. 
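The publish steps above place content by symlinking unit storage into the working directory rather than copying it. A simplified sketch of what a create_symlink helper does on POSIX; the real plugin_misc version is more thorough about existing and dangling links:

    import os


    def create_symlink(source_path, link_path):
        # ensure the link's parent directory exists, replace any stale link
        parent_dir = os.path.dirname(link_path)
        if not os.path.exists(parent_dir):
            os.makedirs(parent_dir)
        if os.path.lexists(link_path):
            os.unlink(link_path)
        os.symlink(source_path, link_path)


    create_symlink('/etc/hosts', '/tmp/example_publish/hosts')
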
- self.process_unit = self.context.add_unit_metadata + self.process_main = self.context.add_unit_metadata def finalize(self): """ @@ -579,7 +578,7 @@ def finalize(self): self.context.checksum) -class PublishRpmAndDrpmStepIncremental(UnitPublishStep): +class PublishRpmAndDrpmStepIncremental(platform_steps.UnitPublishStep): """ Publish all incremental errata """ @@ -590,22 +589,24 @@ def __init__(self, **kwargs): unit_fields=PACKAGE_FIELDS, **kwargs) self.description = _('Publishing RPM, SRPM, and DRPM') - def process_unit(self, unit): + def process_main(self, item=None): """ Link the unit to the content directory and the package_dir :param unit: The unit to process - :type unit: pulp.plugins.model.Unit + :type unit: pulp.server.db.model.ContentUnit """ + unit = item source_path = unit.storage_path relative_path = util.get_relpath_from_unit(unit) destination_path = os.path.join(self.get_working_dir(), relative_path) - self._create_symlink(source_path, destination_path) + plugin_misc.create_symlink(source_path, destination_path) filename = unit.unit_key['name'] + '-' + unit.unit_key['version'] + '-' + \ unit.unit_key['release'] + '.' + unit.unit_key['arch'] + '.json' path = os.path.join(self.get_working_dir(), filename) + # TODO REFACTOR FOR NEW MODEL BASED STUFF # Remove all keys that start with an underscore, like _id and _ns for key_to_remove in filter(lambda key: key[0] == '_', unit.metadata.keys()): unit.metadata.pop(key_to_remove) @@ -619,7 +620,7 @@ def process_unit(self, unit): json.dump(dict_to_write, f) -class PublishErrataStepIncremental(UnitPublishStep): +class PublishErrataStepIncremental(platform_steps.UnitPublishStep): """ Publish all incremental errata """ @@ -628,10 +629,18 @@ def __init__(self, **kwargs): TYPE_ID_ERRATA, **kwargs) self.description = _('Publishing Errata') - def process_unit(self, unit): + def process_main(self, item=None): + """ + :param item: the errata unit to process + :type item: pulp_rpm.plugins.db.models.Errata + """ + unit = item # Remove unnecessary keys, like _id for key_to_remove in filter(lambda key: key[0] == '_', unit.metadata.keys()): unit.metadata.pop(key_to_remove) + + # TODO List out all the metadata field by field + # TODO Could the to_json be used? errata_dict = { 'unit_key': unit.unit_key, 'unit_metadata': unit.metadata @@ -642,7 +651,7 @@ def process_unit(self, unit): json.dump(errata_dict, f) -class PublishCompsStep(UnitPublishStep): +class PublishCompsStep(platform_steps.UnitPublishStep): def __init__(self): super(PublishCompsStep, self).__init__(constants.PUBLISH_COMPS_STEP, [TYPE_ID_PKG_GROUP, TYPE_ID_PKG_CATEGORY, @@ -650,38 +659,41 @@ def __init__(self): self.comps_context = None self.description = _('Publishing Comps file') - def get_unit_generator(self): + def get_iterator(self): """ Returns a generator of Named Tuples containing the original unit and the processing method that will be used to process that particular unit. 
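get_iterator(), whose body continues below, pairs each unit with the bound context method that should render it, so process_main() collapses to a single dispatch call. The shape of that pattern in miniature; all names here are illustrative:

    from collections import namedtuple

    UnitProcessor = namedtuple('UnitProcessor', 'unit process')


    def get_iterator(units_by_kind, handlers):
        # yield (unit, handler) pairs; the step later calls item.process(item.unit)
        for kind, units in units_by_kind.items():
            for unit in units:
                yield UnitProcessor(unit, handlers[kind])


    handlers = {'group': lambda unit: '<group id="%s"/>' % unit}
    for item in get_iterator({'group': ['core', 'base']}, handlers):
        print(item.process(item.unit))
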
""" - # set the process unit method to categories - criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_PKG_CATEGORY]) - category_generator = self.get_conduit().get_units(criteria, as_generator=True) + category_generator = repo_controller.find_repo_content_units( + self.get_repo(), repo_content_unit_q=mongoengine.Q(unit_type_id=TYPE_ID_PKG_CATEGORY), + yield_content_unit=True) UnitProcessor = namedtuple('UnitProcessor', 'unit process') for category in category_generator: yield UnitProcessor(category, self.comps_context.add_package_category_unit_metadata) # set the process unit method to groups - criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_PKG_GROUP]) - groups_generator = self.get_conduit().get_units(criteria, as_generator=True) + groups_generator = repo_controller.find_repo_content_units( + self.get_repo(), repo_content_unit_q=mongoengine.Q(unit_type_id=TYPE_ID_PKG_GROUP), + yield_content_unit=True) for group in groups_generator: yield UnitProcessor(group, self.comps_context.add_package_group_unit_metadata) # set the process unit method to environments - criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_PKG_ENVIRONMENT]) - groups_generator = self.get_conduit().get_units(criteria, as_generator=True) + groups_generator = repo_controller.find_repo_content_units( + self.get_repo(), + repo_content_unit_q=mongoengine.Q(unit_type_id=TYPE_ID_PKG_ENVIRONMENT), + yield_content_unit=True) for group in groups_generator: yield UnitProcessor(group, self.comps_context.add_package_environment_unit_metadata) - def process_unit(self, unit): + def process_main(self, item=None): """ Process each unit created by the generator using the associated process command """ - unit.process(unit.unit) + item.process(item.unit) def initialize(self): """ @@ -703,7 +715,7 @@ def finalize(self): self.comps_context.checksum) -class PublishDistributionStep(UnitPublishStep): +class PublishDistributionStep(platform_steps.UnitPublishStep): """ Publish distribution files associated with the anaconda installer """ @@ -722,19 +734,20 @@ def initialize(self): """ When initializing the metadata verify that only one distribution exists """ - if self._get_total() > 1: + if self.get_total() > 1: msg = _('Error publishing repository %(repo)s. ' - 'More than one distribution found.') % {'repo': self.parent.repo.id} + 'More than one distribution found.') % {'repo': self.parent.repo.repo_id} logger.debug(msg) raise Exception(msg) - def process_unit(self, unit): + def process_main(self, item=None): """ Process the distribution unit - :param unit: The unit to process - :type unit: Unit + :param item: The unit to process + :type item pulp_rpm.plugins.db.models.Distribution """ + unit = item self._publish_distribution_treeinfo(unit) # create the Packages directory required for RHEL 5 @@ -752,7 +765,7 @@ def _publish_distribution_treeinfo(self, distribution_unit): :param distribution_unit: The unit for the distribution from which the list of files to be published should be pulled from. 
- :type distribution_unit: AssociatedUnit + :type distribution_unit: pulp_rpm.plugins.db.models.Distribution """ distribution_unit_storage_path = distribution_unit.storage_path src_treeinfo_path = None @@ -769,7 +782,7 @@ def _publish_distribution_treeinfo(self, distribution_unit): symlink_treeinfo_path = os.path.join(self.get_working_dir(), treeinfo_file_name) logger.debug("creating treeinfo symlink from %s to %s" % (src_treeinfo_path, symlink_treeinfo_path)) - self._create_symlink(src_treeinfo_path, symlink_treeinfo_path) + plugin_misc.create_symlink(src_treeinfo_path, symlink_treeinfo_path) def _publish_distribution_files(self, distribution_unit): """ @@ -778,23 +791,25 @@ def _publish_distribution_files(self, distribution_unit): :param distribution_unit: The unit for the distribution from which the list of files to be published should be pulled from. - :type distribution_unit: AssociatedUnit + :type distribution_unit: pulp_rpm.plugins.db.models.Distribution """ - if 'files' not in distribution_unit.metadata: + if not distribution_unit.files: msg = "No distribution files found for unit %s" % distribution_unit logger.warning(msg) return - distro_files = distribution_unit.metadata['files'] + distro_files = distribution_unit.files total_files = len(distro_files) logger.debug("Found %s distribution files to symlink" % total_files) source_path_dir = distribution_unit.storage_path symlink_dir = self.get_working_dir() for dfile in distro_files: + if dfile['relativepath'].startswith('repodata/'): + continue source_path = os.path.join(source_path_dir, dfile['relativepath']) symlink_path = os.path.join(symlink_dir, dfile['relativepath']) - self._create_symlink(source_path, symlink_path) + plugin_misc.create_symlink(source_path, symlink_path) def _publish_distribution_packages_link(self, distribution_unit): """ @@ -805,16 +820,15 @@ def _publish_distribution_packages_link(self, distribution_unit): :param distribution_unit: The unit for the distribution from which the list of files to be published should be pulled from. - :type distribution_unit: AssociatedUnit + :type distribution_unit: pulp_rpm.plugins.db.models.Distribution """ symlink_dir = self.get_working_dir() package_path = None - if KEY_PACKAGEDIR in distribution_unit.metadata and \ - distribution_unit.metadata[KEY_PACKAGEDIR] is not None: + if distribution_unit.packagedir: # The packages_dir is a relative directory that exists underneath the repo directory # Verify that this directory is valid. - package_path = os.path.join(symlink_dir, distribution_unit.metadata[KEY_PACKAGEDIR]) + package_path = os.path.join(symlink_dir, distribution_unit.packagedir) real_symlink_dir = os.path.realpath(symlink_dir) real_package_path = os.path.realpath(package_path) common_prefix = os.path.commonprefix([real_symlink_dir, real_package_path]) @@ -823,7 +837,7 @@ def _publish_distribution_packages_link(self, distribution_unit): # raise a validation exception msg = _('Error publishing repository: %(repo)s. 
The treeinfo file specified a '
                    'packagedir \"%(packagedir)s\" that is not contained within the repository'
-                    % {'repo': self.parent.repo.id, 'packagedir': package_path})
+                    % {'repo': self.get_repo().repo_id, 'packagedir': package_path})
             logger.info(msg)
             raise InvalidValue(KEY_PACKAGEDIR)

@@ -839,7 +853,7 @@ def _publish_distribution_packages_link(self, distribution_unit):

             self.package_dirs.append(default_packages_symlink)


-class CreateIsoStep(PublishStep):
+class CreateIsoStep(platform_steps.PluginStep):
     """
     Export a directory to an ISO or a collection of ISO files

@@ -850,16 +864,17 @@ def __init__(self, content_dir, output_dir):
         self.content_dir = content_dir
         self.output_dir = output_dir

-    def process_main(self):
+    def process_main(self, item=None):
         """
         Publish a directory to an ISO or a collection of ISO files
         """
         image_size = self.get_config().get(constants.ISO_SIZE_KEYWORD)
-        image_prefix = self.get_config().get(constants.ISO_PREFIX_KEYWORD) or self.get_repo().id
+        image_prefix = self.get_config().get(constants.ISO_PREFIX_KEYWORD) or \
+            self.get_repo().repo_id
         generate_iso.create_iso(self.content_dir, self.output_dir, image_prefix, image_size)


-class GenerateSqliteForRepoStep(PublishStep):
+class GenerateSqliteForRepoStep(platform_steps.PluginStep):
     """
     Generate the Sqlite files for a given repository using the createrepo command
     """
@@ -885,7 +900,7 @@ def is_skipped(self):
         """
         return not self.get_config().get('generate_sqlite', False)

-    def process_main(self):
+    def process_main(self, item=None):
         """
         Call out to createrepo command line in order to process the files.
         """
diff --git a/plugins/pulp_rpm/plugins/importers/iso/importer.py b/plugins/pulp_rpm/plugins/importers/iso/importer.py
index 0e1139dc8..89c1812b4 100644
--- a/plugins/pulp_rpm/plugins/importers/iso/importer.py
+++ b/plugins/pulp_rpm/plugins/importers/iso/importer.py
@@ -4,6 +4,8 @@
 from pulp.common import config as config_utils
 from pulp.common.plugins import importer_constants
 from pulp.plugins.importer import Importer
+from pulp.server.controllers import repository as repo_controller
+from pulp.server.db import model as platform_models
 from pulp.server.db.model.criteria import UnitAssociationCriteria

 from pulp_rpm.common import constants, ids
@@ -99,7 +101,9 @@ def metadata(cls):
             'types': [ids.TYPE_ID_ISO]
         }

-    def sync_repo(self, repo, sync_conduit, config):
+    def sync_repo(self, transfer_repo, sync_conduit, config):
+        repo = platform_models.Repository.objects.get(repo_id=transfer_repo.id)
+        sync_conduit.repo = repo
         if config.get(importer_constants.KEY_FEED) is None:
             raise ValueError('Repository without feed cannot be synchronized')
         self.iso_sync = sync.ISOSyncRun(sync_conduit, config)
@@ -107,27 +111,31 @@ def sync_repo(self, repo, sync_conduit, config):
         self.iso_sync = None
         return report

-    def upload_unit(self, repo, type_id, unit_key, metadata, file_path, conduit, config):
+    def upload_unit(self, transfer_repo, type_id, unit_key, metadata, file_path, conduit, config):
         """
         See super(self.__class__, self).upload_unit() for the docblock explaining this method.
         In short, it handles ISO uploads.
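# ----------------------------------------------------------------------
# [Editor's aside, not part of the patch] The upload path below drops the
# conduit's init_unit()/save_unit() round trip. As a hedged sketch built
# only from the calls this hunk introduces (placeholder values are
# illustrative):
#
#     iso = models.ISO(name='example.iso', size=1024, checksum='abc123')
#     iso = models.ISO.objects(**iso.unit_key).first() or iso  # dedupe
#     iso.set_content(file_path)   # move the bits into content storage
#     iso.validate_iso(full_validation=True)
#     iso.save()
#     repo_controller.associate_single_unit(repo, iso)
# ----------------------------------------------------------------------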
""" - iso = models.ISO(unit_key['name'], unit_key['size'], unit_key['checksum']) - iso.init_unit(conduit) + repo = platform_models.Repository.objects.get(repo_id=transfer_repo.id) - shutil.move(file_path, iso.storage_path) + iso = models.ISO( + name=unit_key['name'], size=unit_key['size'], checksum=unit_key['checksum']) + existing_iso = models.ISO.objects(**iso.unit_key).first() + if existing_iso: + iso = existing_iso + + iso.set_content(file_path) validate = config.get_boolean(importer_constants.KEY_VALIDATE) validate = validate if validate is not None else constants.CONFIG_VALIDATE_DEFAULT try: # Let's validate the ISO. This will raise a # ValueError if the ISO does not validate correctly. - iso.validate(full_validation=validate) + iso.validate_iso(full_validation=validate) except ValueError, e: - # If validation raises a ValueError, we should delete the file and raise - os.remove(iso.storage_path) return {'success_flag': False, 'summary': e.message, 'details': None} - iso.save_unit(conduit) + iso.save() + repo_controller.associate_single_unit(repo, iso) return {'success_flag': True, 'summary': None, 'details': None} def validate_config(self, repo, config): diff --git a/plugins/pulp_rpm/plugins/importers/iso/sync.py b/plugins/pulp_rpm/plugins/importers/iso/sync.py index 6f6438481..00491d1e0 100644 --- a/plugins/pulp_rpm/plugins/importers/iso/sync.py +++ b/plugins/pulp_rpm/plugins/importers/iso/sync.py @@ -2,6 +2,8 @@ from gettext import gettext as _ from urlparse import urljoin import logging +import os +import tempfile from nectar import listener, request from nectar.config import DownloaderConfig @@ -9,9 +11,12 @@ from nectar.downloaders.local import LocalFileDownloader from pulp.common.plugins import importer_constants from pulp.common.util import encode_unicode +from pulp.plugins.util import publish_step as platform_steps +from pulp.server.controllers import repository as repo_controller from pulp.server.db.model.criteria import Criteria, UnitAssociationCriteria +from pulp.server.managers.repo import _common as common_utils -from pulp_rpm.common import constants +from pulp_rpm.common import constants, ids from pulp_rpm.common.progress import SyncProgressReport from pulp_rpm.plugins.db import models @@ -140,10 +145,13 @@ def download_succeeded(self, report): # This will update our bytes downloaded self.download_progress(report) iso = report.data + iso.set_content(report.destination) try: if self._validate_downloads: - iso.validate() - iso.save_unit(self.sync_conduit) + iso.validate_iso(storage_path=report.destination) + iso.save() + repo_controller.associate_single_unit(self.sync_conduit.repo, iso) + # We can drop this ISO from the url --> ISO map self.progress_report.num_isos_finished += 1 self.progress_report.update_progress() @@ -174,13 +182,13 @@ def perform_sync(self): # Associate units that are already in Pulp if local_available_isos: search_dicts = [unit.unit_key for unit in local_available_isos] - self.sync_conduit.associate_existing(models.ISO.TYPE, search_dicts) + self.sync_conduit.associate_existing(models.ISO.unit_type_id, search_dicts) # Go get them filez self.progress_report.state = self.progress_report.STATE_ISOS_IN_PROGRESS self._download_isos(local_missing_isos) if self._remove_missing_units: - self._remove_units(remote_missing_isos) + repo_controller.disassociate_units(self.sync_conduit.repo, remote_missing_isos) # Report that we are finished. 
Note that setting the # state to STATE_ISOS_COMPLETE will automatically set the state to STATE_ISOS_FAILED if the @@ -203,14 +211,18 @@ def _download_isos(self, manifest): # For each ISO in the manifest, we need to determine a relative path where we want # it to be stored, and initialize the Unit that will represent it for iso in manifest: - iso.init_unit(self.sync_conduit) iso.bytes_downloaded = 0 # Set the total bytes onto the report self.progress_report.total_bytes += iso.size self.progress_report.update_progress() # We need to build a list of DownloadRequests - download_requests = [request.DownloadRequest(iso.url, iso.storage_path, iso) for - iso in manifest] + download_directory = common_utils.get_working_directory() + download_requests = [] + for iso in manifest: + iso_tmp_dir = tempfile.mkdtemp(dir=download_directory) + iso_name = os.path.basename(iso.url) + iso_download_path = os.path.join(iso_tmp_dir, iso_name) + download_requests.append(request.DownloadRequest(iso.url, iso_download_path, iso)) self.downloader.download(download_requests) def _download_manifest(self): @@ -257,35 +269,26 @@ def _filter_missing_isos(self, manifest): the remote repo. :rtype: tuple """ - - def _unit_key_str(iso): - """ - Return a simple string representation of the unit key of the ISO. - - :param iso: The ISO for which we want a unit key string representation - :type iso: pulp_rpm.plugins.db.models.ISO - """ - return '%s-%s-%s' % (iso.name, iso.checksum, iso.size) - # A list of all the ISOs we have in Pulp - search_criteria = Criteria(fields=models.ISO.UNIT_KEY_ISO) - existing_units = self.sync_conduit.search_all_units(models.ISO.TYPE, search_criteria) - existing_units_by_key = dict([(_unit_key_str(models.ISO.from_unit(unit)), unit) + existing_units = models.ISO.objects() + existing_units_by_key = dict([(unit.unit_key_str, unit) for unit in existing_units]) - existing_unit_keys = set([_unit_key_str(models.ISO.from_unit(unit)) + existing_units.rewind() + existing_unit_keys = set([unit.unit_key_str for unit in existing_units]) # A list of units currently associated with the repository - search_criteria = UnitAssociationCriteria(type_ids=[models.ISO.TYPE]) - existing_repo_units = self.sync_conduit.get_units(search_criteria) - existing_repo_units_by_key = dict([(_unit_key_str(models.ISO.from_unit(unit)), unit) + existing_repo_units = repo_controller.find_repo_content_units( + self.sync_conduit.repo, yield_content_unit=True) + existing_repo_units = list(existing_repo_units) + existing_repo_units_by_key = dict([(unit.unit_key_str, unit) for unit in existing_repo_units]) - existing_repo_unit_keys = set([_unit_key_str(models.ISO.from_unit(unit)) + existing_repo_unit_keys = set([unit.unit_key_str for unit in existing_repo_units]) # A list of the ISOs in the remote repository - available_isos_by_key = dict([(_unit_key_str(iso), iso) for iso in manifest]) - available_iso_keys = set([_unit_key_str(iso) for iso in manifest]) + available_isos_by_key = dict([(iso.unit_key_str, iso) for iso in manifest]) + available_iso_keys = set([iso.unit_key_str for iso in manifest]) # Content that is available locally and just needs to be associated with the repository local_available_iso_keys = set([iso for iso in available_iso_keys @@ -302,13 +305,3 @@ def _unit_key_str(iso): remote_missing_units = [existing_repo_units_by_key[k] for k in remote_missing_unit_keys] return local_missing_isos, local_available_units, remote_missing_units - - def _remove_units(self, units): - """ - Use the sync_conduit's remove_unit call for each unit 
in units.

-        :param units: List of pulp.plugins.model.Units that we want to remove from the repository
-        :type units: list
-        """
-        for unit in units:
-            self.sync_conduit.remove_unit(unit)
diff --git a/plugins/pulp_rpm/plugins/importers/yum/associate.py b/plugins/pulp_rpm/plugins/importers/yum/associate.py
index e9fac6e3d..f07521f21 100644
--- a/plugins/pulp_rpm/plugins/importers/yum/associate.py
+++ b/plugins/pulp_rpm/plugins/importers/yum/associate.py
@@ -4,6 +4,7 @@
 import shutil

 from pulp.server.db.model.criteria import UnitAssociationCriteria
+from pulp.server.controllers import repository as repo_controller

 from pulp_rpm.common import constants
 from pulp_rpm.plugins.db import models
@@ -23,9 +24,9 @@ def associate(source_repo, dest_repo, import_conduit, config, units=None):
                   may be garbage collected.

     :param source_repo: source repo
-    :type source_repo: pulp.plugins.model.Repository
+    :type source_repo: pulp.server.db.model.Repository
     :param dest_repo: destination repo
-    :type dest_repo: pulp.plugins.model.Repository
+    :type dest_repo: pulp.server.db.model.Repository
     :param import_conduit: import conduit passed to the Importer
     :type import_conduit: pulp.plugins.conduits.unit_import.ImportUnitConduit
     :param config: config object for the distributor
@@ -37,7 +38,7 @@ def associate(source_repo, dest_repo, import_conduit, config, units=None):
     if units is None:
         # this might use a lot of RAM since RPMs tend to have lots of metadata
         # TODO: so we should probably do something about that
-        units = import_conduit.get_source_units()
+        units = repo_controller.find_repo_content_units(source_repo, yield_content_unit=True)

     # get config items that we care about
     recursive = config.get(constants.CONFIG_RECURSIVE)
@@ -49,8 +50,8 @@ def associate(source_repo, dest_repo, import_conduit, config, units=None):
     units = None

     associated_units |= copy_rpms(
-        (unit for unit in associated_units if unit.type_id == models.RPM.TYPE),
-        import_conduit, recursive)
+        (unit for unit in associated_units if isinstance(unit, models.RPM)),
+        source_repo, dest_repo, import_conduit, recursive)

     # return here if we shouldn't get child units
     if not recursive:
@@ -70,12 +71,13 @@ def associate(source_repo, dest_repo, import_conduit, config, units=None):
     wanted_rpms = get_rpms_to_copy_by_key(rpm_search_dicts, import_conduit)
     rpm_search_dicts = None
     rpms_to_copy = filter_available_rpms(wanted_rpms, import_conduit)
-    associated_units |= copy_rpms(rpms_to_copy, import_conduit, recursive)
+    associated_units |= copy_rpms(rpms_to_copy, source_repo, dest_repo, import_conduit, recursive)
     rpms_to_copy = None

     # ------ get RPM children of groups ------
     names_to_copy = get_rpms_to_copy_by_name(rpm_names, import_conduit)
-    associated_units |= copy_rpms_by_name(names_to_copy, import_conduit, recursive)
+    associated_units |= copy_rpms_by_name(names_to_copy, source_repo, dest_repo,
+                                          import_conduit, recursive)

     return list(associated_units)
@@ -156,13 +158,17 @@ def filter_available_rpms(rpms, import_conduit):
                            import_conduit.get_source_units)


-def copy_rpms(units, import_conduit, copy_deps, solver=None):
+def copy_rpms(units, source_repo, dest_repo, import_conduit, copy_deps, solver=None):
     """
     Copy RPMs from the source repo to the destination repo, and optionally copy
     dependencies as well. Dependencies are resolved recursively.

     :param units: iterable of Units
     :type units: iterable of pulp.plugins.models.Unit
+    :param source_repo: The repository we are copying units from.
+ :type source_repo: pulp.server.db.model.Repository + :param dest_repo: The repository we are copying units to + :type dest_repo: pulp.server.db.model.Repository :param import_conduit: import conduit passed to the Importer :type import_conduit: pulp.plugins.conduits.unit_import.ImportUnitConduit :param copy_deps: if True, copies dependencies as specified in "Requires" @@ -206,7 +212,8 @@ def copy_rpms(units, import_conduit, copy_deps, solver=None): _LOGGER.debug('Copying deps: %s' % str(sorted([x.unit_key['name'] for x in to_copy]))) if to_copy: - unit_set |= copy_rpms(to_copy, import_conduit, copy_deps, solver) + unit_set |= copy_rpms(to_copy, source_repo, dest_repo, import_conduit, copy_deps, + solver) return unit_set @@ -236,12 +243,16 @@ def _no_checksum_clean_unit_key(unit_tuple): return ret -def copy_rpms_by_name(names, import_conduit, copy_deps): +def copy_rpms_by_name(names, source_repo, dest_repo, import_conduit, copy_deps): """ Copy RPMs from source repo to destination repo by name :param names: iterable of RPM names :type names: iterable of basestring + :param source_repo: The repository we are copying units from. + :type source_repo: pulp.server.db.model.Repository + :param dest_repo: The repository we are copying units to + :type dest_repo: pulp.server.db.model.Repository :param import_conduit: import conduit passed to the Importer :type import_conduit: pulp.plugins.conduits.unit_import.ImportUnitConduit @@ -263,7 +274,7 @@ def copy_rpms_by_name(names, import_conduit, copy_deps): to_copy[model.key_string_without_version] = max( ((model.complete_version_serialized, unit), previous)) - return copy_rpms((unit for v, unit in to_copy.itervalues()), import_conduit, copy_deps) + return copy_rpms((unit for v, unit in to_copy.itervalues()), source_repo, dest_repo, import_conduit, copy_deps) def identify_children_to_copy(units): @@ -308,53 +319,37 @@ def _associate_unit(dest_repo, import_conduit, unit): other means :param dest_repo: destination repo - :type dest_repo: pulp.plugins.model.Repository + :type dest_repo: pulp.server.db.model.Repository :param import_conduit: import conduit passed to the Importer :type import_conduit: pulp.plugins.conduits.unit_import.ImportUnitConduit :param unit: Unit to be copied - :type unit: pulp.plugins.model.Unit + :type unit: pulp.server.db.model.ContentUnit :return: copied unit - :rtype: pulp.plugins.model.Unit + :rtype: pulp.server.db.model.ContentUnit """ - if unit.type_id in (models.PackageGroup.TYPE, - models.PackageCategory.TYPE, - models.PackageEnvironment.TYPE): - new_unit = _safe_copy_unit_without_file(unit) - new_unit.unit_key['repo_id'] = dest_repo.id - saved_unit = import_conduit.save_unit(new_unit) - return saved_unit - elif unit.type_id == models.RPM.TYPE: + if isinstance(unit, models.PackageGroup) or \ + isinstance(unit, models.PackageCategory) or \ + isinstance(unit, models.PackageEnvironment): + new_unit = copy.deepcopy(unit) + # Clear out the old id and repo_id + new_unit.id = None + new_unit.repo_id = dest_repo.repo_id + new_unit.save() + repo_controller.associate_single_unit(repository=dest_repo, unit=new_unit) + return new_unit + elif isinstance(unit, models.RPM): # copy will happen in one batch return unit - elif unit.type_id == models.YumMetadataFile.TYPE: - model = models.YumMetadataFile(unit.unit_key['data_type'], dest_repo.id, unit.metadata) - model.clean_metadata() - relative_path = os.path.join(model.relative_dir, os.path.basename(unit.storage_path)) - new_unit = import_conduit.init_unit(model.TYPE, model.unit_key, 
model.metadata, relative_path)
-        shutil.copyfile(unit.storage_path, new_unit.storage_path)
-        import_conduit.save_unit(new_unit)
-        return new_unit
+    elif isinstance(unit, models.YumMetadataFile):
+        new_unit = copy.deepcopy(unit)
+        new_unit.storage_path = None
+        new_unit.repo_id = dest_repo.repo_id
+        new_unit.set_content(unit.storage_path)
+        new_unit.save()
+        repo_controller.associate_single_unit(repository=dest_repo, unit=new_unit)
+        return new_unit
     else:
-        import_conduit.associate_unit(unit)
+        repo_controller.associate_single_unit(repository=dest_repo, unit=unit)
         return unit

-
-def _safe_copy_unit_without_file(unit):
-    """
-    Makes a deep copy of the unit, removes its "id", and removes anything in
-    "metadata" whose key starts with a "_".
-
-    :param unit: unit to be copied
-    :type unit: pulp.plugins.model.Unit
-
-    :return: copy of the unit
-    :rtype unit: pulp.plugins.model.Unit
-    """
-    new_unit = copy.deepcopy(unit)
-    new_unit.id = None
-    for key in new_unit.metadata.keys():
-        if key.startswith('_'):
-            del new_unit.metadata[key]
-    return new_unit
diff --git a/plugins/pulp_rpm/plugins/importers/yum/depsolve.py b/plugins/pulp_rpm/plugins/importers/yum/depsolve.py
index 78bc983b9..8e9ede65d 100644
--- a/plugins/pulp_rpm/plugins/importers/yum/depsolve.py
+++ b/plugins/pulp_rpm/plugins/importers/yum/depsolve.py
@@ -2,10 +2,13 @@

 import logging

+import mongoengine
 from pulp.plugins.util.misc import paginate
+from pulp.server.controllers import repository as repo_controller
 from pulp.server.db.model.criteria import UnitAssociationCriteria

 from pulp_rpm.common import version_utils
+from pulp_rpm.common import ids
 from pulp_rpm.plugins.db import models
@@ -159,16 +162,15 @@ def fills_requirement(self, unit):

         :param unit: a Unit object that will be examined to determine if it fills this
                      requirement
-        :type unit: pulp.plugins.model.Unit
+        :type unit: pulp.server.db.model.ContentUnit

         :return: True if the unit satisfies the Requirement, False otherwise
         :rtype: bool
         """
-        unit_key = unit.unit_key
-        if self.name != unit_key['name']:
+        if self.name != unit.name:
             return False

         # this is easier to use in the comparison than a full Unit object
-        unit_as_namedtuple = models.RPM.NAMEDTUPLE(**unit_key)
+        unit_as_namedtuple = unit.unit_key_as_named_tuple

         if self.flags == self.EQ:
             if self.is_versioned:
@@ -196,15 +198,13 @@ class Solver(object):
     Resolves RPM dependencies within a pulp repository
     """

-    def __init__(self, search_method):
+    def __init__(self, source_repo):
         """
-        :param search_method: method that takes a UnitAssociationCriteria and
-                              performs a search within a repository. Usually this
Usually this - will be a method on a conduit such as "conduit.get_units" - :type search_method: function + :param source_repo: The source repository that is being searched + :type source_repo: pulp.server.db.model.Repository """ super(Solver, self).__init__() - self.search_method = search_method + self.source_repo = source_repo self._cached_source_with_provides = None self._cached_provides_tree = None self._cached_packages_tree = None @@ -250,21 +250,27 @@ def _build_source_with_provides(self): :return: list of (pulp.plugins.model.Unit, list of provides) """ - fields = list(models.RPM.UNIT_KEY_NAMES) - fields.extend(['provides', 'id', 'version_sort_index', 'release_sort_index']) - criteria = UnitAssociationCriteria(type_ids=[models.RPM.TYPE], unit_fields=fields) - return [self._trim_provides(unit) for unit in self.search_method(criteria, - as_generator=True)] + fields = list(models.RPM.unit_key_fields) + fields.extend(['provides', 'version_sort_index', 'release_sort_index']) + units = repo_controller.find_repo_content_units( + repository=self.source_repo, + repo_content_unit_q=mongoengine.Q(unit_type_id=ids.TYPE_ID_RPM), + unit_fields=fields, yield_content_unit=True + ) + return [self._trim_provides(unit) for unit in units] def _trim_provides(self, unit): """ A method to flatten/strip the "provides" metadata to just the name when building the list of packages. See RHBZ #1185868. + + :param unit: unit to trim + :type unit: pulp_rpm.plugins.db.models.RPM """ new_provides = [] - for provide in unit.metadata.get('provides', []): + for provide in unit.provides: new_provides.append(provide['name']) - unit.metadata['provides'] = new_provides + unit.provides = new_provides return unit @property @@ -312,19 +318,19 @@ def _build_provides_tree(self): source_units = self._source_with_provides tree = {} for unit in source_units: - my_cmp_tuple = (unit.unit_key['epoch'], unit.metadata['version_sort_index'], - unit.metadata['release_sort_index']) - for provide in unit.metadata.get('provides', []): + my_cmp_tuple = (unit.epoch, unit.version_sort_index, + unit.release_sort_index) + for provide in unit.provides: unit_dict = tree.setdefault(provide, {}) - newest_version = unit_dict.get(unit.unit_key['name'], None) + newest_version = unit_dict.get(unit.name, None) if newest_version: - newest_cmp_tuple = (newest_version.unit_key['epoch'], - newest_version.metadata['version_sort_index'], - newest_version.metadata['release_sort_index']) + newest_cmp_tuple = (newest_version.epoch, + newest_version.version_sort_index, + newest_version.release_sort_index) if cmp(my_cmp_tuple, newest_cmp_tuple) == 1: - unit_dict[unit.unit_key['name']] = unit + unit_dict[unit.name] = unit else: - unit_dict[unit.unit_key['name']] = unit + unit_dict[unit.name] = unit return tree @property @@ -365,7 +371,7 @@ def _build_packages_tree(self): """ tree = {} for unit in self._source_with_provides: - version_list = tree.setdefault(unit.unit_key['name'], []) + version_list = tree.setdefault(unit.name, []) version_list.append(unit) return tree diff --git a/plugins/pulp_rpm/plugins/importers/yum/existing.py b/plugins/pulp_rpm/plugins/importers/yum/existing.py index ceb5ac9e1..3e886bf18 100644 --- a/plugins/pulp_rpm/plugins/importers/yum/existing.py +++ b/plugins/pulp_rpm/plugins/importers/yum/existing.py @@ -1,17 +1,19 @@ import logging import os +from pulp.plugins.loader import api as plugin_api from pulp.plugins.util.misc import paginate +from pulp.server.controllers import repository as repo_controller +from pulp.server.controllers import 
units as units_controller from pulp.server.db.model.criteria import Criteria, UnitAssociationCriteria -from pulp_rpm.plugins.db import models -from pulp_rpm.yum_plugin.util import get_relpath_from_unit +from pulp_rpm.common import ids _LOGGER = logging.getLogger(__name__) -def check_repo(wanted, unit_search_method): +def check_repo(wanted): """ Given an iterable of units as namedtuples, this function will search for them using the given search method and return the set of tuples that were not @@ -37,19 +39,21 @@ def check_repo(wanted, unit_search_method): sorted_units = _sort_by_type(wanted) # UAQ for each type for unit_type, values in sorted_units.iteritems(): - model = models.TYPE_MAP[unit_type] - fields = model.UNIT_KEY_NAMES + ('_storage_path',) - rpm_srpm_drpm = unit_type in (models.RPM.TYPE, models.SRPM.TYPE, models.DRPM.TYPE) - unit_keys_generator = (unit._asdict() for unit in values.copy()) + model = plugin_api.get_unit_model_by_id(unit_type) - for unit in get_existing_units(unit_keys_generator, fields, unit_type, unit_search_method): + fields = model.unit_key_fields + ('_storage_path',) + rpm_srpm_drpm = unit_type in (ids.TYPE_ID_RPM, + ids.TYPE_ID_SRPM, + ids.TYPE_ID_DRPM) + + unit_generator = (model(**unit_tuple._asdict()) for unit_tuple in values) + for unit in units_controller.find_units(unit_generator, fields=fields): if rpm_srpm_drpm: # For RPMs, SRPMs and DRPMs, also check if the file exists on the filesystem. # If not, we do not want to skip downloading the unit. if unit.storage_path is None or not os.path.isfile(unit.storage_path): continue - named_tuple = model(metadata=unit.metadata, **unit.unit_key).as_named_tuple - values.discard(named_tuple) + values.discard(unit.unit_key_as_named_tuple) ret = set() ret.update(*sorted_units.values()) @@ -93,73 +97,28 @@ def check_all_and_associate(wanted, sync_conduit): """ sorted_units = _sort_by_type(wanted) for unit_type, values in sorted_units.iteritems(): - model = models.TYPE_MAP[unit_type] - unit_fields = model.UNIT_KEY_NAMES + ('_storage_path', 'filename') - rpm_srpm_drpm = unit_type in (models.RPM.TYPE, models.SRPM.TYPE, models.DRPM.TYPE) - rpm_or_srpm = unit_type in (models.RPM.TYPE, models.SRPM.TYPE) - - unit_keys_generator = (unit._asdict() for unit in values.copy()) - for unit in get_all_existing_units(unit_keys_generator, unit_fields, unit_type, - sync_conduit.search_all_units): - # For RPMs, SRPMs and DRPMs, also check if the file exists on the filesystem. - # If not, we do not want to skip downloading the unit. + model = plugin_api.get_unit_model_by_id(unit_type) + fields = model.unit_key_fields + ('storage_path',) + rpm_srpm_drpm = unit_type in (ids.TYPE_ID_RPM, + ids.TYPE_ID_SRPM, + ids.TYPE_ID_DRPM) + + unit_generator = (model(**unit_tuple._asdict()) for unit_tuple in values) + for unit in units_controller.find_units(unit_generator, fields=fields): if rpm_srpm_drpm: + # For RPMs, SRPMs and DRPMs, also check if the file exists on the filesystem. + # If not, we do not want to skip downloading the unit. if unit.storage_path is None or not os.path.isfile(unit.storage_path): continue - - # Since the unit is already downloaded, call respective sync_conduit calls to import - # the unit in given repository. 
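# ----------------------------------------------------------------------
# [Editor's aside, not part of the patch] check_repo() and
# check_all_and_associate() now share one lookup idiom: hydrate throwaway
# model instances from the wanted namedtuples and let
# units_controller.find_units() yield the ones already in the database.
# Hedged sketch using only names visible in this hunk:
#
#     model = plugin_api.get_unit_model_by_id(ids.TYPE_ID_RPM)
#     wanted_units = (model(**t._asdict()) for t in wanted)
#     fields = model.unit_key_fields + ('_storage_path',)
#     for unit in units_controller.find_units(wanted_units, fields=fields):
#         wanted.discard(unit.unit_key_as_named_tuple)
# ----------------------------------------------------------------------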
- if rpm_or_srpm: - unit_key = unit.unit_key - rpm_or_srpm_unit = model(unit_key['name'], unit_key['epoch'], unit_key['version'], - unit_key['release'], unit_key['arch'], - unit_key['checksumtype'], unit_key['checksum'], - unit.metadata) - relative_path = rpm_or_srpm_unit.relative_path - else: - relative_path = get_relpath_from_unit(unit) - downloaded_unit = sync_conduit.init_unit(unit_type, unit.unit_key, - unit.metadata, relative_path) - - # 1125388 - make sure we keep storage_path on the new unit model obj - downloaded_unit.storage_path = unit.storage_path - sync_conduit.save_unit(downloaded_unit) - - # Discard already downloaded unit from the return value. - named_tuple = model(metadata=unit.metadata, **unit.unit_key).as_named_tuple - values.discard(named_tuple) + # Add the existing unit to the repository + repo_controller.associate_single_unit(sync_conduit.repo, unit) + values.discard(unit.unit_key_as_named_tuple) ret = set() ret.update(*sorted_units.values()) return ret -def get_all_existing_units(search_dicts, unit_fields, unit_type, search_method): - """ - Get all existing units on the server which match given search_dicts using - given search_method. - - :param search_dicts: unit keys generator - :type search_dicts: iterator of unit keys - :param unit_fields: unit fields to be requested to the search_method - :type unit_fields: list or tuple - :param unit_type: unit type - :type unit_type: basestring - :param search_method: search method to be used to search for non-repo-specific units - :type search_method: a search method accepting a unit type and - pulp.server.db.criteria.Criteria as parameters - :return: generator of Units found using the search_method - :rtype: iterator of pulp.plugins.model.Unit - """ - # Instead of separate query for each unit, we are using paginate to query - # for a lot of units at once. 
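# ----------------------------------------------------------------------
# [Editor's aside, not part of the patch] The block being deleted here
# re-imported already-downloaded units through the conduit
# (init_unit/save_unit); the rewrite above collapses it to one controller
# call per existing unit. In effect:
#
#     if unit.storage_path and os.path.isfile(unit.storage_path):
#         repo_controller.associate_single_unit(sync_conduit.repo, unit)
#         values.discard(unit.unit_key_as_named_tuple)
# ----------------------------------------------------------------------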
- for segment in paginate(search_dicts): - unit_filters = {'$or': list(segment)} - criteria = Criteria(filters=unit_filters, fields=unit_fields) - for result in search_method(unit_type, criteria): - yield result - - def _sort_by_type(wanted): ret = {} for unit in wanted: diff --git a/plugins/pulp_rpm/plugins/importers/yum/importer.py b/plugins/pulp_rpm/plugins/importers/yum/importer.py index f25311cb9..a68972e95 100644 --- a/plugins/pulp_rpm/plugins/importers/yum/importer.py +++ b/plugins/pulp_rpm/plugins/importers/yum/importer.py @@ -2,9 +2,9 @@ from pulp.plugins.importer import Importer from pulp.common.config import read_json_config +from pulp.server.db import model as platform_models from pulp_rpm.common import ids -from pulp_rpm.plugins.db import models from pulp_rpm.plugins.importers.yum import sync, associate, upload, config_validate @@ -35,23 +35,29 @@ def metadata(cls): 'types': [ ids.TYPE_ID_DISTRO, ids.TYPE_ID_DRPM, ids.TYPE_ID_ERRATA, ids.TYPE_ID_PKG_GROUP, ids.TYPE_ID_PKG_CATEGORY, ids.TYPE_ID_RPM, - ids.TYPE_ID_SRPM, models.YumMetadataFile.TYPE, ids.TYPE_ID_PKG_ENVIRONMENT + ids.TYPE_ID_SRPM, ids.TYPE_ID_YUM_REPO_METADATA_FILE, ids.TYPE_ID_PKG_ENVIRONMENT ] } def validate_config(self, repo, config): return config_validate.validate(config) - def import_units(self, source_repo, dest_repo, import_conduit, config, units=None): + def import_units(self, source_transfer_repo, dest_transfer_repo, import_conduit, config, units=None): + source_repo = platform_models.Repository.objects.get(repo_id=source_transfer_repo.id) + dest_repo = platform_models.Repository.objects.get(repo_id=dest_transfer_repo.id) + return associate.associate(source_repo, dest_repo, import_conduit, config, units) - def upload_unit(self, repo, type_id, unit_key, metadata, file_path, conduit, config): + def upload_unit(self, transfer_repo, type_id, unit_key, metadata, file_path, conduit, config): + repo = platform_models.Repository.objects.get(repo_id=transfer_repo.id) + repo.repo_id = transfer_repo.id + conduit.repo = repo return upload.upload(repo, type_id, unit_key, metadata, file_path, conduit, config) - def sync_repo(self, repo, sync_conduit, call_config): + def sync_repo(self, transfer_repo, sync_conduit, call_config): """ - :param repo: metadata describing the repository - :type repo: pulp.plugins.model.Repository + :param transfer_repo: metadata describing the repository + :type transfer_repo: pulp.plugins.model.Repository :param sync_conduit: provides access to relevant Pulp functionality :type sync_conduit: pulp.plugins.conduits.repo_sync.RepoSyncConduit @@ -62,6 +68,8 @@ def sync_repo(self, repo, sync_conduit, call_config): :return: report of the details of the sync :rtype: pulp.plugins.model.SyncReport """ + repo = platform_models.Repository.objects.get(repo_id=transfer_repo.id) + sync_conduit.repo = repo self._current_sync = sync.RepoSync(repo, sync_conduit, call_config) report = self._current_sync.run() self._current_sync.finalize() diff --git a/plugins/pulp_rpm/plugins/importers/yum/listener.py b/plugins/pulp_rpm/plugins/importers/yum/listener.py index 21774453b..df9b0b6ba 100644 --- a/plugins/pulp_rpm/plugins/importers/yum/listener.py +++ b/plugins/pulp_rpm/plugins/importers/yum/listener.py @@ -1,9 +1,9 @@ import logging -import shutil from nectar.listener import DownloadEventListener, AggregatingEventListener from pulp.common.plugins import importer_constants from pulp.plugins.util import verification +from pulp.server.controllers import repository as repo_controller from pulp_rpm.common import 
constants from pulp_rpm.plugins.db import models @@ -78,21 +78,14 @@ def download_succeeded(self, report): # these are the only types we store repo metadata snippets on in the DB if isinstance(model, (models.RPM, models.SRPM)): self.metadata_files.add_repodata(model) - # init unit, which is idempotent - unit = self.sync_conduit.init_unit(model.TYPE, model.unit_key, model.metadata, - model.relative_path) - # check if the unit has duplicate nevra - repo_id = self.sync_conduit.repo_id - purge.remove_unit_duplicate_nevra(unit.unit_key, unit.type_id, repo_id) - # move to final location. - # we cannot use here shutil.move because it preserves all the file attributes, - # even the selinux labels from the the source directory, that has different label - # from the desination one. - # we dont't have to worry about the content from working directory as it gets cleaned up, - # when a task finishes. - shutil.copy(report.destination, unit.storage_path) - # save unit - self.sync_conduit.save_unit(unit) + + purge.remove_unit_duplicate_nevra(model.unit_key, model._content_type_id, self.sync_conduit.repo) + + model.set_content(report.destination) + model.save() + + repo_controller.associate_single_unit(self.sync_conduit.repo, model) + self.progress_report['content'].success(model) self.sync_conduit.set_progress(self.progress_report) @@ -114,7 +107,7 @@ def _verify_size(self, model, report): fails, the error is noted in this instance's progress report and the error is re-raised. :param model: domain model instance of the package that was downloaded - :type model: pulp_rpm.plugins.db.models.RPM + :type model: pulp_rpm.plugins.db.models.RpmBase :param report: report handed to this listener by the downloader :type report: nectar.report.DownloadReport @@ -126,13 +119,13 @@ def _verify_size(self, model, report): try: with open(report.destination) as dest_file: - verification.verify_size(dest_file, model.metadata['size']) + verification.verify_size(dest_file, model.size) except verification.VerificationException, e: error_report = { constants.UNIT_KEY: model.unit_key, constants.ERROR_CODE: constants.ERROR_SIZE_VERIFICATION, - constants.ERROR_KEY_EXPECTED_SIZE: model.metadata['size'], + constants.ERROR_KEY_EXPECTED_SIZE: model.size, constants.ERROR_KEY_ACTUAL_SIZE: e[0] } self.progress_report['content'].failure(model, error_report) @@ -145,7 +138,7 @@ def _verify_checksum(self, model, report): fails, the error is noted in this instance's progress report and the error is re-raised. 
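# ----------------------------------------------------------------------
# [Editor's aside, not part of the patch] With unit fields promoted from
# unit_key/metadata dict entries to real document attributes, both
# verification helpers read straight off the model. A hedged sketch of
# the shared pattern (the two checks are combined here for illustration;
# the patch keeps them in separate methods):
#
#     with open(report.destination) as dest_file:
#         verification.verify_size(dest_file, model.size)
#         dest_file.seek(0)  # rewind before hashing the same handle
#         verification.verify_checksum(dest_file, model.checksumtype,
#                                      model.checksum)
# ----------------------------------------------------------------------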
:param model: domain model instance of the package that was downloaded - :type model: pulp_rpm.plugins.db.models.RPM + :type model: pulp_rpm.plugins.db.models.RpmBase :param report: report handed to this listener by the downloader :type report: nectar.report.DownloadReport @@ -157,24 +150,24 @@ def _verify_checksum(self, model, report): try: with open(report.destination) as dest_file: - verification.verify_checksum(dest_file, model.unit_key['checksumtype'], - model.unit_key['checksum']) + verification.verify_checksum(dest_file, model.checksumtype, + model.checksum) except verification.VerificationException, e: error_report = { - constants.NAME: model.unit_key['name'], + constants.NAME: model.name, constants.ERROR_CODE: constants.ERROR_CHECKSUM_VERIFICATION, - constants.CHECKSUM_TYPE: model.unit_key['checksumtype'], - constants.ERROR_KEY_CHECKSUM_EXPECTED: model.unit_key['checksum'], + constants.CHECKSUM_TYPE: model.checksumtype, + constants.ERROR_KEY_CHECKSUM_EXPECTED: model.checksum, constants.ERROR_KEY_CHECKSUM_ACTUAL: e[0] } self.progress_report['content'].failure(model, error_report) raise except verification.InvalidChecksumType, e: error_report = { - constants.NAME: model.unit_key['name'], + constants.NAME: model.name, constants.ERROR_CODE: constants.ERROR_CHECKSUM_TYPE_UNKNOWN, - constants.CHECKSUM_TYPE: model.unit_key['checksumtype'], + constants.CHECKSUM_TYPE: model.checksumtype, constants.ACCEPTED_CHECKSUM_TYPES: verification.CHECKSUM_FUNCTIONS.keys() } self.progress_report['content'].failure(model, error_report) diff --git a/plugins/pulp_rpm/plugins/importers/yum/parse/treeinfo.py b/plugins/pulp_rpm/plugins/importers/yum/parse/treeinfo.py index be4676391..a7e4dfd3b 100644 --- a/plugins/pulp_rpm/plugins/importers/yum/parse/treeinfo.py +++ b/plugins/pulp_rpm/plugins/importers/yum/parse/treeinfo.py @@ -5,12 +5,13 @@ import tempfile from lxml import etree as ET +import mongoengine from nectar.listener import AggregatingEventListener from nectar.request import DownloadRequest from pulp.plugins.util import verification from pulp.server.exceptions import PulpCodedValidationException -from pulp.server.db.model.criteria import UnitAssociationCriteria -from pulp.server.util import copytree as pulp_copytree +from pulp.server.controllers import repository as repo_controller +from pulp.server.db import model as platform_models from pulp_rpm.common import constants, ids from pulp_rpm.plugins.db import models @@ -29,10 +30,12 @@ _LOGGER = logging.getLogger(__name__) -def sync(sync_conduit, feed, working_dir, nectar_config, report, progress_callback): +def sync(repo, sync_conduit, feed, working_dir, nectar_config, report, progress_callback): """ Look for a distribution in the target repo and sync it if found + :param repo: The repository that is the target of the sync + :type repo: pulp.server.db.model.Repository :param sync_conduit: conduit provided by the platform :type sync_conduit: pulp.plugins.conduits.repo_sync.RepoSyncConduit :param feed: URL of the yum repo being sync'd @@ -47,9 +50,6 @@ def sync(sync_conduit, feed, working_dir, nectar_config, report, progress_callba :param progress_callback: function that takes no arguments but induces the current progress report to be sent. """ - # this temporary dir will hopefully be moved to the unit's storage path - # if all downloads go well. 
If not, it will be deleted below, ensuring a - # complete cleanup tmp_dir = tempfile.mkdtemp(dir=working_dir) try: treefile_path = get_treefile(feed, tmp_dir, nectar_config) @@ -64,8 +64,10 @@ def sync(sync_conduit, feed, working_dir, nectar_config, report, progress_callba report['state'] = constants.STATE_FAILED return - distribution_type_criteria = UnitAssociationCriteria(type_ids=[ids.TYPE_ID_DISTRO]) - existing_units = sync_conduit.get_units(criteria=distribution_type_criteria) + existing_units = repo_controller.find_repo_content_units( + repo, repo_content_unit_q=mongoengine.Q(unit_type_id=ids.TYPE_ID_DISTRO), + yield_content_unit=True) + existing_units = list(existing_units) # skip this whole process if the upstream treeinfo file hasn't changed if len(existing_units) == 1 and existing_distribution_is_current(existing_units[0], model): @@ -82,21 +84,22 @@ def sync(sync_conduit, feed, working_dir, nectar_config, report, progress_callba _LOGGER.debug('downloading distribution files') downloader.download(file_to_download_request(f, feed, tmp_dir) for f in files) if len(listener.failed_reports) == 0: - unit = sync_conduit.init_unit(ids.TYPE_ID_DISTRO, model.unit_key, model.metadata, - model.relative_path) + model.set_content(tmp_dir) + model.save() + # The save sets the content path, which is needed to generate the download_reports + # Long term this should be done by a serializer model.process_download_reports(listener.succeeded_reports) - # remove pre-existing dir - shutil.rmtree(unit.storage_path, ignore_errors=True) - pulp_copytree(tmp_dir, unit.storage_path) - # mkdtemp is very paranoid, so we'll change to more sensible perms - os.chmod(unit.storage_path, 0o775) - sync_conduit.save_unit(unit) + model.save() + + repo_controller.associate_single_unit(repo, model) + # find any old distribution units and remove them. See BZ #1150714 for existing_unit in existing_units: - if existing_unit != unit: + if existing_unit != model: _LOGGER.info("Removing out-of-date distribution unit %s for repo %s" % (existing_unit.unit_key, sync_conduit.repo_id)) - sync_conduit.remove_unit(existing_unit) + platform_models.RepositoryContentUnit.objects( + repo_id=sync_conduit.repo_id, unit_id=existing_unit.id).delete() else: _LOGGER.error('some distro file downloads failed') report['state'] = constants.STATE_FAILED @@ -114,7 +117,7 @@ def existing_distribution_is_current(existing_unit, model): make that determination. :param existing_unit: unit that currently exists in the repo - :type existing_unit: pulp.plugins.model.AssociatedUnit + :type existing_unit: pulp_rpm.plugins.db.models.Distribution :param model: this model's unit key will be searched for in the DB :type model: pulp_rpm.plugins.db.models.Distribution @@ -123,8 +126,8 @@ def existing_distribution_is_current(existing_unit, model): missing. Otherwise, True. 
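# ----------------------------------------------------------------------
# [Editor's aside, not part of the patch] The currency check now compares
# attributes rather than metadata dict lookups. In effect (the comparison
# operator on the final line is assumed, since it falls outside this
# hunk):
#
#     existing_timestamp = existing_unit.timestamp
#     remote_timestamp = model.timestamp
#     if existing_timestamp is None or remote_timestamp is None:
#         return False  # missing data: re-fetch the distribution
#     return existing_timestamp >= remote_timestamp
# ----------------------------------------------------------------------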
:rtype: bool """ - existing_timestamp = existing_unit.metadata.get(KEY_TIMESTAMP) - remote_timestamp = model.metadata.get(KEY_TIMESTAMP) + existing_timestamp = existing_unit.timestamp + remote_timestamp = model.timestamp if existing_timestamp is None or remote_timestamp is None: _LOGGER.debug('treeinfo timestamp missing; will fetch upstream distribution') @@ -328,16 +331,29 @@ def parse_treefile(path): packagedir = None try: - model = models.Distribution( - parser.get(SECTION_GENERAL, 'family'), - variant, - parser.get(SECTION_GENERAL, 'version'), - parser.get(SECTION_GENERAL, 'arch'), - metadata={ - KEY_PACKAGEDIR: packagedir, - KEY_TIMESTAMP: float(parser.get(SECTION_GENERAL, KEY_TIMESTAMP)), - } + new_model = models.Distribution( + family=parser.get(SECTION_GENERAL, 'family'), + variant=variant, + version=parser.get(SECTION_GENERAL, 'version'), + arch=parser.get(SECTION_GENERAL, 'arch'), + packagedir=packagedir, + timestamp=float(parser.get(SECTION_GENERAL, KEY_TIMESTAMP)) ) + # Look for an existing distribution + existing_dist = models.Distribution.objects( + family=new_model.family, + variant=new_model.variant, + version=new_model.version, + arch=new_model.arch + ).first() + if existing_dist: + # update with the new information: + existing_dist.packagedir = packagedir + existing_dist.timestamp = new_model.timestamp + model = existing_dist + else: + model = new_model + except (ConfigParser.NoSectionError, ConfigParser.NoOptionError): raise ValueError('invalid treefile: could not find unit key components') files = {} diff --git a/plugins/pulp_rpm/plugins/importers/yum/purge.py b/plugins/pulp_rpm/plugins/importers/yum/purge.py index 24809dc99..87763c10c 100644 --- a/plugins/pulp_rpm/plugins/importers/yum/purge.py +++ b/plugins/pulp_rpm/plugins/importers/yum/purge.py @@ -218,10 +218,10 @@ def get_existing_units(model, unit_search_func): :type unit_search_func; function :return: iterable of Unit instances that appear in the repository - :rtype: iterable of pulp.plugins.model.Unit + :rtype: iterable of pulp.server.db.model.ContentUnit """ - criteria = UnitAssociationCriteria([model.TYPE], - unit_fields=model.UNIT_KEY_NAMES) + criteria = UnitAssociationCriteria([model.unit_type_id], + unit_fields=model.unit_key_fields) return unit_search_func(criteria) diff --git a/plugins/pulp_rpm/plugins/importers/yum/repomd/alternate.py b/plugins/pulp_rpm/plugins/importers/yum/repomd/alternate.py index 2e5c9b347..c9cd743ae 100644 --- a/plugins/pulp_rpm/plugins/importers/yum/repomd/alternate.py +++ b/plugins/pulp_rpm/plugins/importers/yum/repomd/alternate.py @@ -74,12 +74,11 @@ def get_requests(self): :rtype: iterable """ for unit in self.units: - base_url = unit.metadata.get('base_url') or self.base_url - url = self._url_modify(base_url, path_append=unit.download_path) - file_name = os.path.basename(unit.relative_path) - destination = os.path.join(self.dst_dir, file_name) + base_url = unit.base_url or self.base_url + url = self._url_modify(base_url, path_append=unit.filename) + destination = os.path.join(self.dst_dir, unit.filename) request = Request( - type_id=unit.TYPE, + type_id=unit.unit_type_id, unit_key=unit.unit_key, url=url, destination=destination) diff --git a/plugins/pulp_rpm/plugins/importers/yum/repomd/group.py b/plugins/pulp_rpm/plugins/importers/yum/repomd/group.py index d5797167e..ced04ee87 100644 --- a/plugins/pulp_rpm/plugins/importers/yum/repomd/group.py +++ b/plugins/pulp_rpm/plugins/importers/yum/repomd/group.py @@ -40,23 +40,24 @@ def process_group_element(repo_id, element): 
user_visible = _parse_bool(element.find('uservisible').text) \ if element.find('uservisible') is not None else True - return models.PackageGroup.from_package_info({ - 'conditional_package_names': conditional, - 'default': group_default, - 'default_package_names': default, - 'description': description, - # default of 1024 is from yum's own parsing of these objects - 'display_order': int(display_order.text) if display_order else 1024, - 'id': element.find('id').text, - 'langonly': langonly.text if langonly else None, - 'mandatory_package_names': mandatory, - 'name': name, - 'optional_package_names': optional, - 'repo_id': repo_id, - 'translated_description': translated_description, - 'translated_name': translated_name, - 'user_visible': user_visible, - }) + unit = models.PackageGroup() + unit.conditional_package_names = conditional + unit.default = group_default + unit.default_package_names = default + unit.description = description + # default of 1024 is from yum's own parsing of these objects + unit.display_order = int(display_order.text) if display_order else 1024 + unit.package_group_id = element.find('id').text + unit.langonly = langonly.text if langonly else None + unit.mandatory_package_names = mandatory + unit.name = name + unit.optional_package_names = optional + unit.repo_id = repo_id + unit.translated_description = translated_description + unit.translated_name = translated_name + unit.user_visible = user_visible + + return unit def process_category_element(repo_id, element): @@ -76,17 +77,17 @@ def process_category_element(repo_id, element): display_order = element.find('display_order') groups = element.find('grouplist').findall('groupid') - return models.PackageCategory.from_package_info({ - 'description': description, - # default of 1024 is from yum's own parsing of these objects - 'display_order': int(display_order.text) if display_order is not None else 1024, - 'packagegroupids': [group.text for group in groups], - 'id': element.find('id').text, - 'name': name, - 'repo_id': repo_id, - 'translated_description': translated_description, - 'translated_name': translated_name, - }) + unit = models.PackageCategory() + unit.description = description + # default of 1024 is from yum's own parsing of these objects + unit.display_order = int(display_order.text) if display_order is not None else 1024 + unit.group_ids = [group.text for group in groups] + unit.package_category_id = element.find('id').text + unit.name = name + unit.repo_id = repo_id + unit.translated_description = translated_description + unit.translated_name = translated_name + return unit def process_environment_element(repo_id, element): @@ -114,18 +115,18 @@ def process_environment_element(repo_id, element): default = group.attrib.get('default', False) options.append({'group': group.text, 'default': default}) - return models.PackageEnvironment.from_package_info({ - 'description': description, - # default of 1024 is from yum's own parsing of these objects - 'display_order': int(display_order.text) if display_order is not None else 1024, - 'group_ids': [group.text for group in groups], - 'id': element.find('id').text, - 'name': name, - 'repo_id': repo_id, - 'translated_description': translated_description, - 'translated_name': translated_name, - 'options': options - }) + unit = models.PackageEnvironment() + unit.description = description + # default of 1024 is from yum's own parsing of these objects + unit.display_order = int(display_order.text) if display_order is not None else 1024 + unit.group_ids = [group.text for group 
in groups] + unit.package_environment_id = element.find('id').text + unit.name = name + unit.repo_id = repo_id + unit.translated_description = translated_description + unit.translated_name = translated_name + unit.options = options + return unit def _parse_packagelist(packages): diff --git a/plugins/pulp_rpm/plugins/importers/yum/repomd/metadata.py b/plugins/pulp_rpm/plugins/importers/yum/repomd/metadata.py index e15259513..5d8ac370f 100644 --- a/plugins/pulp_rpm/plugins/importers/yum/repomd/metadata.py +++ b/plugins/pulp_rpm/plugins/importers/yum/repomd/metadata.py @@ -312,7 +312,7 @@ def generate_db_key(unit_key): # clean out these entries if they exist, because they won't be in the # XML files we're indexing. unit_key.pop('checksum', None) - unit_key.pop('checksumtype', None) + unit_key.pop('checksum_type', None) sorted_key_names = sorted(unit_key.keys()) return '::'.join('%s:%s' % (name, unit_key[name]) for name in sorted_key_names) @@ -323,9 +323,8 @@ def add_repodata(self, model): based on data obtained in the raw XML snippets. :param model: model instance to manipulate - :type model: pulp_rpm.plugins.db.models.RPM + :type model: pulp_rpm.plugins.db.models.RpmBase """ - repodata = model.metadata.setdefault('repodata', {}) db_key = self.generate_db_key(model.unit_key) for filename, metadata_key, process_func in ( (filelists.METADATA_FILE_NAME, 'files', filelists.process_package_element), @@ -336,13 +335,13 @@ def add_repodata(self, model): raw_xml = db_file[db_key] finally: db_file.close() - repodata[filename] = raw_xml + model.repodata[filename] = raw_xml element = ElementTree.fromstring(raw_xml) unit_key, items = process_func(element) - model.metadata[metadata_key] = items + setattr(model, metadata_key, items) raw_xml = model.raw_xml - repodata['primary'] = change_location_tag(raw_xml, model.relative_path) + model.repodata['primary'] = change_location_tag(raw_xml, model.file_name) def process_repomd_data_element(data_element): diff --git a/plugins/pulp_rpm/plugins/importers/yum/repomd/presto.py b/plugins/pulp_rpm/plugins/importers/yum/repomd/presto.py index e4dfed3ca..bfcd8a81d 100644 --- a/plugins/pulp_rpm/plugins/importers/yum/repomd/presto.py +++ b/plugins/pulp_rpm/plugins/importers/yum/repomd/presto.py @@ -27,19 +27,17 @@ def process_package_element(element): checksum = delta.find('checksum') checksum_type = verification.sanitize_checksum_type(checksum.attrib['type']) - return models.DRPM.from_package_info({ - 'type': 'drpm', - 'new_package': element.attrib['name'], - 'epoch': element.attrib['epoch'], - 'version': element.attrib['version'], - 'release': element.attrib['release'], - 'arch': element.attrib['arch'], - 'oldepoch': delta.attrib['oldepoch'], - 'oldversion': delta.attrib['oldversion'], - 'oldrelease': delta.attrib['oldrelease'], - 'filename': filename.text, - 'sequence': sequence.text, - 'size': int(size.text), - 'checksum': checksum.text, - 'checksumtype': checksum_type, - }) + return models.DRPM( + new_package=element.attrib['name'], + epoch=element.attrib['epoch'], + version=element.attrib['version'], + release=element.attrib['release'], + arch=element.attrib['arch'], + old_epoch=delta.attrib['oldepoch'], + old_version=delta.attrib['oldversion'], + old_release=delta.attrib['oldrelease'], + file_name=filename.text, + sequence=sequence.text, + size=int(size.text), + checksum=checksum.text, + checksum_type=checksum_type) diff --git a/plugins/pulp_rpm/plugins/importers/yum/repomd/primary.py b/plugins/pulp_rpm/plugins/importers/yum/repomd/primary.py index 
45667e2ae..ed685b71f 100644 --- a/plugins/pulp_rpm/plugins/importers/yum/repomd/primary.py +++ b/plugins/pulp_rpm/plugins/importers/yum/repomd/primary.py @@ -57,7 +57,7 @@ 'release': None, 'epoch': None, 'checksum': None, - 'checksumtype': None, + 'checksum_type': None, 'summary': None, 'description': None, 'changelog': None, @@ -65,7 +65,7 @@ 'url': None, 'time': None, 'size': None, - 'filename': None, + 'file_name': None, 'relative_url_path': None} PACKAGE_FORMAT_SKEL = {'vendor': None, @@ -75,7 +75,7 @@ 'buildhost': None, 'requires': [], 'provides': [], - 'sourcerpm': None, + 'source_rpm': None, 'files': []} # RPM entry dictionary --------------------------------------------------------- @@ -109,8 +109,7 @@ def process_package_element(package_element): # NOTE the use of deepcopy relies on cpython's very sensible policy of never # duplicating string literals, this may not hold up in other implementations # the python interpreter. - package_info = deepcopy(PACKAGE_INFO_SKEL) - package_info['type'] = package_element.attrib['type'] + package_info = dict() name_element = package_element.find(NAME_TAG) if name_element is not None: @@ -129,7 +128,7 @@ def process_package_element(package_element): checksum_element = package_element.find(CHECKSUM_TAG) if checksum_element is not None: checksum_type = verification.sanitize_checksum_type(checksum_element.attrib['type']) - package_info['checksumtype'] = checksum_type + package_info['checksum_type'] = checksum_type package_info['checksum'] = checksum_element.text summary_element = package_element.find(SUMMARY_TAG) @@ -162,8 +161,8 @@ def process_package_element(package_element): base_url = value package_info['base_url'] = base_url filename = os.path.basename(href) - package_info['relativepath'] = href - package_info['filename'] = filename + package_info['relative_path'] = href + package_info['file_name'] = filename # we don't make any attempt to preserve the original directory structure # this element will end up being converted back to XML and stuffed into # the DB on the unit object, so this is our chance to modify it. @@ -173,9 +172,9 @@ def process_package_element(package_element): package_info.update(_process_format_element(format_element)) if package_info['arch'].lower() == 'src': - model = models.SRPM.from_package_info(package_info) + model = models.SRPM(**package_info) else: - model = models.RPM.from_package_info(package_info) + model = models.RPM(**package_info) # add the raw XML so it can be saved in the database later rpm_namespace = utils.Namespace('rpm', RPM_SPEC_URL) model.raw_xml = utils.element_to_raw_xml(package_element, [rpm_namespace], COMMON_SPEC_URL) @@ -194,7 +193,7 @@ def _process_format_element(format_element): # NOTE the use of deepcopy relies on cpython's very sensible policy of never # duplicating string literals, this may not hold up in other implementations # the python interpreter. 
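# ----------------------------------------------------------------------
# [Editor's aside, not part of the patch] Dropping the deepcopy'd SKEL
# dicts means the parser only sets the keys it actually saw in the XML,
# and the mongoengine field defaults cover the rest. The constructor call
# at the end of process_package_element() is then a plain kwargs
# expansion, as this hunk shows:
#
#     if package_info['arch'].lower() == 'src':
#         model = models.SRPM(**package_info)
#     else:
#         model = models.RPM(**package_info)
# ----------------------------------------------------------------------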
- package_format = deepcopy(PACKAGE_FORMAT_SKEL) + package_format = dict() if format_element is None: return package_format @@ -213,6 +212,7 @@ def _process_format_element(format_element): header_range_element = format_element.find(RPM_HEADER_RANGE_TAG) if header_range_element is not None: + package_format['header_range'] = dict() package_format['header_range']['start'] = int(header_range_element.attrib['start']) package_format['header_range']['end'] = int(header_range_element.attrib['end']) @@ -222,20 +222,20 @@ def _process_format_element(format_element): sourcerpm_element = format_element.find(RPM_SOURCERPM_TAG) if sourcerpm_element is not None: - package_format['sourcerpm'] = sourcerpm_element.text + package_format['source_rpm'] = sourcerpm_element.text provides_element = format_element.find(RPM_PROVIDES_TAG) if provides_element is not None: - package_format['provides'].extend( - _process_rpm_entry_element(e) for e in provides_element.findall(RPM_ENTRY_TAG)) + package_format['provides'] = \ + [_process_rpm_entry_element(e) for e in provides_element.findall(RPM_ENTRY_TAG)] requires_element = format_element.find(RPM_REQUIRES_TAG) if requires_element is not None: - package_format['requires'].extend( - _process_rpm_entry_element(e) for e in requires_element.findall(RPM_ENTRY_TAG)) + package_format['requires'] = \ + [_process_rpm_entry_element(e) for e in requires_element.findall(RPM_ENTRY_TAG)] - package_format['files'].extend( - _process_file_element(e) for e in format_element.findall(FILE_TAG)) + package_format['files'] = \ + [_process_file_element(e) for e in format_element.findall(FILE_TAG)] return package_format diff --git a/plugins/pulp_rpm/plugins/importers/yum/repomd/updateinfo.py b/plugins/pulp_rpm/plugins/importers/yum/repomd/updateinfo.py index 068780db3..ac4330f97 100644 --- a/plugins/pulp_rpm/plugins/importers/yum/repomd/updateinfo.py +++ b/plugins/pulp_rpm/plugins/importers/yum/repomd/updateinfo.py @@ -30,8 +30,8 @@ def process_package_element(element): description_text = '' package_info = { 'description': description_text, - 'from': element.attrib['from'], - 'id': element.find('id').text, + 'errata_from': element.attrib['from'], + 'errata_id': element.find('id').text, 'issued': '', 'pushcount': '', # yum defaults this to False, and sets it to True if any package in @@ -72,7 +72,7 @@ def process_package_element(element): if updated_element is not None: package_info['updated'] = updated_element.attrib['date'] - return models.Errata.from_package_info(package_info) + return models.Errata(**package_info) def _parse_reference(element): diff --git a/plugins/pulp_rpm/plugins/importers/yum/report.py b/plugins/pulp_rpm/plugins/importers/yum/report.py index fc2c5d7b3..45578220d 100644 --- a/plugins/pulp_rpm/plugins/importers/yum/report.py +++ b/plugins/pulp_rpm/plugins/importers/yum/report.py @@ -4,20 +4,21 @@ import logging from pulp_rpm.common import constants +from pulp_rpm.common import ids from pulp_rpm.plugins.db import models _logger = logging.getLogger(__name__) type_done_map = { - models.RPM.TYPE: 'rpm_done', - models.SRPM.TYPE: 'rpm_done', - models.DRPM.TYPE: 'drpm_done', + ids.TYPE_ID_RPM: 'rpm_done', + ids.TYPE_ID_SRPM: 'rpm_done', + ids.TYPE_ID_DRPM: 'drpm_done', } type_total_map = { - 'rpm_total': models.RPM.TYPE, - 'drpm_total': models.DRPM.TYPE, + 'rpm_total': ids.TYPE_ID_RPM, + 'drpm_total': ids.TYPE_ID_DRPM, } @@ -60,15 +61,15 @@ def success(self, model): self['items_left'] -= 1 if self['items_left'] % 100 == 0: _logger.debug(_('%(n)s items left to download.') % {'n': 
self['items_left']}) - self['size_left'] -= model.metadata['size'] - done_attribute = type_done_map[model.TYPE] + self['size_left'] -= model.size + done_attribute = type_done_map[model.unit_type_id] self['details'][done_attribute] += 1 return self def failure(self, model, error_report): self['items_left'] -= 1 - self['size_left'] -= model.metadata['size'] - done_attribute = type_done_map[model.TYPE] + self['size_left'] -= model.size + done_attribute = type_done_map[model.unit_type_id] self['details'][done_attribute] += 1 self['error_details'].append(error_report) return self diff --git a/plugins/pulp_rpm/plugins/importers/yum/sync.py b/plugins/pulp_rpm/plugins/importers/yum/sync.py index 7f7f8b83d..776bf8b56 100644 --- a/plugins/pulp_rpm/plugins/importers/yum/sync.py +++ b/plugins/pulp_rpm/plugins/importers/yum/sync.py @@ -15,6 +15,8 @@ from pulp.common.plugins import importer_constants from pulp.plugins.util import nectar_config as nectar_utils, verification from pulp.server.exceptions import PulpCodedException +from pulp.server.managers.repo import _common as common_utils +from pulp.server.controllers import repository as repo_controller from pulp_rpm.common import constants, ids from pulp_rpm.plugins import error_codes @@ -39,7 +41,7 @@ class RepoSync(object): def __init__(self, repo, sync_conduit, call_config): """ :param repo: metadata describing the repository - :type repo: pulp.plugins.model.Repository + :type repo: pulp.server.db.model.Repository :param sync_conduit: provides access to relevant Pulp functionality :type sync_conduit: pulp.plugins.conduits.repo_sync.RepoSyncConduit @@ -48,7 +50,7 @@ def __init__(self, repo, sync_conduit, call_config): :type call_config: pulp.plugins.config.PluginCallConfiguration """ self.cancelled = False - self.working_dir = repo.working_dir + self.working_dir = common_utils.get_working_directory() self.content_report = ContentReport() self.distribution_report = DistributionReport() self.progress_status = { @@ -212,13 +214,14 @@ def run(self): _logger.info(_('Downloading additional units.')) - with self.update_state(self.distribution_report, models.Distribution.TYPE) as skip: + with self.update_state(self.distribution_report, models.Distribution.unit_type_id) \ + as skip: if not skip: - treeinfo.sync(self.sync_conduit, url, self.tmp_dir, + treeinfo.sync(self.repo, self.sync_conduit, url, self.tmp_dir, self.nectar_config, self.distribution_report, self.set_progress) - with self.update_state(self.progress_status['errata'], models.Errata.TYPE) as skip: + with self.update_state(self.progress_status['errata'], ids.TYPE_ID_ERRATA) as skip: if not (skip or self.skip_repomd_steps): self.get_errata(metadata_files) @@ -253,6 +256,7 @@ def run(self): # In case other exceptions were caught that are not related to the state of the # mirror, raise the exception immediately and do not iterate throught the rest # of the mirrors. 
+ _logger.exception(e) self._set_failed_state(e) report = self.sync_conduit.build_failure_report(self._progress_summary, self.progress_status) @@ -261,6 +265,8 @@ finally: # clean up whatever we may have left behind shutil.rmtree(self.tmp_dir, ignore_errors=True) + # recalculate all the unit counts + repo_controller.rebuild_content_unit_counts(self.repo) self.save_repomd_revision() _logger.info(_('Sync complete.')) @@ -405,20 +411,28 @@ def import_unknown_metadata_files(self, metadata_files): if metadata_type not in metadata_files.KNOWN_TYPES: checksum_type = file_info['checksum']['algorithm'] checksum_type = verification.sanitize_checksum_type(checksum_type) - - unit_metadata = { - 'checksum': file_info['checksum']['hex_digest'], - 'checksum_type': checksum_type, - } - model = models.YumMetadataFile(metadata_type, - self.sync_conduit.repo_id, - unit_metadata) - relative_path = os.path.join(model.relative_dir, - os.path.basename(file_info['local_path'])) - unit = self.sync_conduit.init_unit(models.YumMetadataFile.TYPE, model.unit_key, - model.metadata, relative_path) - shutil.copyfile(file_info['local_path'], unit.storage_path) - self.sync_conduit.save_unit(unit) + checksum = file_info['checksum']['hex_digest'] + # Find an existing model + model = models.YumMetadataFile.objects(data_type=metadata_type, + repo_id=self.repo.repo_id).first() + # If there is an existing model, update and reuse it + if model: + model.checksum = checksum + model.checksum_type = checksum_type + model.set_content(file_info['local_path']) + model.save() + else: + # Else, create a new model + model = models.YumMetadataFile( + data_type=metadata_type, + repo_id=self.repo.repo_id, + checksum=checksum, + checksum_type=checksum_type) + model.set_content(file_info['local_path']) + model.save() + + # associate/re-associate the model with the repo + repo_controller.associate_single_unit(self.repo, model) def update_content(self, metadata_files, url): """ @@ -472,7 +486,7 @@ def _decide_rpms_to_download(self, metadata_files): :return: tuple of (set(RPM.NAMEDTUPLEs), number of RPMs, total size in bytes) :rtype: tuple """ - if models.RPM.TYPE in self.call_config.get(constants.CONFIG_SKIP, []): + if ids.TYPE_ID_RPM in self.call_config.get(constants.CONFIG_SKIP, []): _logger.debug('skipping RPM sync') return set(), 0, 0 primary_file_handle = metadata_files.get_metadata_file_handle(primary.METADATA_FILE_NAME) @@ -481,12 +495,9 @@ package_info_generator = packages.package_list_generator( primary_file_handle, primary.PACKAGE_TAG, primary.process_package_element) wanted = self._identify_wanted_versions(package_info_generator) - # check for the units that are already in the repo - not_found_in_the_repo = existing.check_repo(wanted.iterkeys(), - self.sync_conduit.get_units) # check for the units that are not in the repo, but exist on the server # and associate them to the repo - to_download = existing.check_all_and_associate(not_found_in_the_repo, + to_download = existing.check_all_and_associate(wanted.iterkeys(), self.sync_conduit) count = len(to_download) size = 0 @@ -507,7 +518,7 @@ def _decide_drpms_to_download(self, metadata_files): :return: tuple of (set(DRPM.NAMEDTUPLEs), number of DRPMs, total size in bytes) :rtype: tuple """ - if models.DRPM.TYPE in self.call_config.get(constants.CONFIG_SKIP, []): + if ids.TYPE_ID_DRPM in self.call_config.get(constants.CONFIG_SKIP, []): _logger.debug('skipping DRPM sync') return set(), 0, 0 @@ -526,12 +537,9 @@ def _decide_drpms_to_download(self, 
metadata_files): presto.PACKAGE_TAG, presto.process_package_element) wanted = self._identify_wanted_versions(package_info_generator) - # check for the units that are already in the repo - not_found_in_the_repo = existing.check_repo(wanted.iterkeys(), - self.sync_conduit.get_units) # check for the units that are not in the repo, but exist on the server # and associate them to the repo - to_download = existing.check_all_and_associate(not_found_in_the_repo, + to_download = existing.check_all_and_associate(wanted.iterkeys(), self.sync_conduit) count += len(to_download) for unit in to_download: @@ -668,7 +676,7 @@ def get_comps_file_units(self, metadata_files, processing_function, tag): return try: - process_func = functools.partial(processing_function, self.repo.id) + process_func = functools.partial(processing_function, self.repo.repo_id) self.save_fileless_units(group_file_handle, tag, process_func, mutable_type=True) finally: @@ -713,9 +721,9 @@ def save_fileless_units(self, file_handle, tag, process_func, mutable_type=False # if units aren't mutable, we don't need to attempt saving units that # we already have if not mutable_type and not additive_type: - wanted = (model.as_named_tuple for model in package_info_generator) + wanted = (model.unit_key_as_named_tuple for model in package_info_generator) # given what we want, filter out what we already have - to_save = existing.check_repo(wanted, self.sync_conduit.get_units) + to_save = existing.check_repo(wanted) # rewind, iterate again through the file, and save what we need file_handle.seek(0) @@ -723,16 +731,18 @@ def save_fileless_units(self, file_handle, tag, process_func, mutable_type=False tag, process_func) package_info_generator = \ - (model for model in all_packages if model.as_named_tuple in to_save) + (model for model in all_packages if model.unit_key_as_named_tuple in to_save) for model in package_info_generator: - unit = self.sync_conduit.init_unit(model.TYPE, model.unit_key, model.metadata, None) - if additive_type: - existing_unit = self.sync_conduit.find_unit_by_unit_key(model.TYPE, model.unit_key) - if existing_unit: - unit = self._concatenate_units(existing_unit, unit) + existing_unit = model.__class__.objects(**model.unit_key).first() + if not existing_unit: + model.save() + else: + if additive_type: + model = self._concatenate_units(existing_unit, model) + model.save() - self.sync_conduit.save_unit(unit) + repo_controller.associate_single_unit(self.repo, model) def _concatenate_units(self, existing_unit, new_unit): """ @@ -742,9 +752,9 @@ def _concatenate_units(self, existing_unit, new_unit): :type existing_unit: pulp.plugins.model.Unit :param new_unit: The unit we are combining with the existing unit - :type new_unit: pulp.plugins.model.Unit + :type new_unit: pulp.server.db.model.ContentUnit """ - if existing_unit.type_id != new_unit.type_id: + if existing_unit.unit_type_id != new_unit.unit_type_id: raise PulpCodedException(message="Cannot concatenate two units of different types. " "Tried to concatenate %s with %s" % (existing_unit.type_id, new_unit.type_id)) @@ -754,14 +764,14 @@ def _concatenate_units(self, existing_unit, new_unit): "Tried to concatenate %s with %s" % (existing_unit.unit_key, new_unit.unit_key)) - if existing_unit.type_id == ids.TYPE_ID_ERRATA: + if isinstance(existing_unit, models.Errata): # add in anything from new_unit that we don't already have. We key # package lists by name for this concatenation. 
- existing_package_list_names = [p['name'] for p in existing_unit.metadata['pkglist']] + existing_package_list_names = [p['name'] for p in existing_unit.pkglist] - for possible_new_pkglist in new_unit.metadata['pkglist']: + for possible_new_pkglist in new_unit.pkglist: if possible_new_pkglist['name'] not in existing_package_list_names: - existing_unit.metadata['pkglist'] += [possible_new_pkglist] + existing_unit.pkglist += [possible_new_pkglist] else: raise PulpCodedException(message="Concatenation of unit type %s is not supported" % existing_unit.type_id) @@ -798,20 +808,20 @@ def _identify_wanted_versions(self, package_info_generator): for model in package_info_generator: versions = wanted.setdefault(model.key_string_without_version, {}) serialized_version = model.complete_version_serialized - size = model.metadata['size'] + size = model.size # if we are limited on the number of old versions we can have, if number_old_versions_to_keep is not None: number_to_keep = number_old_versions_to_keep + 1 if len(versions) < number_to_keep: - versions[serialized_version] = (model.as_named_tuple, size) + versions[serialized_version] = (model.unit_key_as_named_tuple, size) else: smallest_version = sorted(versions.keys(), reverse=True)[:number_to_keep][-1] if serialized_version > smallest_version: del versions[smallest_version] - versions[serialized_version] = (model.as_named_tuple, size) + versions[serialized_version] = (model.unit_key_as_named_tuple, size) else: - versions[serialized_version] = (model.as_named_tuple, size) + versions[serialized_version] = (model.unit_key_as_named_tuple, size) ret = {} for units in wanted.itervalues(): for unit, size in units.itervalues(): @@ -840,5 +850,5 @@ def _filtered_unit_generator(self, units, to_download=None): if to_download is None: # assume we want to download everything yield unit - elif unit.as_named_tuple in to_download: + elif unit.unit_key_as_named_tuple in to_download: yield unit diff --git a/plugins/pulp_rpm/plugins/importers/yum/upload.py b/plugins/pulp_rpm/plugins/importers/yum/upload.py index 1b3d28e36..0f60bf11e 100644 --- a/plugins/pulp_rpm/plugins/importers/yum/upload.py +++ b/plugins/pulp_rpm/plugins/importers/yum/upload.py @@ -8,6 +8,8 @@ import rpm from pulp.plugins.util import verification +from pulp.plugins.loader import api as plugin_api +from pulp.server.controllers import repository as repo_controller from pulp.server.db.model.criteria import UnitAssociationCriteria from pulp.server.exceptions import PulpCodedValidationException, PulpCodedException @@ -83,12 +85,12 @@ def upload(repo, type_id, unit_key, metadata, file_path, conduit, config): # Dispatch to process the upload by type handlers = { - models.RPM.TYPE: _handle_package, - models.SRPM.TYPE: _handle_package, - models.PackageGroup.TYPE: _handle_group_category, - models.PackageCategory.TYPE: _handle_group_category, - models.Errata.TYPE: _handle_erratum, - models.YumMetadataFile.TYPE: _handle_yum_metadata_file, + models.RPM.unit_type_id: _handle_package, + models.SRPM.unit_type_id: _handle_package, + models.PackageGroup.unit_type_id: _handle_group_category, + models.PackageCategory.unit_type_id: _handle_group_category, + models.Errata.unit_type_id: _handle_erratum, + models.YumMetadataFile.unit_type_id: _handle_yum_metadata_file, } if type_id not in handlers: @@ -136,42 +138,22 @@ def _handle_erratum(repo, type_id, unit_key, metadata, file_path, conduit, confi """ # Validate the user specified data by instantiating the model - try: - model_class = models.TYPE_MAP[type_id] - model 
= model_class(metadata=metadata, **unit_key) - except TypeError: - raise ModelInstantiationError() + model_data = dict() + model_data.update(unit_key) + if metadata: + model_data.update(metadata) - unit = conduit.init_unit(model.TYPE, model.unit_key, model.metadata, None) + + model_class = plugin_api.get_unit_model_by_id(type_id) + model = model_class(**model_data) + + # TODO Find out if the unit exists; if it does, associate it, if not, create it + unit = conduit.init_unit(model.unit_type_id, model.unit_key, model.metadata, None) # this save must happen before the link is created, because the link logic # requires the unit to have an "id". saved_unit = conduit.save_unit(unit) - if not config.get_boolean(CONFIG_SKIP_ERRATUM_LINK): - _link_errata_to_rpms(conduit, model, saved_unit) - - -def _link_errata_to_rpms(conduit, errata_model, errata_unit): - """ - Creates links in the Pulp data model between an erratum and its RPMs. - - :param conduit: provides access to relevant Pulp functionality - :type conduit: pulp.plugins.conduits.unit_add.UnitAddConduit - :param errata_model: model object representing an errata - :type errata_model: pulp_rpm.plugins.db.models.Errata - :param errata_unit: unit object representing an errata - :type errata_unit: pulp.plugins.model.Unit - """ - fields = list(models.RPM.UNIT_KEY_NAMES) - fields.append('_storage_path') - filters = {'$or': errata_model.rpm_search_dicts} - for model_type in (models.RPM.TYPE, models.SRPM.TYPE): - criteria = UnitAssociationCriteria(type_ids=[model_type], unit_fields=fields, - unit_filters=filters) - for unit in conduit.get_units(criteria): - conduit.link_unit(errata_unit, unit, bidirectional=True) - def _handle_yum_metadata_file(repo, type_id, unit_key, metadata, file_path, conduit, config): """ @@ -187,24 +169,24 @@ def _handle_yum_metadata_file(repo, type_id, unit_key, metadata, file_path, cond """ # Validate the user specified data by instantiating the model - try: - model = models.YumMetadataFile(metadata=metadata, **unit_key) - except TypeError: - raise ModelInstantiationError() + model_data = dict() + model_data.update(unit_key) + if metadata: + model_data.update(metadata) # Replicates the logic in yum/sync.py.import_unknown_metadata_files. # The local_path variable is removed since it's not included in the metadata when # synchronized. 
- file_relative_path = model.metadata.pop('local_path') - relative_path = os.path.join(model.relative_dir, file_relative_path) + file_relative_path = model_data.pop('local_path') + + translated_data = models.YumMetadataFile.SERIALIZER().from_representation(model_data) + + model = models.YumMetadataFile(**translated_data) + model.set_content(file_relative_path) + model.save() # Move the file to its final storage location in Pulp - try: - unit = conduit.init_unit(model.TYPE, model.unit_key, model.metadata, relative_path) - shutil.move(file_path, unit.storage_path) - conduit.save_unit(unit) - except IOError: - raise StoreFileError() + repo_controller.associate_single_unit(conduit.repo, model) def _handle_group_category(repo, type_id, unit_key, metadata, file_path, conduit, config): @@ -233,12 +215,12 @@ def _handle_group_category(repo, type_id, unit_key, metadata, file_path, conduit else: # Validate the user specified data by instantiating the model try: - model_class = models.TYPE_MAP[type_id] + model_class = plugin_api.get_unit_model_by_id(type_id) model = model_class(metadata=metadata, **unit_key) except TypeError: raise ModelInstantiationError() - unit = conduit.init_unit(model.TYPE, model.unit_key, model.metadata, None) + unit = conduit.init_unit(model.unit_type_id, model.unit_key, model.metadata, None) conduit.save_unit(unit) @@ -295,14 +277,14 @@ def _handle_package(repo, type_id, unit_key, metadata, file_path, conduit, confi # Validate the user specified data by instantiating the model try: - model_class = models.TYPE_MAP[type_id] + model_class = plugin_api.get_unit_model_by_id(type_id) model = model_class(metadata=new_unit_metadata, **new_unit_key) except TypeError: raise ModelInstantiationError() # Move the file to its final storage location in Pulp try: - unit = conduit.init_unit(model.TYPE, model.unit_key, + unit = conduit.init_unit(model.unit_type_id, model.unit_key, model.metadata, model.relative_path) shutil.move(file_path, unit.storage_path) except IOError: @@ -421,13 +403,13 @@ def _generate_rpm_data(type_id, rpm_filename, user_metadata=None): # construct filename from metadata (BZ #1101168) if headers[rpm.RPMTAG_SOURCEPACKAGE]: - if type_id != models.SRPM.TYPE: + if type_id != models.SRPM.unit_type_id: raise PulpCodedValidationException(error_code=error_codes.RPM1002) rpm_basefilename = "%s-%s-%s.src.rpm" % (headers['name'], headers['version'], headers['release']) else: - if type_id != models.RPM.TYPE: + if type_id != models.RPM.unit_type_id: raise PulpCodedValidationException(error_code=error_codes.RPM1003) rpm_basefilename = "%s-%s-%s.%s.rpm" % (headers['name'], headers['version'], diff --git a/plugins/pulp_rpm/plugins/migrations/0011_new_importer.py b/plugins/pulp_rpm/plugins/migrations/0011_new_importer.py index dd9f94a05..ab1365f5d 100644 --- a/plugins/pulp_rpm/plugins/migrations/0011_new_importer.py +++ b/plugins/pulp_rpm/plugins/migrations/0011_new_importer.py @@ -5,7 +5,6 @@ from pulp.plugins.types import database as types_db from pulp.server.db import connection -from pulp_rpm.plugins.db.models import RPM, SRPM from pulp_rpm.plugins.importers.yum import utils from pulp_rpm.plugins.importers.yum.repomd import primary @@ -16,7 +15,7 @@ def migrate(*args, **kwargs): - for type_id in (RPM.TYPE, SRPM.TYPE): + for type_id in ('rpm', 'srpm'): _migrate_collection(type_id) @@ -43,7 +42,7 @@ def _migrate_collection(type_id): # add these attributes, which we previously didn't track in the DB. 
package['size'] = int(primary_element.find('size').attrib['package']) - if type_id == RPM.TYPE: + if type_id == 'rpm': package['sourcerpm'] = format_element.find('sourcerpm').text package['summary'] = primary_element.find('summary').text diff --git a/plugins/pulp_rpm/plugins/migrations/0016_new_yum_distributor.py b/plugins/pulp_rpm/plugins/migrations/0016_new_yum_distributor.py index b71dd51af..a6db3f5fa 100644 --- a/plugins/pulp_rpm/plugins/migrations/0016_new_yum_distributor.py +++ b/plugins/pulp_rpm/plugins/migrations/0016_new_yum_distributor.py @@ -171,7 +171,7 @@ def _re_publish_repository(repo_obj, distributor): config = PluginCallConfiguration(NEW_DISTRIBUTOR_CONF, distributor['config']) publisher = Publisher(repo, conduit, config, YUM_DISTRIBUTOR_ID) - publisher.publish() + publisher.process_lifecycle() def _remove_legacy_publish_dirs(): diff --git a/plugins/pulp_rpm/plugins/migrations/0022_rename_unit_id_fields.py b/plugins/pulp_rpm/plugins/migrations/0022_rename_unit_id_fields.py new file mode 100644 index 000000000..95674e5e0 --- /dev/null +++ b/plugins/pulp_rpm/plugins/migrations/0022_rename_unit_id_fields.py @@ -0,0 +1,52 @@ +""" +This migration renames the `id` field of each of the unit +collections to something more specific. This works around +mongoengine's inability to have both an _id and an id +field on a document. +""" +from pulp.server.db import connection + + +def migrate(*args, **kwargs): + """ + Perform the migration as described in this module's docblock. + + :param args: unused + :type args: list + :param kwargs: unused + :type kwargs: dict + """ + + migrate_id('units_distribution', 'distribution_id') + migrate_id('units_erratum', 'errata_id') + migrate_id('units_package_group', 'package_group_id') + migrate_id('units_package_category', 'package_category_id') + migrate_id('units_package_environment', 'package_environment_id') + + +def migrate_id(collection, new_field_name): + """ + Migrate a given collection. + + Drop all indexes in the collection that contain the 'id' field + and rename the id field to the new name. + + :param collection: the name of the collection to migrate + :type collection: str + :param new_field_name: The new name for the 'id' field + :type new_field_name: str + """ + collection = connection.get_collection(collection) + # Drop any index containing an id + index_info = collection.index_information() + indexes_to_drop = [] + for index_name, index_details in index_info.iteritems(): + for index_key in index_details['key']: + if index_key[0] == 'id': + indexes_to_drop.append(index_name) + + for index in indexes_to_drop: + collection.drop_index(index) + + # Rename the id field + collection.update({}, {'$rename': {'id': new_field_name}}) diff --git a/plugins/pulp_rpm/plugins/serializers.py b/plugins/pulp_rpm/plugins/serializers.py new file mode 100644 index 000000000..3d79ff1f8 --- /dev/null +++ b/plugins/pulp_rpm/plugins/serializers.py @@ -0,0 +1,89 @@ +from pulp.server.webservices.views import serializers as platform_serializers + + +class Distribution(platform_serializers.ModelSerializer): + """ + Serializer for Distribution models + """ + class Meta: + remapped_fields = {'distribution_id': 'id', + 'user_metadata': 'pulp_user_metadata'} + + +class Drpm(platform_serializers.ModelSerializer): + """ + Serializer for DRPM models + """ + class Meta: + remapped_fields = {'file_name': 'filename', + 'checksum_type': 'checksumtype', + 'old_epoch': 'oldepoch', + 'old_version': 'oldversion', + 'old_release': 'oldrelease', 
'user_metadata': 'pulp_user_metadata'} + + +class RpmBase(platform_serializers.ModelSerializer): + """ + Serializer for RpmBase based models + """ + class Meta: + remapped_fields = {'checksum_type': 'checksumtype', + 'file_name': 'filename', + 'relative_path': 'relativepath', + 'source_rpm': 'sourcerpm', + 'user_metadata': 'pulp_user_metadata'} + + +class Errata(platform_serializers.ModelSerializer): + """ + Serializer for Errata models + """ + class Meta: + remapped_fields = {'errata_from': 'from', + 'errata_id': 'id', + 'user_metadata': 'pulp_user_metadata'} + + +class PackageGroup(platform_serializers.ModelSerializer): + """ + Serializer for PackageGroup models + """ + class Meta: + remapped_fields = {'package_group_id': 'id', + 'user_metadata': 'pulp_user_metadata'} + + +class PackageCategory(platform_serializers.ModelSerializer): + """ + Serializer for PackageCategory models + """ + class Meta: + remapped_fields = {'package_category_id': 'id', + 'group_ids': 'packagegroupids', + 'user_metadata': 'pulp_user_metadata'} + + +class PackageEnvironment(platform_serializers.ModelSerializer): + """ + Serializer for PackageEnvironment models + """ + class Meta: + remapped_fields = {'package_environment_id': 'id', + 'user_metadata': 'pulp_user_metadata'} + + +class YumMetadataFile(platform_serializers.ModelSerializer): + """ + Serializer for YumMetadataFile models + """ + class Meta: + remapped_fields = {'user_metadata': 'pulp_user_metadata'} + + +class ISO(platform_serializers.ModelSerializer): + """ + Serializer for ISO models + """ + class Meta: + remapped_fields = {'user_metadata': 'pulp_user_metadata'} diff --git a/plugins/setup.py b/plugins/setup.py index 27ad64e29..40a864671 100755 --- a/plugins/setup.py +++ b/plugins/setup.py @@ -32,6 +32,18 @@ ], 'pulp.server.db.migrations': [ 'pulp_rpm = pulp_rpm.plugins.migrations' + ], + 'pulp.unit_models': [ + 'rpm=pulp_rpm.plugins.db.models:RPM', + 'srpm=pulp_rpm.plugins.db.models:SRPM', + 'drpm=pulp_rpm.plugins.db.models:DRPM', + 'distribution=pulp_rpm.plugins.db.models:Distribution', + 'erratum=pulp_rpm.plugins.db.models:Errata', + 'package_group=pulp_rpm.plugins.db.models:PackageGroup', + 'package_category=pulp_rpm.plugins.db.models:PackageCategory', + 'package_environment=pulp_rpm.plugins.db.models:PackageEnvironment', + 'yum_repo_metadata_file=pulp_rpm.plugins.db.models:YumMetadataFile', + 'iso=pulp_rpm.plugins.db.models:ISO' ] } ) diff --git a/plugins/test/unit/plugins/distributors/yum/metadata/test_other.py b/plugins/test/unit/plugins/distributors/yum/metadata/test_other.py index be4ac8e53..ebe78e2cc 100644 --- a/plugins/test/unit/plugins/distributors/yum/metadata/test_other.py +++ b/plugins/test/unit/plugins/distributors/yum/metadata/test_other.py @@ -7,7 +7,7 @@ from pulp_rpm.plugins.distributors.yum.metadata.other import OtherXMLFileContext -class PrimaryXMLFileContextTests(unittest.TestCase): +class OtherXMLFileContextTests(unittest.TestCase): def setUp(self): self.working_dir = tempfile.mkdtemp() self.context = OtherXMLFileContext(self.working_dir, 3) diff --git a/plugins/test/unit/plugins/distributors/yum/test_configuration.py b/plugins/test/unit/plugins/distributors/yum/test_configuration.py index 70146f6cc..306b68b4e 100644 --- a/plugins/test/unit/plugins/distributors/yum/test_configuration.py +++ b/plugins/test/unit/plugins/distributors/yum/test_configuration.py @@ -8,8 +8,8 @@ from mock import MagicMock, patch, ANY from pulp.plugins.conduits.repo_config import RepoConfigConduit from pulp.plugins.config import 
PluginCallConfiguration -from pulp.plugins.model import Repository from pulp.server.exceptions import MissingResource +from pulp.server.db.model import Repository from pulp_rpm.common.constants import CONFIG_KEY_CHECKSUM_TYPE, \ SCRATCHPAD_DEFAULT_METADATA_CHECKSUM, CONFIG_DEFAULT_CHECKSUM @@ -469,7 +469,7 @@ def test_load_config_fails(self, mock_log): # -- conflicting relative paths -------------------------------------------- def test_relative_path_conflicts_none(self): - repo = Repository('test') + repo = Repository(repo_id='test') config = {} conduit = mock.MagicMock() conduit.get_repo_distributors_by_relative_url = mock.MagicMock(return_value=[]) @@ -481,7 +481,7 @@ def test_relative_path_conflicts_none(self): self.assertEqual(len(error_messages), 0) def test_relative_path_conflicts_with_relative_path(self): - repo = Repository('test') + repo = Repository(repo_id='test') config = {'relative_url': 'test'} conflicting_distributor = {'repo_id': 'zoo_repo', 'config': {'relative_url': 'test'}} @@ -498,7 +498,7 @@ def test_relative_path_conflicts_with_relative_path(self): self.assertEqual(error_messages, [message]) def test_relative_path_conflicts_with_repo_id(self): - repo = Repository('test') + repo = Repository(repo_id='test') config = {'relative_url': 'zoo_repo'} conflicting_distributor = {'repo_id': 'zoo_repo', 'config': {}} @@ -515,7 +515,7 @@ def test_relative_path_conflicts_with_repo_id(self): self.assertEqual(error_messages, [message]) def test_relative_path_conflicts_with_both(self): - repo = Repository('test') + repo = Repository(repo_id='test') config = {'relative_url': 'zoo_repo'} conflicting_distributor = [{'repo_id': 'zoo_repo', 'config': {'relative_url': 'zoo_repo'}}, @@ -540,48 +540,48 @@ def test_relative_path_conflicts_with_both(self): @mock.patch('pulp.repoauth.repo_cert_utils.RepoCertUtils.write_consumer_cert_bundle') def test_cert_based_auth_ca_and_cert(self, mock_write_consumer_cert_bundle, mock_add_protected_repo): - repo = Repository('test') + repo = Repository(repo_id='test') config = {'auth_ca': 'looks legit', 'auth_cert': '1234567890'} bundle = {'ca': config['auth_ca'], 'cert': config['auth_cert']} configuration.process_cert_based_auth(repo, config) - mock_write_consumer_cert_bundle.assert_called_once_with(repo.id, bundle) - mock_add_protected_repo.assert_called_once_with(repo.id, repo.id) + mock_write_consumer_cert_bundle.assert_called_once_with(repo.repo_id, bundle) + mock_add_protected_repo.assert_called_once_with(repo.repo_id, repo.repo_id) @mock.patch('pulp.repoauth.protected_repo_utils.ProtectedRepoUtils.delete_protected_repo') def test_cert_based_auth_ca_no_cert(self, mock_delete_protected_repo): - repo = Repository('test') + repo = Repository(repo_id='test') config = {'auth_ca': 'looks not so legit'} configuration.process_cert_based_auth(repo, config) - mock_delete_protected_repo.assert_called_once_with(repo.id) + mock_delete_protected_repo.assert_called_once_with(repo.repo_id) @mock.patch('pulp.repoauth.protected_repo_utils.ProtectedRepoUtils.delete_protected_repo') def test_cert_based_auth_no_ca_no_cert(self, mock_delete_protected_repo): - repo = Repository('test') + repo = Repository(repo_id='test') configuration.process_cert_based_auth(repo, {}) - mock_delete_protected_repo.assert_called_once_with(repo.id) + mock_delete_protected_repo.assert_called_once_with(repo.repo_id) @mock.patch('pulp.repoauth.protected_repo_utils.ProtectedRepoUtils.delete_protected_repo') def test_remove_cert_based_auth(self, mock_delete_protected_repo): - repo = 
Repository('test') + repo = Repository(repo_id='test') config = {} configuration.remove_cert_based_auth(repo, config) - mock_delete_protected_repo.assert_called_once_with(repo.id) + mock_delete_protected_repo.assert_called_once_with(repo.repo_id) class TestGetExportRepoPublishDirs(unittest.TestCase): def test_both_dirs(self): config = PluginCallConfiguration({}, {constants.PUBLISH_HTTP_KEYWORD: True, constants.PUBLISH_HTTPS_KEYWORD: True}) - repo = mock.Mock(id='foo') + repo = mock.Mock(repo_id='foo') dirs = configuration.get_export_repo_publish_dirs(repo, config) self.assertEquals(dirs, [ os.path.join(configuration.HTTP_EXPORT_DIR, 'foo'), @@ -589,7 +589,7 @@ def test_both_dirs(self): def test_no_dirs(self): config = PluginCallConfiguration({}, {}) - repo = mock.Mock(id='foo') + repo = mock.Mock(repo_id='foo') dirs = configuration.get_export_repo_publish_dirs(repo, config) self.assertEquals(dirs, []) diff --git a/plugins/test/unit/plugins/distributors/yum/test_distributor.py b/plugins/test/unit/plugins/distributors/yum/test_distributor.py index 2b903c50b..87b00376e 100644 --- a/plugins/test/unit/plugins/distributors/yum/test_distributor.py +++ b/plugins/test/unit/plugins/distributors/yum/test_distributor.py @@ -3,10 +3,10 @@ import os import shutil import tempfile -import unittest import mock from mock import Mock, patch, call +from pulp.common.compat import unittest from pulp.devel import mock_config from pulp.devel.unit import util from pulp.devel.unit.util import compare_dict @@ -43,19 +43,24 @@ def test_metadata(self): self.assertEqual(metadata['id'], TYPE_ID_DISTRIBUTOR_YUM) self.assertEqual(metadata['display_name'], distributor.DISTRIBUTOR_DISPLAY_NAME) + @patch('pulp_rpm.plugins.distributors.yum.distributor.platform_models') @mock.patch('pulp_rpm.plugins.distributors.yum.configuration.validate_config') - def test_validate_config(self, mock_validate_config): + def test_validate_config(self, mock_validate_config, m_platform_models): repo = Repository('test') + m_repo = Mock(repo_id='test') + m_platform_models.Repository.objects.get.return_value = m_repo config = PluginCallConfiguration(None, None) conduit = RepoConfigConduit(TYPE_ID_DISTRIBUTOR_YUM) self.distributor.validate_config(repo, config, conduit) - mock_validate_config.assert_called_once_with(repo, config, conduit) + mock_validate_config.assert_called_once_with(m_repo, config, conduit) + @patch('pulp_rpm.plugins.distributors.yum.distributor.platform_models') @mock.patch('pulp_rpm.plugins.distributors.yum.distributor.publish') - def test_publish_repo(self, mock_publish): + def test_publish_repo(self, mock_publish, m_platform_models): repo = Repository('test') + m_platform_models.Repository.objects.get.return_value = Mock(repo_id='test') config = PluginCallConfiguration(None, None) conduit = RepoPublishConduit(repo.id, TYPE_ID_DISTRIBUTOR_YUM) @@ -73,11 +78,15 @@ def test_cancel_publish_repo(self): self.distributor._publisher = None - def test_create_consumer_payload(self): + @patch('pulp_rpm.plugins.distributors.yum.distributor.pulp_server_config') + @patch('pulp_rpm.plugins.distributors.yum.distributor.platform_models') + def test_create_consumer_payload(self, m_platform_models, m_config): local_distributor = YumHTTPDistributor() repo = Mock() repo.display_name = 'foo' repo.id = 'bar' + m_repo = Mock(repo_id='bar', display_name='foo') + m_platform_models.Repository.objects.get.return_value = m_repo config = {'https_ca': 'pear', 'gpgkey': 'kiwi', 'auth_cert': 'durian', @@ -87,36 +96,48 @@ def 
test_create_consumer_payload(self): binding_config = {} cert_file = os.path.join(self.working_dir, "orange_file") - with mock_config.patch({'server': {'server_name': 'apple'}, - 'security': {'ssl_ca_certificate': cert_file}}): - with open(cert_file, 'w') as filewriter: - filewriter.write("orange") - - result = local_distributor.create_consumer_payload(repo, config, binding_config) - - target = { - 'server_name': 'apple', - 'ca_cert': 'orange', - 'relative_path': '/pulp/repos/bar', - 'gpg_keys': {'pulp.key': 'kiwi'}, - 'client_cert': 'durian', - 'protocols': ['http', 'https'], - 'repo_name': 'foo' - } - compare_dict(result, target) - - @mock_config.patch({'server': {'server_name': 'apple'}, - 'security': {'ssl_ca_certificate': 'orange'}}) + m_config.config = ConfigParser.SafeConfigParser() + m_config.config.add_section('server') + m_config.config.set('server', 'server_name', 'apple') + m_config.config.add_section('security') + m_config.config.set('security', 'ssl_ca_certificate', cert_file) + with open(cert_file, 'w') as filewriter: + filewriter.write("orange") + + result = local_distributor.create_consumer_payload(repo, config, binding_config) + + target = { + 'server_name': 'apple', + 'ca_cert': 'orange', + 'relative_path': '/pulp/repos/bar', + 'gpg_keys': {'pulp.key': 'kiwi'}, + 'client_cert': 'durian', + 'protocols': ['http', 'https'], + 'repo_name': 'foo' + } + self.assertDictEqual(result, target) + + @patch('pulp_rpm.plugins.distributors.yum.distributor.pulp_server_config') + @patch('pulp_rpm.plugins.distributors.yum.distributor.platform_models') @patch('pulp_rpm.plugins.distributors.yum.distributor.configuration.load_config') - def test_create_consumer_payload_global_auth(self, mock_load_config): + def test_create_consumer_payload_global_auth(self, mock_load_config, m_platform_models, + m_config): test_distributor = YumHTTPDistributor() repo = Mock() repo.display_name = 'foo' repo.id = 'bar' + m_repo = Mock(repo_id='bar', display_name='foo') + m_platform_models.Repository.objects.get.return_value = m_repo config = {'https_ca': 'pear', 'gpgkey': 'kiwi', 'http': True, 'https': True} + m_config.config = ConfigParser.SafeConfigParser() + m_config.config.add_section('server') + m_config.config.set('server', 'server_name', 'apple') + m_config.config.add_section('security') + m_config.config.set('security', 'ssl_ca_certificate', 'orange') + binding_config = {} repo_auth_config = ConfigParser.SafeConfigParser() @@ -148,98 +169,126 @@ def test_create_consumer_payload_global_auth(self, mock_load_config): compare_dict(result, target) +@patch('pulp_rpm.plugins.distributors.yum.distributor.platform_models') +@patch('pulp_rpm.plugins.distributors.yum.distributor.configuration.get_repo_relative_path') +@patch('pulp_rpm.plugins.distributors.yum.distributor.configuration.get_master_publish_dir') +@patch('pulp_rpm.plugins.distributors.yum.distributor.configuration.get_http_publish_dir') +@patch('pulp_rpm.plugins.distributors.yum.distributor.configuration.get_https_publish_dir') +@patch('pulp_rpm.plugins.distributors.yum.distributor.configuration.remove_cert_based_auth') +@patch('pulp_rpm.plugins.distributors.yum.distributor.shutil.rmtree') +@patch('pulp_rpm.plugins.distributors.yum.distributor.os') +@patch('pulp_rpm.plugins.distributors.yum.distributor.YumHTTPDistributor.' 
+ 'clean_simple_hosting_directories') class TestDistributorDistributorRemoved(unittest.TestCase): - def _apply_mock_patches(self): - self.patch_a = patch(CONFIGURATION + '.get_repo_relative_path') - self.mock_rel_path = self.patch_a.start() - - self.patch_b = patch(CONFIGURATION + '.get_master_publish_dir') - self.mock_master = self.patch_b.start() - - self.patch_c = patch(CONFIGURATION + '.get_http_publish_dir') - self.mock_http = self.patch_c.start() - - self.patch_d = patch(CONFIGURATION + '.get_https_publish_dir') - self.mock_https = self.patch_d.start() - - self.patch_e = patch(CONFIGURATION + '.remove_cert_based_auth') - self.mock_remove_cert = self.patch_e.start() - - self.patch_f = patch(DISTRIBUTOR + '.shutil.rmtree') - self.mock_rmtree = self.patch_f.start() - - self.patch_g = patch(DISTRIBUTOR + '.os') - self.mock_os = self.patch_g.start() - - self.patch_h = patch(DISTRIBUTOR + '.YumHTTPDistributor.clean_simple_hosting_directories') - self.mock_clean = self.patch_h.start() def setUp(self): - self._apply_mock_patches() self.working_dir = tempfile.mkdtemp() self.distributor = distributor.YumHTTPDistributor() - self.mock_repo = Mock() + self.mock_transfer_repo = Mock(id='foo') + self.mock_repo = Mock(repo_id='foo') self.config = {} - self.distributor.distributor_removed(self.mock_repo, self.config) def tearDown(self): - self.patch_a.stop() - self.patch_b.stop() - self.patch_c.stop() - self.patch_d.stop() - self.patch_e.stop() - self.patch_f.stop() - self.patch_g.stop() - self.patch_h.stop() - - def test_distributor_remove_distributor_calls_get_master_publish_dir(self): - self.mock_master.assert_called_once_with(self.mock_repo, TYPE_ID_DISTRIBUTOR_YUM) - - def test_distributor_remove_distributor_calls_get_http_publish_dir(self): - self.mock_http.assert_called_once_with(self.config) - - def test_distributor_remove_distributor_calls_get_https_publish_dir(self): - self.mock_https.assert_called_once_with(self.config) + shutil.rmtree(self.working_dir) - def test_distributor_remove_distributor_calls_get_repo_relative_path_twice(self): + def test_distributor_remove_distributor_calls_get_master_publish_dir( + self, m_csh, m_os, m_rmtree, m_remove_cert_based_auth, m_get_https_publish_dir, + m_get_http_publish_dir, m_get_master_publish_dir, m_get_repo_relative_path, + m_platform_models): + + m_platform_models.Repository.objects.get.return_value = self.mock_repo + self.distributor.distributor_removed(self.mock_transfer_repo, self.config) + m_get_master_publish_dir.assert_called_once_with(self.mock_repo, TYPE_ID_DISTRIBUTOR_YUM) + + def test_distributor_remove_distributor_calls_get_http_publish_dir( + self, m_csh, m_os, m_rmtree, m_remove_cert_based_auth, m_get_https_publish_dir, + m_get_http_publish_dir, m_get_master_publish_dir, m_get_repo_relative_path, + m_platform_models): + m_platform_models.Repository.objects.get.return_value = self.mock_repo + self.distributor.distributor_removed(self.mock_transfer_repo, self.config) + m_get_http_publish_dir.assert_called_once_with(self.config) + + def test_distributor_remove_distributor_calls_get_https_publish_dir( + self, m_csh, m_os, m_rmtree, m_remove_cert_based_auth, m_get_https_publish_dir, + m_get_http_publish_dir, m_get_master_publish_dir, m_get_repo_relative_path, + m_platform_models): + m_platform_models.Repository.objects.get.return_value = self.mock_repo + self.distributor.distributor_removed(self.mock_transfer_repo, self.config) + m_get_https_publish_dir.assert_called_once_with(self.config) + + def 
test_distributor_remove_distributor_calls_get_repo_relative_path_twice( + self, m_csh, m_os, m_rmtree, m_remove_cert_based_auth, m_get_https_publish_dir, + m_get_http_publish_dir, m_get_master_publish_dir, m_get_repo_relative_path, + m_platform_models): + m_platform_models.Repository.objects.get.return_value = self.mock_repo + self.distributor.distributor_removed(self.mock_transfer_repo, self.config) rel_path_calls = [ call(self.mock_repo, self.config), call(self.mock_repo, self.config), ] - self.mock_rel_path.assert_has_calls(rel_path_calls) - - def test_distributor_remove_distributor_calls_remove_cert_based_auth(self): - self.mock_remove_cert.assert_called_once_with(self.mock_repo, self.config) - - def test_distributor_remove_distributor_calls_clean_simple_hosting_directories(self): - self.assertEqual(self.mock_clean.call_count, 2) - - def test_distributor_remove_distributor_uses_rmtree_to_remove_working_dir_and_master_dir(self): + m_get_repo_relative_path.assert_has_calls(rel_path_calls) + + def test_distributor_remove_distributor_calls_remove_cert_based_auth( + self, m_csh, m_os, m_rmtree, m_remove_cert_based_auth, m_get_https_publish_dir, + m_get_http_publish_dir, m_get_master_publish_dir, m_get_repo_relative_path, + m_platform_models): + m_platform_models.Repository.objects.get.return_value = self.mock_repo + self.distributor.distributor_removed(self.mock_transfer_repo, self.config) + m_remove_cert_based_auth.assert_called_once_with(self.mock_repo, self.config) + + def test_distributor_remove_distributor_calls_clean_simple_hosting_directories( + self, m_csh, m_os, m_rmtree, m_remove_cert_based_auth, m_get_https_publish_dir, + m_get_http_publish_dir, m_get_master_publish_dir, m_get_repo_relative_path, + m_platform_models): + m_platform_models.Repository.objects.get.return_value = self.mock_repo + self.distributor.distributor_removed(self.mock_transfer_repo, self.config) + self.assertEqual(m_csh.call_count, 2) + + def test_distributor_remove_distributor_uses_rmtree_to_remove_working_dir_and_master_dir( + self, m_csh, m_os, m_rmtree, m_remove_cert_based_auth, m_get_https_publish_dir, + m_get_http_publish_dir, m_get_master_publish_dir, m_get_repo_relative_path, + m_platform_models): + m_platform_models.Repository.objects.get.return_value = self.mock_repo + self.distributor.distributor_removed(self.mock_transfer_repo, self.config) rmtree_calls = [ - call(self.mock_master.return_value, ignore_errors=True) + call(m_get_master_publish_dir.return_value, ignore_errors=True) ] - self.mock_rmtree.assert_has_calls(rmtree_calls) - - def test_distributor_remove_distributor_uses_unlink_to_remove_http_and_https_symlinks(self): + m_rmtree.assert_has_calls(rmtree_calls) + + def test_distributor_remove_distributor_uses_unlink_to_remove_http_and_https_symlinks( + self, m_csh, m_os, m_rmtree, m_remove_cert_based_auth, m_get_https_publish_dir, + m_get_http_publish_dir, m_get_master_publish_dir, m_get_repo_relative_path, + m_platform_models): + m_platform_models.Repository.objects.get.return_value = self.mock_repo + self.distributor.distributor_removed(self.mock_transfer_repo, self.config) unlink_calls = [ - call(self.mock_os.path.join.return_value.rstrip.return_value), - call(self.mock_os.path.join.return_value.rstrip.return_value) + call(m_os.path.join.return_value.rstrip.return_value), + call(m_os.path.join.return_value.rstrip.return_value) ] - self.mock_os.unlink.assert_has_calls(unlink_calls) + m_os.unlink.assert_has_calls(unlink_calls) - def 
test_distributor_remove_distributor_unlink_call_handles_OSError_raised(self): + def test_distributor_remove_distributor_unlink_call_handles_OSError_raised( + self, m_csh, m_os, m_rmtree, m_remove_cert_based_auth, m_get_https_publish_dir, + m_get_http_publish_dir, m_get_master_publish_dir, m_get_repo_relative_path, + m_platform_models): + m_platform_models.Repository.objects.get.return_value = self.mock_repo os_error_to_raise = OSError() os_error_to_raise.errno = errno.ENOENT - self.mock_os.unlink.side_effect = os_error_to_raise + m_os.unlink.side_effect = os_error_to_raise try: - self.distributor.distributor_removed(self.mock_repo, self.config) + self.distributor.distributor_removed(self.mock_transfer_repo, self.config) except Exception: self.fail('Distributor unlink should handle symlinks that do not exist.') - def test_distributor_remove_distributor_unlink_call_handles_non_oserror_raised(self): + def test_distributor_remove_distributor_unlink_call_handles_non_oserror_raised( + self, m_csh, m_os, m_rmtree, m_remove_cert_based_auth, m_get_https_publish_dir, + m_get_http_publish_dir, m_get_master_publish_dir, m_get_repo_relative_path, + m_platform_models): + m_platform_models.Repository.objects.get.return_value = self.mock_repo os_error_to_raise = OSError() - self.mock_os.unlink.side_effect = os_error_to_raise - self.assertRaises(OSError, self.distributor.distributor_removed, self.mock_repo, + m_os.unlink.side_effect = os_error_to_raise + self.assertRaises(OSError, self.distributor.distributor_removed, self.mock_transfer_repo, self.config) diff --git a/plugins/test/unit/plugins/distributors/yum/test_publish.py b/plugins/test/unit/plugins/distributors/yum/test_publish.py index 28cf30c65..0d8687dff 100644 --- a/plugins/test/unit/plugins/distributors/yum/test_publish.py +++ b/plugins/test/unit/plugins/distributors/yum/test_publish.py @@ -148,7 +148,7 @@ def test_publish(self, mock_publish_distribution, mock_publish_rpms, mock_publis self._init_publisher() self.publisher.repo.content_unit_counts = {} - self.publisher.publish() + self.publisher.process_lifecycle() mock_publish_distribution.assert_called_once() mock_publish_rpms.assert_called_once() diff --git a/plugins/test/unit/plugins/migrations/test_0011_migrate_new_importer.py b/plugins/test/unit/plugins/migrations/test_0011_migrate_new_importer.py index 40cb6eb0e..580ddc50e 100644 --- a/plugins/test/unit/plugins/migrations/test_0011_migrate_new_importer.py +++ b/plugins/test/unit/plugins/migrations/test_0011_migrate_new_importer.py @@ -8,9 +8,8 @@ import mock from pulp.server.db.migrate.models import _import_all_the_way -from pulp_rpm.plugins.db.models import RPM, SRPM - - +RPM_TYPE = 'rpm' +SRPM_TYPE = 'srpm' migration = _import_all_the_way('pulp_rpm.plugins.migrations.0011_new_importer') DATA_DIR = os.path.join(os.path.dirname(__file__), '..', '..', '..', 'data', '11_migrate_new_importer') @@ -25,15 +24,15 @@ def setUp(self): def test_types(self, mock_add): migration.migrate() self.assertEqual(mock_add.call_count, 2) - mock_add.assert_any_call(RPM.TYPE) - mock_add.assert_any_call(SRPM.TYPE) + mock_add.assert_any_call(RPM_TYPE) + mock_add.assert_any_call(SRPM_TYPE) @mock.patch('pulp.plugins.types.database.type_units_collection') def test_adds_size(self, mock_collection): mock_collection.return_value.find.return_value = [self.rpm_unit] self.assertFalse('size' in self.rpm_unit) - migration._migrate_collection(RPM.TYPE) + migration._migrate_collection(RPM_TYPE) result = mock_collection.return_value.save.call_args[0][0] self.assertTrue('size' 
in result) @@ -44,7 +43,7 @@ def test_adds_sourcerpm(self, mock_collection): mock_collection.return_value.find.return_value = [self.rpm_unit] self.assertFalse('sourcerpm' in self.rpm_unit) - migration._migrate_collection(RPM.TYPE) + migration._migrate_collection(RPM_TYPE) result = mock_collection.return_value.save.call_args[0][0] self.assertTrue('sourcerpm' in result) @@ -55,7 +54,7 @@ def test_adds_summary(self, mock_collection): mock_collection.return_value.find.return_value = [self.rpm_unit] self.assertFalse('summary' in self.rpm_unit) - migration._migrate_collection(RPM.TYPE) + migration._migrate_collection(RPM_TYPE) result = mock_collection.return_value.save.call_args[0][0] self.assertTrue('summary' in result) @@ -65,7 +64,7 @@ def test_adds_summary(self, mock_collection): def test_preserve_xml(self, mock_collection): mock_collection.return_value.find.return_value = [self.rpm_unit] - migration._migrate_collection(RPM.TYPE) + migration._migrate_collection(RPM_TYPE) result = mock_collection.return_value.save.call_args[0][0] # ensure no changes to actual XML @@ -76,7 +75,7 @@ def test_preserve_xml(self, mock_collection): def test_reformats_provides(self, mock_collection): mock_collection.return_value.find.return_value = [self.rpm_unit] - migration._migrate_collection(RPM.TYPE) + migration._migrate_collection(RPM_TYPE) result = mock_collection.return_value.save.call_args[0][0] provides = result['provides'] @@ -98,7 +97,7 @@ def test_reformats_provides(self, mock_collection): def test_reformats_requires(self, mock_collection): mock_collection.return_value.find.return_value = [self.rpm_unit] - migration._migrate_collection(RPM.TYPE) + migration._migrate_collection(RPM_TYPE) result = mock_collection.return_value.save.call_args[0][0] requires = result['requires'] @@ -114,7 +113,7 @@ def test_srpm_doesnt_have_sourcerpm_or_summary(self, mock_collection): self.assertTrue('summary' not in self.srpm_unit) mock_collection.return_value.find.return_value = [self.srpm_unit] - migration._migrate_collection(SRPM.TYPE) + migration._migrate_collection(SRPM_TYPE) result = mock_collection.return_value.save.call_args[0][0] self.assertTrue('sourcerpm' not in result) self.assertTrue('summary' not in result) diff --git a/plugins/test/unit/plugins/migrations/test_0016_new_yum_distributor.py b/plugins/test/unit/plugins/migrations/test_0016_new_yum_distributor.py index d9f9964bf..75aa7bc5c 100644 --- a/plugins/test/unit/plugins/migrations/test_0016_new_yum_distributor.py +++ b/plugins/test/unit/plugins/migrations/test_0016_new_yum_distributor.py @@ -164,7 +164,7 @@ def test_clear_orphaned_publish_dirs(self): self.assertFalse(os.path.exists(os.path.join(path, 'listing'))) @mock.patch('pulp_rpm.plugins.distributors.yum.publish.Publisher.get_working_dir') - @mock.patch('pulp_rpm.plugins.distributors.yum.publish.Publisher.publish') + @mock.patch('pulp_rpm.plugins.distributors.yum.publish.Publisher.process_lifecycle') def test_re_publish_repository(self, mock_publish, m_wd): repo_id = 'test_repo' diff --git a/plugins/types/iso_support.json b/plugins/types/iso_support.json deleted file mode 100644 index dd5e0c562..000000000 --- a/plugins/types/iso_support.json +++ /dev/null @@ -1,9 +0,0 @@ -{"types": [ - { - "id": "iso", - "display_name": "ISO", - "description": "ISO", - "unit_key": ["name", "checksum", "size"], - "search_indexes": [] - } -]} diff --git a/plugins/types/rpm_support.json b/plugins/types/rpm_support.json deleted file mode 100644 index 363619b78..000000000 --- a/plugins/types/rpm_support.json +++ /dev/null 
@@ -1,98 +0,0 @@ -{"types": [ - { - "id" : "distribution", - "display_name" : "Distribution", - "description" : "Kickstart trees and all accompanying files", - "unit_key" : - ["id", "family", "variant", "version", "arch"], - "search_indexes" : - ["id", "family", "variant", "version", "arch"] - }, - - { - "id" : "drpm", - "display_name" : "DRPM", - "description" : "DRPM", - "unit_key" : - ["epoch", "version", "release", "filename", "checksumtype", "checksum"], - "search_indexes" : - ["epoch", "version", "release", "checksum", "filename"] - }, - - { - "id" : "erratum", - "display_name" : "Erratum", - "description" : "Erratum advisory information", - "unit_key" : - ["id"], - "search_indexes" : [ - "id", "version", "release", "type", - "status", "updated", "issued", "severity", "references" - ], - "referenced_types" : ["rpm"] - }, - - { - "id" : "package_group", - "display_name" : "Package Group", - "description" : "Yum Package group information", - "unit_key" : - ["id", "repo_id"], - "search_indexes" : - ["id", "repo_id", "name", "mandatory_package_names", "conditional_package_names", - "optional_package_names", "default_package_names"] - }, - - { - "id" : "package_category", - "display_name" : "Package Category", - "description" : "Yum Package category information", - "unit_key" : - ["id", "repo_id"], - "search_indexes" : - ["id", "repo_id", "name", "packagegroupids"] - }, - - { - "id" : "package_environment", - "display_name" : "Package Environment", - "description" : "Yum Package environment information", - "unit_key" : - ["id", "repo_id"], - "search_indexes" : - ["id", "repo_id", "name", "group_ids"] - }, - - { - "id" : "rpm", - "display_name" : "RPM", - "description" : "RPM", - "unit_key" : - ["name", "epoch", "version", "release", "arch", "checksumtype", "checksum"], - "search_indexes" : - ["name", "epoch", "version", "release", "arch", "filename", "checksum", - "checksumtype", "version_sort_index", ["version_sort_index", "release_sort_index"]], - "referenced_types" : ["erratum"] - }, - - { - "id" : "srpm", - "display_name" : "SRPM", - "description" : "SRPM", - "unit_key" : - ["name", "epoch", "version", "release", "arch", "checksumtype", "checksum"], - "search_indexes" : - ["name", "epoch", "version", "release", "arch", "filename", "checksum", - "checksumtype", "version_sort_index", ["version_sort_index", "release_sort_index"]] - }, - - { - "id" : "yum_repo_metadata_file", - "display_name" : "YUM Repository Metadata File", - "description" : "YUM Repository Metadata File", - "unit_key" : - ["repo_id", "data_type"], - "search_indexes" : - ["data_type"] - } -]}
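
Note for reviewers: the serializers added in plugins/pulp_rpm/plugins/serializers.py exist so the REST API can keep exposing the legacy field names that the deleted types JSON files used (id, from, filename, checksumtype, ...) while the mongoengine documents carry the renamed fields introduced by migration 0022. A minimal sketch of that remapping is below; to_representation/from_representation are hypothetical stand-ins illustrating only the direction of the mapping, not the actual API of pulp.server.webservices.views.serializers.ModelSerializer.

# Illustrative sketch only; not part of this patch. Assumes a dict-like
# document and the Errata remapped_fields defined in serializers.py above.
ERRATA_REMAPPED_FIELDS = {'errata_from': 'from',
                          'errata_id': 'id',
                          'user_metadata': 'pulp_user_metadata'}


def to_representation(document, remapped_fields):
    # mongoengine field names -> legacy API names (e.g. errata_id -> id)
    return dict((remapped_fields.get(key, key), value)
                for key, value in document.items())


def from_representation(representation, remapped_fields):
    # legacy API names -> mongoengine field names (e.g. id -> errata_id)
    reverse = dict((value, key) for key, value in remapped_fields.items())
    return dict((reverse.get(key, key), value)
                for key, value in representation.items())


document = {'errata_id': 'RHSA-2015:1234', 'errata_from': 'secalert@example.com'}
assert to_representation(document, ERRATA_REMAPPED_FIELDS) == {
    'id': 'RHSA-2015:1234', 'from': 'secalert@example.com'}
assert from_representation({'id': 'RHSA-2015:1234'}, ERRATA_REMAPPED_FIELDS) == {
    'errata_id': 'RHSA-2015:1234'}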