Skip to content

Commit

Permalink
publishing kickstarts
Browse files Browse the repository at this point in the history
closes #5206
https://pulp.plan.io/issues/5206
Required PR: pulp#1418
Required PR: pulp#1427
Required PR: pulp#1440
  • Loading branch information
fao89 committed Sep 17, 2019
1 parent 0a0117a commit 8a8048e
Show file tree
Hide file tree
Showing 3 changed files with 341 additions and 100 deletions.
1 change: 1 addition & 0 deletions CHANGES/5206.feature
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Sync and Publish kickstart trees.
309 changes: 209 additions & 100 deletions pulp_rpm/app/tasks/publishing.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,14 @@

from pulpcore.plugin.tasking import WorkingDirectory

from pulp_rpm.app.models import Package, RepoMetadataFile, RpmPublication, UpdateRecord
from pulp_rpm.app.models import (
DistributionTree,
Package,
RepoMetadataFile,
RpmPublication,
UpdateRecord,
)
from pulp_rpm.app.tasks.utils import create_treeinfo

log = logging.getLogger(__name__)

Expand All @@ -30,6 +37,8 @@ class PublicationData:
Attributes:
publication (pulpcore.plugin.models.Publication): A Publication to populate.
packages (pulp_rpm.models.Package): A list of published packages.
published_artifacts (pulpcore.plugin.models.PublishedArtifact): A published artifacts list.
sub_repos (list): A list of tuples with sub_repos data.
repomdrecords (list): A list of tuples with repomdrecords data.
"""
Expand All @@ -44,26 +53,57 @@ def __init__(self, publication):
"""
self.publication = publication
self.packages = []
self.published_artifacts = []
self.sub_repos = []
self.repomdrecords = []

def prepare_metadata_files(self):
def prepare_metadata_files(self, contents, folder=None):
"""
Copies metadata files from the Artifact storage.
Args:
contents (pulpcore.plugin.models.Content): A list of contents.
Keyword Args:
folder(str): name of the directory.
Returns:
repomdrecords (list): A list of tuples with repomdrecords data.
"""
publication = self.publication
repomdrecords = []
repo_metadata_files = RepoMetadataFile.objects.filter(
pk__in=publication.repository_version.content).prefetch_related('contentartifact_set')
pk__in=contents).prefetch_related('contentartifact_set')

for repo_metadata_file in repo_metadata_files:
content_artifact = repo_metadata_file.contentartifact_set.get()
current_file = content_artifact.artifact.file.file
path = content_artifact.relative_path.split("/")[-1]
if repo_metadata_file.checksum in path:
path = path.split("-")[-1]
if folder:
path = os.path.join(folder, path)
with open(path, "wb") as new_file:
shutil.copyfileobj(current_file, new_file)
self.repomdrecords.append((repo_metadata_file.data_type, new_file.name, None))
repomdrecords.append((repo_metadata_file.data_type, new_file.name, None))

return repomdrecords

def get_packages(self, contents):
"""
Get packages from content.
Args:
contents (pulpcore.plugin.models.Content): A list of contents.
Returns:
packages (pulp_rpm.models.Package): A list of packages.
"""
packages = Package.objects.filter(pk__in=contents).\
prefetch_related('contentartifact_set')

return packages

def populate(self):
"""
Expand All @@ -73,20 +113,62 @@ def populate(self):
"""
publication = self.publication
main_content = publication.repository_version.content

distribution_trees = DistributionTree.objects.filter(
pk__in=publication.repository_version.content
).prefetch_related(
"addons",
"variants",
"addons__repository",
"variants__repository",
"contentartifact_set"
)

for distribution_tree in distribution_trees:
for content_artifact in distribution_tree.contentartifact_set.all():
self.published_artifacts.append(PublishedArtifact(
relative_path=content_artifact.relative_path,
publication=publication,
content_artifact=content_artifact)
)
for addon in distribution_tree.addons.all():
repository_version = RepositoryVersion.latest(addon.repository)
if repository_version and repository_version.content != main_content:
self.sub_repos.append((addon.addon_id, repository_version.content))
for variant in distribution_tree.variants.all():
repository_version = RepositoryVersion.latest(variant.repository)
if repository_version and repository_version.content != main_content:
self.sub_repos.append((variant.variant_id, repository_version.content))

treeinfo_file = create_treeinfo(distribution_tree)
metadata = PublishedMetadata(
relative_path=treeinfo_file.name,
publication=publication,
file=File(open(treeinfo_file.name, 'rb'))
)
metadata.save()

self.packages = Package.objects.filter(pk__in=publication.repository_version.content).\
prefetch_related('contentartifact_set')
published_artifacts = []
self.packages = self.get_packages(main_content)
self.repomdrecords = self.prepare_metadata_files(main_content)

for package in self.packages:
all_packages = self.packages
for name, content in self.sub_repos:
os.mkdir(name)
sub_repo_packages = self.get_packages(content)
all_packages = all_packages | sub_repo_packages
setattr(self, f"{name}_packages", sub_repo_packages)
setattr(self, f"{name}_repomdrecords", self.prepare_metadata_files(content, name))

for package in all_packages.distinct():
for content_artifact in package.contentartifact_set.all():
published_artifacts.append(PublishedArtifact(
self.published_artifacts.append(PublishedArtifact(
relative_path=content_artifact.relative_path,
publication=publication,
publication=self.publication,
content_artifact=content_artifact)
)

PublishedArtifact.objects.bulk_create(published_artifacts)
PublishedArtifact.objects.bulk_create(self.published_artifacts)


def update_record_xml(update_record):
Expand Down Expand Up @@ -165,95 +247,122 @@ def publish(repository_version_pk):
with RpmPublication.create(repository_version) as publication:
publication_data = PublicationData(publication)
publication_data.populate()

packages = publication_data.packages

# Prepare metadata files
repomd_path = os.path.join(os.getcwd(), "repomd.xml")
pri_xml_path = os.path.join(os.getcwd(), "primary.xml.gz")
fil_xml_path = os.path.join(os.getcwd(), "filelists.xml.gz")
oth_xml_path = os.path.join(os.getcwd(), "other.xml.gz")
pri_db_path = os.path.join(os.getcwd(), "primary.sqlite")
fil_db_path = os.path.join(os.getcwd(), "filelists.sqlite")
oth_db_path = os.path.join(os.getcwd(), "other.sqlite")
upd_xml_path = os.path.join(os.getcwd(), "updateinfo.xml.gz")

pri_xml = cr.PrimaryXmlFile(pri_xml_path)
fil_xml = cr.FilelistsXmlFile(fil_xml_path)
oth_xml = cr.OtherXmlFile(oth_xml_path)
pri_db = cr.PrimarySqlite(pri_db_path)
fil_db = cr.FilelistsSqlite(fil_db_path)
oth_db = cr.OtherSqlite(oth_db_path)
upd_xml = cr.UpdateInfoXmlFile(upd_xml_path)

pri_xml.set_num_of_pkgs(len(packages))
fil_xml.set_num_of_pkgs(len(packages))
oth_xml.set_num_of_pkgs(len(packages))

# Process all packages
for package in packages:
pkg = package.to_createrepo_c()
pkg.location_href = package.contentartifact_set.first().relative_path
pri_xml.add_pkg(pkg)
fil_xml.add_pkg(pkg)
oth_xml.add_pkg(pkg)
pri_db.add_pkg(pkg)
fil_db.add_pkg(pkg)
oth_db.add_pkg(pkg)

# Process update records
for update_record in UpdateRecord.objects.filter(
pk__in=publication.repository_version.content):
upd_xml.add_chunk(update_record_xml(update_record))

pri_xml.close()
fil_xml.close()
oth_xml.close()
upd_xml.close()

repomd = cr.Repomd()

publication_data.prepare_metadata_files()

repomdrecords = [("primary", pri_xml_path, pri_db),
("filelists", fil_xml_path, fil_db),
("other", oth_xml_path, oth_db),
("primary_db", pri_db_path, None),
("filelists_db", fil_db_path, None),
("other_db", oth_db_path, None),
("updateinfo", upd_xml_path, None)]

repomdrecords.extend(publication_data.repomdrecords)

sqlite_files = ("primary_db", "filelists_db", "other_db")
for name, path, db_to_update in repomdrecords:
record = cr.RepomdRecord(name, path)
if name in sqlite_files:
record_bz = record.compress_and_fill(cr.SHA256, cr.BZ2)
record_bz.type = name
record_bz.rename_file()
path = record_bz.location_href.split('/')[-1]
repomd.set_record(record_bz)
else:
record.fill(cr.SHA256)
if (db_to_update):
db_to_update.dbinfo_update(record.checksum)
db_to_update.close()
record.rename_file()
path = record.location_href.split('/')[-1]
repomd.set_record(record)
metadata = PublishedMetadata(
relative_path=os.path.join(REPODATA_PATH, os.path.basename(path)),
publication=publication,
file=File(open(os.path.basename(path), 'rb'))
)
metadata.save()
# Main repo
create_rempomd_xml(packages, publication, publication_data.repomdrecords)

with open(repomd_path, "w") as repomd_f:
repomd_f.write(repomd.xml_dump())
for sub_repo in publication_data.sub_repos:
name = sub_repo[0]
packages = getattr(publication_data, f"{name}_packages")
extra_repomdrecords = getattr(publication_data, f"{name}_repomdrecords")
create_rempomd_xml(packages, publication, extra_repomdrecords, name)

metadata = PublishedMetadata(
relative_path=os.path.join(REPODATA_PATH, os.path.basename(repomd_path)),
publication=publication,
file=File(open(os.path.basename(repomd_path), 'rb'))
)
metadata.save()

def create_rempomd_xml(packages, publication, extra_repomdrecords, sub_folder=None):
    """
    Creates a repomd.xml file.

    Args:
        packages(app.models.Package): set of packages
        publication(pulpcore.plugin.models.Publication): the publication
        extra_repomdrecords(list): list with data relative to repo metadata files
        sub_folder(str): name of the folder for sub repos

    """
    cwd = os.getcwd()
    repodata_path = REPODATA_PATH
    if sub_folder:
        # Sub-repos (addons/variants) publish their metadata in a subfolder.
        cwd = os.path.join(cwd, sub_folder)
        repodata_path = os.path.join(repodata_path, sub_folder)

    # Working paths for every metadata file we are about to generate.
    repomd_path = os.path.join(cwd, "repomd.xml")
    pri_xml_path = os.path.join(cwd, "primary.xml.gz")
    fil_xml_path = os.path.join(cwd, "filelists.xml.gz")
    oth_xml_path = os.path.join(cwd, "other.xml.gz")
    pri_db_path = os.path.join(cwd, "primary.sqlite")
    fil_db_path = os.path.join(cwd, "filelists.sqlite")
    oth_db_path = os.path.join(cwd, "other.sqlite")
    upd_xml_path = os.path.join(cwd, "updateinfo.xml.gz")

    pri_xml = cr.PrimaryXmlFile(pri_xml_path)
    fil_xml = cr.FilelistsXmlFile(fil_xml_path)
    oth_xml = cr.OtherXmlFile(oth_xml_path)
    pri_db = cr.PrimarySqlite(pri_db_path)
    fil_db = cr.FilelistsSqlite(fil_db_path)
    oth_db = cr.OtherSqlite(oth_db_path)
    upd_xml = cr.UpdateInfoXmlFile(upd_xml_path)

    package_count = len(packages)
    pri_xml.set_num_of_pkgs(package_count)
    fil_xml.set_num_of_pkgs(package_count)
    oth_xml.set_num_of_pkgs(package_count)

    # Feed every package into the xml and sqlite metadata writers.
    for package in packages:
        pkg = package.to_createrepo_c()
        pkg.location_href = package.contentartifact_set.first().relative_path
        for metadata_writer in (pri_xml, fil_xml, oth_xml, pri_db, fil_db, oth_db):
            metadata_writer.add_pkg(pkg)

    # Advisories are serialized directly into updateinfo.xml.
    update_records = UpdateRecord.objects.filter(
        pk__in=publication.repository_version.content)
    for update_record in update_records:
        upd_xml.add_chunk(update_record_xml(update_record))

    for xml_file in (pri_xml, fil_xml, oth_xml, upd_xml):
        xml_file.close()

    repomd = cr.Repomd()

    repomdrecords = [("primary", pri_xml_path, pri_db),
                     ("filelists", fil_xml_path, fil_db),
                     ("other", oth_xml_path, oth_db),
                     ("primary_db", pri_db_path, None),
                     ("filelists_db", fil_db_path, None),
                     ("other_db", oth_db_path, None),
                     ("updateinfo", upd_xml_path, None)]
    repomdrecords.extend(extra_repomdrecords)

    sqlite_files = ("primary_db", "filelists_db", "other_db")
    for name, path, db_to_update in repomdrecords:
        record = cr.RepomdRecord(name, path)
        if name in sqlite_files:
            # Sqlite records are re-compressed with bz2 before publishing.
            record_bz = record.compress_and_fill(cr.SHA256, cr.BZ2)
            record_bz.type = name
            record_bz.rename_file()
            path = record_bz.location_href.split('/')[-1]
            repomd.set_record(record_bz)
        else:
            record.fill(cr.SHA256)
            if db_to_update:
                # Stamp the matching xml record's checksum into the sqlite db.
                db_to_update.dbinfo_update(record.checksum)
                db_to_update.close()
            record.rename_file()
            path = record.location_href.split('/')[-1]
            repomd.set_record(record)
        PublishedMetadata(
            relative_path=os.path.join(repodata_path, os.path.basename(path)),
            publication=publication,
            file=File(open(os.path.basename(path), 'rb')),
        ).save()

    with open(repomd_path, "w") as repomd_f:
        repomd_f.write(repomd.xml_dump())

    PublishedMetadata(
        relative_path=os.path.join(repodata_path, os.path.basename(repomd_path)),
        publication=publication,
        file=File(open(os.path.basename(repomd_path), 'rb')),
    ).save()
Loading

0 comments on commit 8a8048e

Please sign in to comment.