Merged
8 changes: 6 additions & 2 deletions charon/cmd/command.py
@@ -104,7 +104,7 @@ def upload(
     version: str,
     target: str,
     root_path="maven-repository",
-    ignore_patterns=None,
+    ignore_patterns: List[str] = None,
     debug=False,
     quiet=False,
     dryrun=False
@@ -128,6 +128,7 @@ def upload(
     archive_path = __get_local_repo(repo)
     npm_archive_type = detect_npm_archive(archive_path)
     product_key = f"{product}-{version}"
+    prefix_ = conf.get_bucket_prefix(target)
     if npm_archive_type != NpmArchiveType.NOT_NPM:
         logger.info("This is a npm archive")
         handle_npm_uploading(archive_path, product_key,
@@ -144,6 +145,7 @@ def upload(
                                ignore_patterns_list,
                                root=root_path,
                                bucket_name=aws_bucket,
+                               prefix=prefix_,
                                dry_run=dryrun)
 
 
@@ -221,7 +223,7 @@ def delete(
     version: str,
     target: str,
     root_path="maven-repository",
-    ignore_patterns=None,
+    ignore_patterns: List[str] = None,
     debug=False,
     quiet=False,
     dryrun=False
@@ -245,6 +247,7 @@ def delete(
     archive_path = __get_local_repo(repo)
     npm_archive_type = detect_npm_archive(archive_path)
     product_key = f"{product}-{version}"
+    prefix_ = conf.get_bucket_prefix(target)
     if npm_archive_type != NpmArchiveType.NOT_NPM:
         logger.info("This is a npm archive")
         handle_npm_del(archive_path, product_key,
@@ -261,6 +264,7 @@ def delete(
                          ignore_patterns_list,
                          root=root_path,
                          bucket_name=aws_bucket,
+                         prefix=prefix_,
                          dry_run=dryrun)
 
 
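Note: the net effect of the two new prefix=prefix_ arguments is that every key the handlers write lands under the per-target bucket prefix. A rough illustration, assuming S3Client joins key_prefix onto the root-relative path with os.path.join (the S3Client change itself is not part of this diff):

    import os

    # Hypothetical illustration only: how a configured prefix shifts uploaded keys.
    prefix_ = "ga"  # e.g. what conf.get_bucket_prefix(target) could return
    rel_path = "org/foo/bar/1.0/bar-1.0.pom"
    key = os.path.join(prefix_, rel_path) if prefix_ else rel_path
    print(key)  # ga/org/foo/bar/1.0/bar-1.0.pom
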
6 changes: 4 additions & 2 deletions charon/config.py
@@ -19,6 +19,8 @@
 import os
 import logging
 
+from charon.utils.strings import remove_prefix
+
 CONFIG_FILE = "charon.yaml"
 
 logger = logging.getLogger(__name__)
@@ -62,8 +64,8 @@ def get_bucket_prefix(self, target: str) -> str:
                 "in charon configuration, so no prefix will "
                 "be used", target)
             prefix = ""
-        if prefix.startswith("/"):
-            prefix = prefix[1:]
+        # remove the leading slash as it is not needed.
+        prefix = remove_prefix(prefix, "/")
         return prefix
 
 
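get_bucket_prefix() now delegates the slash handling to remove_prefix from charon/utils/strings, a module this diff does not show. A minimal sketch of what that helper presumably looks like -- hand-rolled because str.removeprefix only exists on Python 3.9+, and written to tolerate a None argument since handle_maven_uploading defaults prefix to None:

    # charon/utils/strings.py -- minimal sketch; the real module is not in this diff.
    def remove_prefix(s: str, prefix: str) -> str:
        # tolerate None/empty input; callers may pass prefix=None
        if s and s.startswith(prefix):
            return s[len(prefix):]
        return s
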
38 changes: 31 additions & 7 deletions charon/pkgs/indexing.py
@@ -21,6 +21,8 @@
 import logging
 from typing import List, Set
 
+from charon.utils.strings import remove_prefix
+
 logger = logging.getLogger(__name__)
 
 
@@ -50,7 +52,11 @@ def generate_index_file_content(self) -> str:
 
 
 def generate_indexes(
-    top_level: str, changed_dirs: List[str], s3_client: S3Client, bucket: str
+    top_level: str,
+    changed_dirs: List[str],
+    s3_client: S3Client,
+    bucket: str,
+    prefix: str
 ) -> List[str]:
     if top_level[-1] != '/':
         top_level += '/'
@@ -70,13 +76,13 @@ def generate_indexes(
     s3_folders = sorted(s3_folders, key=FolderLenCompareKey)
     for folder_ in s3_folders:
         index_html = __generate_index_html(
-            s3_client, bucket, folder_, top_level
+            s3_client, bucket, folder_, top_level, prefix
         )
         if index_html:
             generated_htmls.append(index_html)
 
     root_index = __generate_index_html(
-        s3_client, bucket, "/", top_level
+        s3_client, bucket, "/", top_level, prefix
     )
     if root_index:
         generated_htmls.append(root_index)
@@ -88,11 +94,16 @@ def __generate_index_html(
     s3_client: S3Client,
     bucket: str,
     folder_: str,
-    top_level: str
+    top_level: str,
+    prefix: str = None
 ) -> str:
+    if folder_ != "/":
+        search_folder = os.path.join(prefix, folder_) if prefix else folder_
+    else:
+        search_folder = prefix if prefix else "/"
     contents = s3_client.list_folder_content(
         bucket_name=bucket,
-        folder=folder_
+        folder=search_folder
     )
     index = None
     if len(contents) == 1 and contents[0].endswith("index.html"):
@@ -106,10 +117,22 @@ def __generate_index_html(
             file_paths=[removed_index],
             bucket_name=bucket,
             product=None,
-            root=top_level
+            root=top_level,
+            key_prefix=prefix
         )
     elif len(contents) >= 1:
-        index = __to_html(contents, folder_, top_level)
+        real_contents = []
+        if prefix and prefix.strip() != "":
+            for c in contents:
+                if c.startswith(prefix):
+                    real_c = remove_prefix(c, prefix)
+                    real_c = remove_prefix(real_c, "/")
+                    real_contents.append(real_c)
+                else:
+                    real_contents.append(c)
+        else:
+            real_contents = contents
+        index = __to_html(real_contents, folder_, top_level)
 
     return index
 
@@ -119,6 +142,7 @@ def __to_html(contents: List[str], folder: str, top_level: str) -> str:
     if folder != "/":
         items.append("../")
     for c in contents:
+        # index.html does not need to be included in html content.
         if not c.endswith("index.html"):
             items.append(c[len(folder):])
         else:
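To make the prefix handling in __generate_index_html concrete: the S3 listing is requested under prefix/folder, and each returned key is stripped back to a bucket-root-relative path before __to_html renders the links. A self-contained walk-through with invented data:

    # Illustrative only: mimics the prefix/un-prefix round trip above.
    import os

    def remove_prefix(s: str, prefix: str) -> str:
        return s[len(prefix):] if s.startswith(prefix) else s

    prefix, folder_ = "ga", "org/foo/"
    search_folder = os.path.join(prefix, folder_) if prefix else folder_
    print(search_folder)  # ga/org/foo/ -- what list_folder_content is asked for
    contents = ["ga/org/foo/bar/", "ga/org/foo/maven-metadata.xml"]
    real_contents = [remove_prefix(remove_prefix(c, prefix), "/") for c in contents]
    print(real_contents)  # ['org/foo/bar/', 'org/foo/maven-metadata.xml']
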
77 changes: 61 additions & 16 deletions charon/pkgs/maven.py
@@ -16,6 +16,7 @@
 import charon.pkgs.indexing as indexing
 from charon.utils.files import write_file
 from charon.utils.archive import extract_zip_all
+from charon.utils.strings import remove_prefix
 from charon.storage import S3Client
 from charon.pkgs.pkg_utils import upload_post_process, rollback_post_process
 from charon.config import get_template
@@ -181,6 +182,7 @@ def handle_maven_uploading(
     ignore_patterns=None,
     root="maven-repository",
     bucket_name=None,
+    prefix=None,
     dir_=None,
     do_index=True,
     dry_run=False
@@ -221,18 +223,24 @@ def handle_maven_uploading(
         _handle_error(err_msgs)
     # Question: should we exit here?
 
+    prefix_ = remove_prefix(prefix, "/")
     # 4. Do uploading
     logger.info("Start uploading files to s3")
     s3_client = S3Client(dry_run=dry_run)
     bucket = bucket_name
-    (_, failed_files) = s3_client.upload_files(
-        file_paths=valid_mvn_paths, bucket_name=bucket, product=prod_key, root=top_level
+    _, failed_files = s3_client.upload_files(
+        file_paths=valid_mvn_paths, bucket_name=bucket,
+        product=prod_key, root=top_level, key_prefix=prefix_
     )
     logger.info("Files uploading done\n")
 
     # 5. Use uploaded poms to scan s3 for metadata refreshment
     logger.info("Start generating maven-metadata.xml files for all artifacts")
-    meta_files = _generate_metadatas(s3_client, bucket, valid_poms, top_level)
+    meta_files = _generate_metadatas(
+        s3=s3_client, bucket=bucket,
+        poms=valid_poms, root=top_level,
+        prefix=prefix_
+    )
     logger.info("maven-metadata.xml files generation done\n")
 
     failed_metas = meta_files.get(META_FILE_FAILED, [])
@@ -243,7 +251,8 @@ def handle_maven_uploading(
             meta_file_paths=meta_files[META_FILE_GEN_KEY],
             bucket_name=bucket,
             product=prod_key,
-            root=top_level
+            root=top_level,
+            key_prefix=prefix_
         )
         failed_metas.extend(_failed_metas)
     logger.info("maven-metadata.xml updating done\n")
@@ -252,14 +261,18 @@ def handle_maven_uploading(
     # index is similar to metadata, it will be overwritten everytime
     if do_index:
         logger.info("Start generating index files to s3")
-        created_indexes = indexing.generate_indexes(top_level, valid_dirs, s3_client, bucket)
+        created_indexes = indexing.generate_indexes(
+            top_level, valid_dirs, s3_client, bucket, prefix_
+        )
         logger.info("Index files generation done.\n")
 
         logger.info("Start updating index files to s3")
         (_, _failed_metas) = s3_client.upload_metadatas(
             meta_file_paths=created_indexes,
             bucket_name=bucket,
-            product=None, root=top_level
+            product=None,
+            root=top_level,
+            key_prefix=prefix_
         )
         failed_metas.extend(_failed_metas)
         logger.info("Index files updating done\n")
Expand All @@ -275,6 +288,7 @@ def handle_maven_del(
ignore_patterns=None,
root="maven-repository",
bucket_name=None,
prefix=None,
dir_=None,
do_index=True,
dry_run=False
@@ -312,6 +326,7 @@ def handle_maven_del(
         logger.debug("G: %s, A: %s", g, a)
         ga_paths.append(os.path.join("/".join(g.split(".")), a))
 
+    prefix_ = remove_prefix(prefix, "/")
     # 4. Delete all valid_paths from s3
     logger.info("Start deleting files from s3")
     s3_client = S3Client(dry_run=dry_run)
@@ -320,13 +335,19 @@ def handle_maven_del(
         valid_mvn_paths,
         bucket_name=bucket,
         product=prod_key,
-        root=top_level
+        root=top_level,
+        key_prefix=prefix_
     )
     logger.info("Files deletion done\n")
 
     # 5. Use changed GA to scan s3 for metadata refreshment
     logger.info("Start generating maven-metadata.xml files for all changed GAs")
-    meta_files = _generate_metadatas(s3_client, bucket, valid_poms, top_level)
+    meta_files = _generate_metadatas(
+        s3=s3_client, bucket=bucket,
+        poms=valid_poms, root=top_level,
+        prefix=prefix_
+    )
+
     logger.info("maven-metadata.xml files generation done\n")
 
     # 6. Upload all maven-metadata.xml. We need to delete metadata files
@@ -336,30 +357,38 @@ def handle_maven_del(
     for _, files in meta_files.items():
         all_meta_files.extend(files)
     s3_client.delete_files(
-        file_paths=all_meta_files, bucket_name=bucket, product=prod_key, root=top_level
+        file_paths=all_meta_files,
+        bucket_name=bucket,
+        product=prod_key,
+        root=top_level,
+        key_prefix=prefix_
     )
     failed_metas = meta_files.get(META_FILE_FAILED, [])
    if META_FILE_GEN_KEY in meta_files:
         (_, _failed_metas) = s3_client.upload_metadatas(
             meta_file_paths=meta_files[META_FILE_GEN_KEY],
             bucket_name=bucket,
             product=None,
-            root=top_level
+            root=top_level,
+            key_prefix=prefix_
         )
         failed_metas.extend(_failed_metas)
     logger.info("maven-metadata.xml updating done\n")
 
     if do_index:
         logger.info("Start generating index files for all changed entries")
-        created_indexes = indexing.generate_indexes(top_level, valid_dirs, s3_client, bucket)
+        created_indexes = indexing.generate_indexes(
+            top_level, valid_dirs, s3_client, bucket, prefix_
+        )
         logger.info("Index files generation done.\n")
 
         logger.info("Start updating index to s3")
         (_, _failed_index_files) = s3_client.upload_metadatas(
             meta_file_paths=created_indexes,
             bucket_name=bucket,
             product=None,
-            root=top_level
+            root=top_level,
+            key_prefix=prefix_
         )
         failed_metas.extend(_failed_index_files)
         logger.info("Index files updating done.\n")
@@ -445,7 +474,9 @@ def _scan_paths(files_root: str, ignore_patterns: List[str],
 
 
 def _generate_metadatas(
-    s3: S3Client, bucket: str, poms: List[str], root: str
+    s3: S3Client, bucket: str,
+    poms: List[str], root: str,
+    prefix: str = None
 ) -> Dict[str, List[str]]:
     """Collect GAVs and generating maven-metadata.xml.
     As all valid poms has been stored in s3 bucket,
@@ -467,9 +498,12 @@ def _generate_metadatas(
     for path, _ in gas_dict.items():
         # avoid some wrong prefix, like searching a/b
         # but got a/b-1
+        ga_prefix = path
+        if prefix:
+            ga_prefix = os.path.join(prefix, path)
         if not path.endswith("/"):
-            path = path + "/"
-        (existed_poms, success) = s3.get_files(bucket, path, ".pom")
+            ga_prefix = ga_prefix + "/"
+        (existed_poms, success) = s3.get_files(bucket, ga_prefix, ".pom")
         if len(existed_poms) == 0:
             if success:
                 logger.debug(
@@ -488,7 +522,13 @@ def _generate_metadatas(
                 logger.debug(
                     "Got poms in s3 bucket %s for GA path %s: %s", bucket, path, poms
                 )
-        all_poms.extend(existed_poms)
+        un_prefixed_poms = existed_poms
+        if prefix:
+            if not prefix.endswith("/"):
+                un_prefixed_poms = [__remove_prefix(pom, prefix + "/") for pom in existed_poms]
+            else:
+                un_prefixed_poms = [__remove_prefix(pom, prefix) for pom in existed_poms]
+        all_poms.extend(un_prefixed_poms)
     gav_dict = parse_gavs(all_poms)
     if len(gav_dict) > 0:
         meta_files_generation = []
@@ -524,6 +564,11 @@ def _handle_error(err_msgs: List[str]):
     pass
 
 
+def __remove_prefix(s: str, prefix: str) -> str:
+    # fall back to the original string when the prefix is absent
+    return s[len(prefix):] if s.startswith(prefix) else s
+
+
 class VersionCompareKey:
     'Used as key function for version sorting'
     def __init__(self, obj):
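The un-prefixing in _generate_metadatas has to cope with a prefix configured either as "ga" or "ga/": both must reduce the same S3 pom key to the same root-relative path before parse_gavs sees it. A quick check of that invariant, re-implementing the private helper locally for the example:

    def __remove_prefix(s: str, prefix: str) -> str:
        return s[len(prefix):] if s.startswith(prefix) else s

    key = "ga/org/foo/bar/1.0/bar-1.0.pom"
    for prefix in ("ga", "ga/"):
        strip = prefix if prefix.endswith("/") else prefix + "/"
        print(__remove_prefix(key, strip))  # org/foo/bar/1.0/bar-1.0.pom
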